Boost-Commit:
From: grafikrobot_at_[hidden]
Date: 2008-03-15 14:55:36
Author: grafik
Date: 2008-03-15 14:55:28 EDT (Sat, 15 Mar 2008)
New Revision: 43635
URL: http://svn.boost.org/trac/boost/changeset/43635
Log:
Merge latest BBv2 from trunk to release.
Added:
branches/release/tools/build/v2/test/conditionals_multiple.py
- copied unchanged from r43634, /trunk/tools/build/v2/test/conditionals_multiple.py
branches/release/tools/build/v2/test/configuration.py
- copied unchanged from r43634, /trunk/tools/build/v2/test/configuration.py
branches/release/tools/build/v2/test/default_toolset.py
- copied unchanged from r43634, /trunk/tools/build/v2/test/default_toolset.py
branches/release/tools/build/v2/tools/intel-darwin.jam
- copied unchanged from r43634, /trunk/tools/build/v2/tools/intel-darwin.jam
Text files modified:
branches/release/tools/build/v2/build-system.jam | 1251 ++++++++++++++++++++++-----------------
branches/release/tools/build/v2/build/alias.jam | 55
branches/release/tools/build/v2/build/build-request.jam | 185 ++--
branches/release/tools/build/v2/build/feature.jam | 869 +++++++++++++-------------
branches/release/tools/build/v2/build/generators.jam | 903 ++++++++++++++--------------
branches/release/tools/build/v2/build/modifiers.jam | 16
branches/release/tools/build/v2/build/project.jam | 799 ++++++++++++------------
branches/release/tools/build/v2/build/property-set.jam | 296 ++++-----
branches/release/tools/build/v2/build/property.jam | 424 ++++++------
branches/release/tools/build/v2/build/scanner.jam | 1
branches/release/tools/build/v2/build/targets.jam | 1262 +++++++++++++++++++--------------------
branches/release/tools/build/v2/build/toolset.jam | 374 +++++-----
branches/release/tools/build/v2/build/type.jam | 299 ++++----
branches/release/tools/build/v2/build/version.jam | 8
branches/release/tools/build/v2/build/virtual-target.jam | 879 +++++++++++++--------------
branches/release/tools/build/v2/doc/src/advanced.xml | 6
branches/release/tools/build/v2/doc/src/architecture.xml | 2
branches/release/tools/build/v2/doc/src/extending.xml | 106 +-
branches/release/tools/build/v2/doc/src/faq.xml | 10
branches/release/tools/build/v2/doc/src/reference.xml | 198 ++++++
branches/release/tools/build/v2/doc/src/tasks.xml | 33
branches/release/tools/build/v2/doc/src/tutorial.xml | 11
branches/release/tools/build/v2/hacking.txt | 80 +-
branches/release/tools/build/v2/kernel/bootstrap.jam | 3
branches/release/tools/build/v2/kernel/errors.jam | 134 ++--
branches/release/tools/build/v2/kernel/modules.jam | 265 ++++----
branches/release/tools/build/v2/notes/build_dir_option.txt | 49
branches/release/tools/build/v2/options/help.jam | 11
branches/release/tools/build/v2/roll.sh | 18
branches/release/tools/build/v2/test/BoostBuild.py | 460 +++++++-------
branches/release/tools/build/v2/test/abs_workdir.py | 29
branches/release/tools/build/v2/test/alias.py | 122 ++-
branches/release/tools/build/v2/test/conditionals.py | 58 -
branches/release/tools/build/v2/test/conditionals2.py | 33
branches/release/tools/build/v2/test/conditionals3.py | 21
branches/release/tools/build/v2/test/double_loading.py | 39
branches/release/tools/build/v2/test/empty.jam | 4
branches/release/tools/build/v2/test/inherit_toolset.py | 13
branches/release/tools/build/v2/test/library_property.py | 2
branches/release/tools/build/v2/test/module-actions/bootstrap.jam | 46
branches/release/tools/build/v2/test/module_actions.py | 35
branches/release/tools/build/v2/test/project_test3.py | 2
branches/release/tools/build/v2/test/readme.txt | 7
branches/release/tools/build/v2/test/searched_lib.py | 15
branches/release/tools/build/v2/test/tag.py | 148 ++--
branches/release/tools/build/v2/test/test_all.py | 9
branches/release/tools/build/v2/test/test_system.html | 2
branches/release/tools/build/v2/test/timedata.py | 33
branches/release/tools/build/v2/tools/builtin.jam | 945 ++++++++++++++--------------
branches/release/tools/build/v2/tools/cast.jam | 54
branches/release/tools/build/v2/tools/common.jam | 487 +++++++-------
branches/release/tools/build/v2/tools/darwin.jam | 141 ++++
branches/release/tools/build/v2/tools/docutils.jam | 7
branches/release/tools/build/v2/tools/doxproc.py | 10
branches/release/tools/build/v2/tools/doxygen.jam | 2
branches/release/tools/build/v2/tools/gcc.jam | 400 +++++------
branches/release/tools/build/v2/tools/generate.jam | 74 +-
branches/release/tools/build/v2/tools/gettext.jam | 96 +-
branches/release/tools/build/v2/tools/intel.jam | 8
branches/release/tools/build/v2/tools/mpi.jam | 6
branches/release/tools/build/v2/tools/msvc.jam | 460 +++++++-------
branches/release/tools/build/v2/tools/notfile.jam | 46
branches/release/tools/build/v2/tools/package.jam | 2
branches/release/tools/build/v2/tools/pathscale.jam | 4
branches/release/tools/build/v2/tools/pch.jam | 56
branches/release/tools/build/v2/tools/pgi.jam | 10
branches/release/tools/build/v2/tools/python.jam | 93 +-
branches/release/tools/build/v2/tools/qt4.jam | 48
branches/release/tools/build/v2/tools/quickbook.jam | 3
branches/release/tools/build/v2/tools/stage.jam | 365 ++++++-----
branches/release/tools/build/v2/tools/testing.jam | 424 ++++++------
branches/release/tools/build/v2/util/doc.jam | 96 +-
branches/release/tools/build/v2/util/indirect.jam | 65 +-
branches/release/tools/build/v2/util/option.jam | 2
branches/release/tools/build/v2/util/order.jam | 130 +--
branches/release/tools/build/v2/util/os.jam | 78 +-
branches/release/tools/build/v2/util/path.jam | 231 ++++---
77 files changed, 7250 insertions(+), 6718 deletions(-)
Modified: branches/release/tools/build/v2/build-system.jam
==============================================================================
--- branches/release/tools/build/v2/build-system.jam (original)
+++ branches/release/tools/build/v2/build-system.jam 2008-03-15 14:55:28 EDT (Sat, 15 Mar 2008)
@@ -1,717 +1,882 @@
-# Copyright 2003, 2005, 2007 Dave Abrahams
-# Copyright 2006, 2007 Rene Rivera
-# Copyright 2003, 2004, 2005, 2006 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# This file is part of Boost.Build version 2. You can think of it as
-# forming the main() routine. It is invoked by the bootstrapping code
-# in bootstrap.jam.
-#
-# The version of bootstrap.jam invoking this lives in
-# tools/build/kernel until BBv1 is retired, so that BBv1 can have its
-# bootstrap.jam in this directory.
+# Copyright 2003, 2005, 2007 Dave Abrahams
+# Copyright 2006, 2007 Rene Rivera
+# Copyright 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-import project ;
-import targets ;
-import sequence ;
-import modules ;
-import feature ;
-import property-set ;
-import build-request ;
-import errors : error ;
-import virtual-target ;
-import "class" : new ;
-import toolset ;
-import regex ;
+# This file is part of Boost Build version 2. You can think of it as forming
+# the main() routine. It is invoked by the bootstrapping code in bootstrap.jam.
+import build-request ;
import builtin ;
+import "class" : new ;
+import errors ;
+import feature ;
import make ;
+import modules ;
import os ;
-
+import path ;
+import project ;
+import property-set ;
+import regex ;
+import sequence ;
+import targets ;
+import toolset ;
import version ;
+import virtual-target ;
-# Returns the location of the build system. The primary use case
-# is building Boost, where it's sometimes needed to get location
-# of other components (like BoostBook files), and it's convenient
-# to use location relatively to Boost.Build path.
-rule location ( )
-{
- local r = [ modules.binding build-system ] ;
- return $(r:P) ;
-}
-# Returns the property set with the
-# free features from the currently processed
+################################################################################
+#
+# Module global data.
+#
+################################################################################
+
+# Shortcut used in this module for accessing used command-line parameters.
+.argv = [ modules.peek : ARGV ] ;
+
+# Flag indicating we should display additional debugging information related to
+# locating and loading Boost Build configuration files.
+.debug-config = [ MATCH ^(--debug-configuration)$ : $(.argv) ] ;
+
+# Legacy option doing too many things, some of which are not even documented.
+# Should be phased out.
+# * Disables loading site and user configuration files.
+# * Disables auto-configuration for toolsets specified explicitly on the
+# command-line.
+# * Causes --toolset command-line options to be ignored.
+# * Prevents the default toolset from being used even if no toolset has been
+# configured at all.
+.legacy-ignore-config = [ MATCH ^(--ignore-config)$ : $(.argv) ] ;
+
+# The cleaning is tricky. Say, if the user says 'bjam --clean foo' where 'foo'
+# is a directory, then we want to clean targets which are in 'foo' as well as
+# those in any child Jamfiles under foo, but not in any unrelated Jamfiles. To
+# achieve this we collect a list of projects under which cleaning is allowed.
+.project-targets = ;
+
+# Virtual targets obtained when building main target references on the command
+# line. When running 'bjam --clean main_target' we want to clean only files
+# belonging to that main target so we need to record which targets are produced
+# for it.
+.results-of-main-targets = ;
+
+# Was an XML dump requested?
+.out-xml = [ MATCH ^--out-xml=(.*)$ : $(.argv) ] ;
+
+# Default toolset & version to be used in case no other toolset has been used
+# explicitly by either the loaded configuration files, the loaded project build
+# scripts or an explicit toolset request on the command line. If not specified,
+# an arbitrary default will be used based on the current host OS. This value,
+# while not strictly necessary, has been added to allow testing Boost-Build's
+# default toolset usage functionality.
+.default-toolset = ;
+.default-toolset-version = ;
+
+
+################################################################################
+#
+# Public rules.
+#
+################################################################################
+
+# Returns the property set with the free features from the currently processed
# build request.
+#
rule command-line-free-features ( )
{
return $(.command-line-free-features) ;
}
+# Returns the location of the build system. The primary use case is building
+# Boost where it's sometimes needed to get the location of other components
+# (e.g. BoostBook files) and it's convenient to use locations relative to the
+# Boost Build path.
+#
+rule location ( )
+{
+ local r = [ modules.binding build-system ] ;
+ return $(r:P) ;
+}
-# Check if we can load 'test-config.jam'. If we can, load it and
-# ignore user configs.
-local argv = [ modules.peek : ARGV ] ;
+# Sets the default toolset & version to be used in case no other toolset has
+# been used explicitly by either the loaded configuration files, the loaded
+# project build scripts or an explicit toolset request on the command line. For
+# more detailed information see the comment related to used global variables.
+#
+rule set-default-toolset ( toolset : version ? )
+{
+ .default-toolset = $(toolset) ;
+ .default-toolset-version = $(version) ;
+}
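
As an illustrative aside (not part of this changeset), the new rule could be
driven from another Jam module roughly as sketched below; the calling context
and the toolset version are assumptions, shown only to make the intended use
concrete:

    # Hypothetical sketch: pick a fallback toolset before the build starts.
    import build-system ;
    build-system.set-default-toolset gcc : 3.4 ;
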
-local test-config = [ GLOB [ os.environ BOOST_BUILD_PATH ] : test-config.jam ] ;
-local debug-config = [ MATCH ^(--debug-configuration)$ : [ modules.peek : ARGV ] ] ;
+################################################################################
+#
+# Local rules.
+#
+################################################################################
-if $(test-config)
+# Returns actual Jam targets to be used for executing a clean request.
+#
+local rule actual-clean-targets ( )
{
- if $(debug-config)
- {
- ECHO "notice: loading test-config.jam from"
- [ NORMALIZE_PATH $(test-config[1]) ] ;
- ECHO "notice: user-config.jam and site-config.jam will be ignored" ;
- }
-
- module test-config
+ # Construct a list of projects explicitly detected as targets on this build
+ # system run. These are the projects under which cleaning is allowed.
+ for local t in $(targets)
{
- import toolset : using : using ;
+ if [ class.is-a $(t) : project-target ]
+ {
+ .project-targets += [ $(t).project-module ] ;
+ }
}
- import test-config ;
-}
-local ignore-config ;
-if $(test-config) || --ignore-config in [ modules.peek : ARGV ]
-{
- ignore-config = true ;
-}
+ # Construct a list of targets explicitly detected on this build system run
+ # as a result of building main targets.
+ local targets-to-clean ;
+ for local t in $(.results-of-main-targets)
+ {
+ # Don't include roots or sources.
+ targets-to-clean += [ virtual-target.traverse $(t) ] ;
+ }
+ targets-to-clean = [ sequence.unique $(targets-to-clean) ] ;
-local user-path = [ os.home-directories ] [ os.environ BOOST_BUILD_PATH ] ;
-
-# Unless ignore-config is set, load the configuration file in
-# $(path)/$(basename).jam
-local rule load-config ( basename : path + )
-{
- if ! $(ignore-config)
+ local to-clean ;
+ for local t in [ virtual-target.all-targets ]
{
- if $(debug-config)
+ local p = [ $(t).project ] ;
+
+ # Remove only derived targets.
+ if [ $(t).action ]
{
- ECHO notice: searching \"$(path)\" for \"$(basename).jam\" ;
- local where = [ GLOB $(path) : $(basename).jam ] ;
- if $(where)
- {
- ECHO notice: loading $(basename).jam from
- [ NORMALIZE_PATH $(where[1]) ] ;
- }
- }
+ if $(t) in $(targets-to-clean) ||
+ [ should-clean-project [ $(p).project-module ] ] = true
+ {
+ to-clean += $(t) ;
+ }
+ }
+ }
- modules.load $(basename) : : $(path) ;
- project.load-used-projects $(basename) ;
+ local to-clean-actual ;
+ for local t in $(to-clean)
+ {
+ to-clean-actual += [ $(t).actualize ] ;
}
+ return $(to-clean-actual) ;
}
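
For illustration only, these are the kinds of invocations the cleaning logic
above is meant to serve (the project and target names here are made up):

    bjam --clean                 # clean targets of the project in the current directory
    bjam --clean some_project    # clean targets under 'some_project' and its child Jamfiles
    bjam --clean some_target     # clean only files produced for main target 'some_target'
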
+
+# Given a target id, try to find and return the corresponding target. This is
+# only invoked when there's no Jamfile in ".". This code somewhat duplicates
+# code in project-target.find but we can't reuse that code without a
+# project-targets instance.
#
-# Load site-config.
+local rule find-target ( target-id )
+{
+ local split = [ MATCH (.*)//(.*) : $(target-id) ] ;
+
+ local pm ;
+ if $(split)
+ {
+ pm = [ project.find $(split[1]) : "." ] ;
+ }
+ else
+ {
+ pm = [ project.find $(target-id) : "." ] ;
+ }
+
+ local result ;
+ if $(pm)
+ {
+ result = [ project.target $(pm) ] ;
+ }
+
+ if $(split)
+ {
+ result = [ $(result).find $(split[2]) ] ;
+ }
+
+ return $(result) ;
+}
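
As a hedged example, these are the kinds of target references the rule above is
meant to resolve when no Jamfile exists in the current directory (the paths and
target names are invented):

    bjam ../some/project             # project reference only
    bjam ../some/project//a-target   # project reference plus a main target name
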
+
+
+# Initializes a new configuration module.
#
-module site-config
+local rule initialize-config-module ( module-name )
{
- import project : initialize ;
- initialize site-config ;
+ project.initialize $(module-name) ;
+ if USER_MODULE in [ RULENAMES ]
+ {
+ USER_MODULE $(module-name) ;
+ }
}
-local site-path = /etc $(user-path) ;
-if [ os.name ] in NT CYGWIN
-{
- site-path = [ modules.peek : SystemRoot ] $(user-path) ;
+# Helper rule used to load configuration files. Loads the first configuration
+# file with the given 'filename' at 'path' into module with name 'module-name'.
+# Not finding the requested file may or may not be treated as an error depending
+# on the must-find parameter. Returns a normalized path to the loaded
+# configuration file or nothing if no file was loaded.
+#
+local rule load-config ( module-name : filename : path + : must-find ? )
+{
+ if $(.debug-config)
+ {
+ ECHO "notice: Searching" "$(path)" "for" "$(module-name)"
+ "configuration file" "$(filename)" "." ;
+ }
+ local where = [ GLOB $(path) : $(filename) ] ;
+ if $(where)
+ {
+ where = [ NORMALIZE_PATH $(where[1]) ] ;
+ if $(.debug-config)
+ {
+ ECHO "notice: Loading" "$(module-name)" "configuration file"
+ "$(filename)" "from" $(where) "." ;
+ }
+
+ modules.load $(module-name) : $(filename) : $(path) ;
+ project.load-used-projects $(module-name) ;
+ }
+ else
+ {
+        if $(must-find)
+ {
+ errors.user-error "Configuration file" "$(filename)" "not found in"
+ "$(path)" "." ;
+ }
+ if $(.debug-config)
+ {
+ ECHO "notice:" "Configuration file" "$(filename)" "not found in"
+ "$(path)" "." ;
+ }
+ }
+ return where ;
}
-load-config site-config : $(site-path) ;
+# Loads all the configuration files used by Boost Build in the following order:
+#
+# -- test-config --
+# Loaded only if specified on the command-line using the --test-config
+# command-line parameter. It is ok for this file not to exist even if specified.
+# If this configuration file is loaded, regular site and user configuration
+# files will not be. If a relative path is specified, file is searched for in
+# the current folder.
#
-# Load user-config.
+# -- site-config --
+# Always named site-config.jam. Will only be found if located on the system
+# root path (Windows), /etc (non-Windows), user's home folder or the Boost Build
+# path, in that order. Not loaded in case the test-config configuration file is
+# loaded or either the --ignore-site-config or the --ignore-config command-line
+# option is specified.
#
-module user-config
+# -- user-config --
+# Named user-config.jam by default or may be named explicitly using the
+# --user-config command-line option or the BOOST_BUILD_USER_CONFIG environment
+# variable. If named explicitly the file is looked for from the current working
+# directory and if the default one is used then it is searched for in the
+# user's home directory and the Boost Build path, in that order. Not loaded in
+# case either the test-config configuration file is loaded, --ignore-config
+# command-line option is specified or an empty file name is explicitly
+# specified. If the file name has been given explicitly then the file must
+# exist.
+#
+# Test configurations have been added primarily for use by Boost Build's
+# internal unit testing system but may be used freely in other places as well.
+#
+local rule load-configuration-files
{
- import project : initialize ;
- initialize user-config ;
-}
+ # Flag indicating that site configuration should not be loaded.
+ local ignore-site-config =
+ [ MATCH ^(--ignore-site-config)$ : $(.argv) ] ;
+
+ if $(.legacy-ignore-config) && $(.debug-config)
+ {
+ ECHO "notice: Regular site and user configuration files will be ignored" ;
+ ECHO "notice: due to the --ignore-config command-line option." ;
+ }
+
+ initialize-config-module test-config ;
+ local test-config = [ MATCH ^--test-config=(.*)$ : $(.argv) ] ;
+ if $(test-config)
+ {
+ local where =
+ [ load-config test-config : $(test-config:BS) : $(test-config:D) ] ;
+ if $(where)
+ {
+ if $(.debug-config) && ! $(.legacy-ignore-config)
+ {
+ ECHO "notice: Regular site and user configuration files will" ;
+ ECHO "notice: be ignored due to the test configuration being"
+ "loaded." ;
+ }
+ }
+ else
+ {
+ test-config = ;
+ }
+ }
-local user-config-path = [ MATCH ^--user-config=(.*) : $(argv) ] ;
+ local user-path = [ os.home-directories ] [ os.environ BOOST_BUILD_PATH ] ;
+ local site-path = /etc $(user-path) ;
+ if [ os.name ] in NT CYGWIN
+ {
+ site-path = [ modules.peek : SystemRoot ] $(user-path) ;
+ }
-user-config-path ?= [ os.environ BOOST_BUILD_USER_CONFIG ] ;
+    if $(ignore-site-config) && ! $(.legacy-ignore-config)
+ {
+ ECHO "notice: Site configuration files will be ignored due to the" ;
+ ECHO "notice: --ignore-site-config command-line option." ;
+ }
-if $(user-config-path)
-{
- if $(debug-config)
+ initialize-config-module site-config ;
+ if ! $(test-config) && ! $(ignore-site-config) && ! $(.legacy-ignore-config)
{
- ECHO "Loading explicitly specifier user configuration file:" ;
- ECHO " $(user-config-path)" ;
+ load-config site-config : site-config.jam : $(site-path) ;
+ }
+
+ initialize-config-module user-config ;
+ if ! $(test-config) && ! $(.legacy-ignore-config)
+ {
+ local user-config = [ MATCH ^--user-config=(.*)$ : $(.argv) ] ;
+ user-config = $(user-config[-1]) ;
+ user-config ?= [ os.environ BOOST_BUILD_USER_CONFIG ] ;
+ local explicitly-requested = $(user-config) ;
+ user-config ?= user-config.jam ;
+
+ if $(user-config)
+ {
+ if $(explicitly-requested)
+ {
+ # Treat explicitly entered user paths as native OS path
+ # references and, if non-absolute, root them at the current
+ # working directory.
+ user-config = [ path.make $(user-config) ] ;
+ user-config = [ path.root $(user-config) [ path.pwd ] ] ;
+ user-config = [ path.native $(user-config) ] ;
+
+ if $(.debug-config)
+ {
+ ECHO "notice: Loading explicitly specified user configuration file:" ;
+ ECHO " $(user-config)" ;
+ }
+
+ load-config user-config : $(user-config:BS) : $(user-config:D)
+ : must-exist ;
+ }
+ else
+ {
+ load-config user-config : $(user-config) : $(user-path) ;
+ }
+ }
+ else if $(.debug-config)
+ {
+ ECHO "notice: User configuration file loading explicitly disabled." ;
+ }
}
-
-
- modules.load user-config : $(user-config-path:BS) : $(user-config-path:D) ;
- project.load-used-projects user-config ;
-}
-else
-{
- load-config user-config : $(user-path) ;
}
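
A minimal sketch of the user configuration file described above; the toolset
versions and the path used in the usage line are examples only, not something
this commit prescribes:

    # user-config.jam -- picked up from the home directory or BOOST_BUILD_PATH,
    # or named explicitly on the command line.
    using gcc : 3.4 ;
    using msvc : 8.0 ;

It could also be selected explicitly with something like
"bjam --user-config=/some/path/user-config.jam" (path hypothetical), in which
case the file must exist.
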
-#
# Autoconfigure toolsets based on any instances of --toolset=xx,yy,...zz or
-# toolset=xx,yy,...zz in the command line
+# toolset=xx,yy,...zz in the command line. May return additional properties to
+# be processed as if they had been specified by the user.
#
-local option-toolsets = [ regex.split-list [ MATCH ^--toolset=(.*) : $(argv) ] : "," ] ;
-local feature-toolsets = [ regex.split-list [ MATCH ^toolset=(.*) : $(argv) ] : "," ] ;
+local rule process-explicit-toolset-requests
+{
+ local extra-properties ;
-# if the user specified --toolset=..., we need to add toolset=... to
-# the build request
-local extra-build-request ;
+ local option-toolsets = [ regex.split-list [ MATCH ^--toolset=(.*)$ : $(.argv) ] : "," ] ;
+ local feature-toolsets = [ regex.split-list [ MATCH ^toolset=(.*)$ : $(.argv) ] : "," ] ;
-if ! $(ignore-config)
-{
for local t in $(option-toolsets) $(feature-toolsets)
{
- # Parse toolset-version/properties
+ # Parse toolset-version/properties.
local (t-v,t,v) = [ MATCH (([^-/]+)-?([^/]+)?)/?.* : $(t) ] ;
local toolset-version = $((t-v,t,v)[1]) ;
local toolset = $((t-v,t,v)[2]) ;
local version = $((t-v,t,v)[3]) ;
- if $(debug-config)
+ if $(.debug-config)
{
- ECHO notice: [cmdline-cfg] Detected command-line request for
- $(toolset-version): toolset= \"$(toolset)\" "version= \""$(version)\" ;
+ ECHO "notice: [cmdline-cfg] Detected command-line request for"
+ $(toolset-version): "toolset=" $(toolset) "version="
+ $(version) ;
}
+ # If the toolset isn't known, configure it now.
local known ;
-
- # if the toolset isn't known, configure it now.
if $(toolset) in [ feature.values <toolset> ]
{
known = true ;
}
-
- if $(known) && $(version)
- && ! [ feature.is-subvalue toolset : $(toolset) : version : $(version) ]
+ if $(known) && $(version) && ! [ feature.is-subvalue toolset
+ : $(toolset) : version : $(version) ]
{
known = ;
}
if ! $(known)
{
- if $(debug-config)
+ if $(.debug-config)
{
- ECHO notice: [cmdline-cfg] toolset $(toolset-version)
- not previously configured; configuring now ;
+ ECHO notice: [cmdline-cfg] toolset $(toolset-version) not
+ previously configured; attempting to auto-configure now ;
}
toolset.using $(toolset) : $(version) ;
}
else
{
- if $(debug-config)
+ if $(.debug-config)
{
- ECHO notice: [cmdline-cfg] toolset $(toolset-version) already configured ;
+ ECHO notice: [cmdline-cfg] toolset $(toolset-version) already
+ configured ;
}
}
- # make sure we get an appropriate property into the build request in
- # case the user used the "--toolset=..." form
- if ! $(t) in $(argv)
- && ! $(t) in $(feature-toolsets)
+ # Make sure we get an appropriate property into the build request in
+ # case toolset was specified using the "--toolset=..." command-line
+ # option form.
+ if ! $(t) in $(.argv) && ! $(t) in $(feature-toolsets)
{
- if $(debug-config)
+ if $(.debug-config)
{
ECHO notice: [cmdline-cfg] adding toolset=$(t) "to build request." ;
}
- extra-build-request += toolset=$(t) ;
+ extra-properties += toolset=$(t) ;
}
}
-}
-
-if USER_MODULE in [ RULENAMES ]
-{
- USER_MODULE site-config user-config ;
-}
+ return $(extra-properties) ;
+}
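
Both command-line forms handled by the rule above might look like this in
practice (the version is illustrative); either one auto-configures gcc 3.4 if
it has not been configured yet and adds toolset=gcc-3.4 to the build request:

    bjam --toolset=gcc-3.4 release
    bjam toolset=gcc-3.4 release
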
-if --version in [ modules.peek : ARGV ]
+# Returns 'true' if the given 'project' is equal to or is a (possibly indirect)
+# child of any of the projects requested to be cleaned in this build system run.
+# Returns 'false' otherwise. Expects the .project-targets list to have already
+# been constructed.
+#
+local rule should-clean-project ( project )
{
- version.print ;
- EXIT ;
-}
+ if ! $(.should-clean-project.$(project))
+ {
+ local r = false ;
+ if $(project) in $(.project-targets)
+ {
+ r = true ;
+ }
+ else
+ {
+ local parent = [ project.attribute $(project) parent-module ] ;
+ if $(parent) && $(parent) != user-config
+ {
+ r = [ should-clean-project $(parent) ] ;
+ }
+ }
+ .should-clean-project.$(project) = $(r) ;
+ }
+ return $(.should-clean-project.$(project)) ;
+}
-# We always load project in "." so that 'use-project' directives has
-# any chance of been seen. Otherwise, we won't be able to refer to
-# subprojects using target ids.
-if [ project.find "." : "." ]
-{
- current-project = [ project.target [ project.load "." ] ] ;
-}
+################################################################################
+#
+# main()
+# ------
+#
+################################################################################
-if ! [ feature.values <toolset> ]
{
- local default-toolset = gcc ;
- if [ os.name ] = NT
+ if --version in $(.argv)
{
- default-toolset = msvc ;
+ version.print ;
+ EXIT ;
}
-
- ECHO "warning: No toolsets are configured." ;
- ECHO "warning: Configuring default toolset" \"$(default-toolset)\". ;
- ECHO "warning: If the default is wrong, you may not be able to build C++ programs." ;
- ECHO "warning: Use the \"--toolset=xxxxx\" option to override our guess." ;
- ECHO "warning: For more configuration options, please consult" ;
- ECHO "warning: http://boost.org/boost-build2/doc/html/bbv2/advanced/configuration.html" ;
-
- if ! $(ignore-config)
+
+ load-configuration-files ;
+
+ local extra-properties ;
+ # Note that this causes --toolset options to be ignored if --ignore-config
+ # is specified.
+ if ! $(.legacy-ignore-config)
{
- toolset.using $(default-toolset) ;
+ extra-properties = [ process-explicit-toolset-requests ] ;
}
-}
-
-build-request = [
- build-request.from-command-line [
- modules.peek : ARGV
- ] $(extra-build-request)
-] ;
-properties = [ $(build-request).get-at 2 ] ;
-if $(properties)
-{
- expanded = [ build-request.expand-no-defaults $(properties) ] ;
- local xexpanded ;
- for local e in $(expanded)
+ # We always load project in "." so that 'use-project' directives have any
+ # chance of being seen. Otherwise, we would not be able to refer to
+ # subprojects using target ids.
+ local current-project ;
+ if [ project.find "." : "." ]
{
- xexpanded += [ property-set.create [ feature.split $(e) ] ] ;
+ current-project = [ project.target [ project.load "." ] ] ;
}
- expanded = $(xexpanded) ;
-}
-else
-{
- expanded = [ property-set.empty ] ;
-}
+    # In case there are no toolsets currently defined, make the build run using
+ # the default toolset.
+ if ! $(.legacy-ignore-config) && ! [ feature.values <toolset> ]
+ {
+ local default-toolset = $(.default-toolset) ;
+ local default-toolset-version = ;
+ if $(default-toolset)
+ {
+ default-toolset-version = $(.default-toolset-version) ;
+ }
+ else
+ {
+ default-toolset = gcc ;
+ if [ os.name ] = NT
+ {
+ default-toolset = msvc ;
+ }
+ }
-local target-ids = [ $(build-request).get-at 1 ] ;
-local targets
-local clean ;
-
+ ECHO "warning: No toolsets are configured." ;
+ ECHO "warning: Configuring default toolset" \"$(default-toolset)\". ;
+ ECHO "warning: If the default is wrong, your build may not work correctly." ;
+ ECHO "warning: Use the \"--toolset=xxxxx\" option to override our guess." ;
+ ECHO "warning: For more configuration options, please consult" ;
+ ECHO "warning: http://boost.org/boost-build2/doc/html/bbv2/advanced/configuration.html" ;
-if "--clean-all" in [ modules.peek : ARGV ]
-{
- cleanall = true ;
-}
+ toolset.using $(default-toolset) : $(default-toolset-version) ;
+ }
-if "--clean" in [ modules.peek : ARGV ]
-{
- clean = true ;
-}
+ # Parse command line for targets and properties. Note that this requires
+ # that all project files already be loaded.
+ local build-request = [ build-request.from-command-line $(.argv)
+ $(extra-properties) ] ;
+ local target-ids = [ $(build-request).get-at 1 ] ;
+ local properties = [ $(build-request).get-at 2 ] ;
-local bjam-targets ;
-# Given a target it, try to find and return corresponding target.
-# This is only invoked when there's no Jamfile in "."
-# This code somewhat duplicates code in project-target.find but we can't reuse
-# that code without project-targets instance.
-rule find-target ( target-id )
-{
- local split = [ MATCH (.*)//(.*) : $(target-id) ] ;
-
- local pm ;
- if $(split)
+ # Expand properties specified on the command line into multiple property
+ # sets consisting of all legal property combinations. Each expanded property
+ # set will be used for a single build run. E.g. if multiple toolsets are
+ # specified then requested targets will be built with each of them.
+ if $(properties)
{
- pm = [ project.find $(split[1]) : "." ] ;
+ expanded = [ build-request.expand-no-defaults $(properties) ] ;
+ local xexpanded ;
+ for local e in $(expanded)
+ {
+ xexpanded += [ property-set.create [ feature.split $(e) ] ] ;
+ }
+ expanded = $(xexpanded) ;
}
else
{
- pm = [ project.find $(target-id) : "." ] ;
+ expanded = [ property-set.empty ] ;
}
-
- local result ;
- if $(pm)
- {
- result = [ project.target $(pm) ] ;
- }
-
- if $(split)
- {
- result = [ $(result).find $(split[2]) ] ;
- }
-
- return $(result) ;
-}
-
-if ! $(current-project)
-{
- if ! $(target-ids)
- {
- ECHO "error: no Jamfile in current directory found, and no target references specified." ;
+ # Check that we actually found something to build.
+ if ! $(current-project) && ! $(target-ids)
+ {
+ errors.user-error "error: no Jamfile in current directory found, and no"
+ "target references specified." ;
EXIT ;
}
-}
-for local id in $(target-ids)
-{
- if $(id) = clean
+    # Flags indicating that this build system run has been started in order to
+    # clean existing targets instead of creating new ones. Note that these are
+    # not the final flag values, as they may change later on due to special
+    # targets being specified on the command line.
+ local clean ; if "--clean" in $(.argv) { clean = true ; }
+ local cleanall ; if "--clean-all" in $(.argv) { cleanall = true ; }
+
+
+ # List of Boost Build meta-targets and actual raw Jam targets directly
+    # requested by the user. Raw Jam targets are used when the user's request
+ # contains a reference to a specific file not modeled using a main Boost
+ # Build target.
+ local targets ;
+ local bjam-targets ;
+
+
+    # Process each target specified on the command line and convert it into an
+    # internal Boost Build target object. Detect the special 'clean' target. If
+    # no main Boost Build targets were explicitly requested, use the current
+    # project as the target.
+ for local id in $(target-ids)
{
- clean = true ;
- }
- else
- {
- local t ;
- if $(current-project)
+ if $(id) = clean
{
- t = [ $(current-project).find $(id) : no-error ] ;
+ clean = true ;
}
else
{
- t = [ find-target $(id) ] ;
- }
-
- if ! $(t)
- {
- ECHO "notice: could not find main target " $(id) ;
- ECHO "notice: assuming it's a name of file to create " ;
- bjam-targets += $(id) ;
- }
- else
- {
- targets += $(t) ;
- }
- }
-}
-
-if ! $(targets)
-{
- targets += [ project.target [ project.module-name "." ] ] ;
-}
-
-virtual-targets = ;
-
-# Virtual targets obtained when building main targets references on
-# the command line. When running
-#
-# bjam --clean main_target
-#
-# we want to clean the files that belong only to that main target,
-# so we need to record which targets are produced.
-local results-of-main-targets ;
-
-for local p in $(expanded)
-{
- .command-line-free-features = [ property-set.create [ $(p).free ] ] ;
- for local t in $(targets)
- {
- local g = [ $(t).generate $(p) ] ;
- if ! [ class.is-a $(t) : project-target ]
- {
- results-of-main-targets += $(g[2-]) ;
- }
- virtual-targets += $(g[2-]) ;
- }
-}
-
-# The cleaning is tricky. Say, if
-# user says:
-#
-# bjam --clean foo
-#
-# where 'foo' is a directory, then we want to clean targets
-# which are in 'foo' or in any children Jamfiles, but not in any
-# unrelated Jamfiles. So, we collect the list of project under which
-# cleaning is allowed.
-#
+ local t ;
+ if $(current-project)
+ {
+ t = [ $(current-project).find $(id) : no-error ] ;
+ }
+ else
+ {
+ t = [ find-target $(id) ] ;
+ }
-local projects-to-clean ;
-local targets-to-clean ;
-if $(clean) || $(clean-all)
-{
- for local t in $(targets)
- {
- if [ class.is-a $(t) : project-target ]
- {
- projects-to-clean += [ $(t).project-module ] ;
- }
+ if ! $(t)
+ {
+ ECHO "notice: could not find main target" $(id) ;
+ ECHO "notice: assuming it's a name of file to create." ;
+ bjam-targets += $(id) ;
+ }
+ else
+ {
+ targets += $(t) ;
+ }
+ }
}
-
- local subvariants ;
- for local t in $(results-of-main-targets)
+ if ! $(targets)
{
- # Don't include roots or sources.
- targets-to-clean += [ virtual-target.traverse $(t) ] ;
+ targets += [ project.target [ project.module-name "." ] ] ;
}
- targets-to-clean = [ sequence.unique $(targets-to-clean) ] ;
-}
-# Returns 'true' if 'project' is a child of 'current-project',
-# possibly indirect, or is equal to 'project'.
-# Returns 'false' otherwise.
-rule is-child ( project )
-{
- if ! $(.is-child.$(project))
+
+ # List of all virtual-targets created in this build system run.
+ local virtual-targets ;
+
+
+ # Now that we have a set of targets to build and a set of property sets to
+ # build the targets with, we can start the main build process by using each
+ # property set to generate virtual targets from all of our listed targets
+    # and any of their dependencies.
+ for local p in $(expanded)
{
- local r = false ;
- if $(project) in $(projects-to-clean)
- {
- r = true ;
- }
- else
+ .command-line-free-features = [ property-set.create [ $(p).free ] ] ;
+ for local t in $(targets)
{
- local parent = [ project.attribute $(project) parent-module ] ;
- if $(parent) && $(parent) != user-config
+ local g = [ $(t).generate $(p) ] ;
+ if ! [ class.is-a $(t) : project-target ]
{
- r = [ is-child $(parent) ] ;
- }
- }
-
- .is-child.$(project) = $(r) ;
+ .results-of-main-targets += $(g[2-]) ;
+ }
+ virtual-targets += $(g[2-]) ;
+ }
}
-
- return $(.is-child.$(project)) ;
-}
+ # List of all Jam targets constructed in this build system run.
+ local actual-targets ;
-actual-targets = ;
-for t in $(virtual-targets)
-{
- actual-targets += [ $(t).actualize ] ;
-}
-
-# Was an XML dump requested?
-.out-xml = [ MATCH --out-xml=(.*) : [ modules.peek : ARGV ] ] ;
-if $(.out-xml)
-{
- # Get the qualified name of a virtual target.
- rule full-target-name ( t )
+ # Convert all collected virtual targets into actual raw Jam targets.
+ for t in $(virtual-targets)
{
- local name = [ $(t).name ] ;
- local project = [ $(t).project ] ;
- local project-path = [ $(project).get location ] ;
- return $(project-path)//$(name) ;
+ actual-targets += [ $(t).actualize ] ;
}
-
- # Generate an XML file containing build statistics for each
- # constituent
- rule out-xml ( xml-file : constituents * )
+
+
+    # If XML data output has been requested, prepare additional rules and
+    # targets so we can hook into Jam to collect build data while it is
+    # building and have it trigger the final XML report generation after all
+    # the planned targets have been built.
+ if $(.out-xml)
{
- # Prepare valid XML header and footer with some basic info
- local nl = "
+ # Get a qualified virtual target name.
+ rule full-target-name ( target )
+ {
+ local name = [ $(target).name ] ;
+ local project = [ $(target).project ] ;
+ local project-path = [ $(project).get location ] ;
+ return $(project-path)//$(name) ;
+ }
+
+ # Generate an XML file containing build statistics for each constituent.
+ #
+ rule out-xml ( xml-file : constituents * )
+ {
+ # Prepare valid XML header and footer with some basic info.
+ local nl = "
" ;
- local jam = [ version.jam ] ;
- local os = [ modules.peek : OS OSPLAT JAMUNAME ] "" ;
- local timestamp = [ modules.peek : JAMDATE ] ;
- local cwd = [ PWD ] ;
- local command = [ modules.peek : ARGV ] ;
- local bb-version = [ version.boost-build ] ;
- .header on $(xml-file) =
- "<?xml version=\"1.0\" encoding=\"utf-8\"?>"
- "$(nl)<build format=\"1.0\" version=\"$(bb-version)\">"
- "$(nl) <jam version=\"$(jam:J=.)\" />"
- "$(nl) <os name=\"$(os[1])\" platform=\"$(os[2])\"><![CDATA[$(os[3-]:J= )]]></os>"
- "$(nl) <timestamp><![CDATA[$(timestamp)]]></timestamp>"
- "$(nl) <directory><![CDATA[$(cwd)]]></directory>"
- "$(nl) <command><![CDATA[\"$(command:J=\" \")\"]]></command>"
- ;
- .footer on $(xml-file) =
- "$(nl)</build>" ;
- # Generate target dependency graph
- .contents on $(xml-file) +=
- "$(nl) <targets>"
- ;
+ local jam = [ version.jam ] ;
+ local os = [ modules.peek : OS OSPLAT JAMUNAME ] "" ;
+ local timestamp = [ modules.peek : JAMDATE ] ;
+ local cwd = [ PWD ] ;
+ local command = $(.argv) ;
+ local bb-version = [ version.boost-build ] ;
+ .header on $(xml-file) =
+ "<?xml version=\"1.0\" encoding=\"utf-8\"?>"
+ "$(nl)<build format=\"1.0\" version=\"$(bb-version)\">"
+ "$(nl) <jam version=\"$(jam:J=.)\" />"
+ "$(nl) <os name=\"$(os[1])\" platform=\"$(os[2])\"><![CDATA[$(os[3-]:J= )]]></os>"
+ "$(nl) <timestamp><![CDATA[$(timestamp)]]></timestamp>"
+ "$(nl) <directory><![CDATA[$(cwd)]]></directory>"
+ "$(nl) <command><![CDATA[\"$(command:J=\" \")\"]]></command>"
+ ;
+ .footer on $(xml-file) =
+ "$(nl)</build>" ;
- for local t in [ virtual-target.all-targets ]
+ # Generate the target dependency graph.
+ .contents on $(xml-file) +=
+ "$(nl) <targets>" ;
+ for local t in [ virtual-target.all-targets ]
+ {
+ local action = [ $(t).action ] ;
+ if $(action)
+ # If a target has no action, it has no dependencies.
+ {
+ local name = [ full-target-name $(t) ] ;
+ local sources = [ $(action).sources ] ;
+ local dependencies ;
+ for local s in $(sources)
+ {
+ dependencies += [ full-target-name $(s) ] ;
+ }
+
+ local path = [ $(t).path ] ;
+ local jam-target = [ $(t).actual-name ] ;
+
+ .contents on $(xml-file) +=
+ "$(nl) <target>"
+ "$(nl) <name><![CDATA[$(name)]]></name>"
+ "$(nl) <dependencies>"
+ "$(nl) <dependency><![CDATA[$(dependencies)]]></dependency>"
+ "$(nl) </dependencies>"
+ "$(nl) <path><![CDATA[$(path)]]></path>"
+ "$(nl) <jam-target><![CDATA[$(jam-target)]]></jam-target>"
+ "$(nl) </target>"
+ ;
+ }
+ }
+ .contents on $(xml-file) +=
+ "$(nl) </targets>" ;
+
+ # Build $(xml-file) after $(constituents). Do so even if a
+ # constituent action fails and regenerate the xml on every bjam run.
+ INCLUDES $(xml-file) : $(constituents) ;
+ ALWAYS $(xml-file) ;
+ __ACTION_RULE__ on $(xml-file) = build-system.out-xml.generate-action ;
+ out-xml.generate $(xml-file) ;
+ }
+
+ # The actual build actions are here; if we did this work in the actions
+ # clause we would have to form a valid command line containing the
+ # result of @(...) below (the name of the XML file).
+ #
+ rule out-xml.generate-action ( args * : xml-file
+ : command status start end user system : output ? )
+ {
+ local contents =
+ [ on $(xml-file) return $(.header) $(.contents) $(.footer) ] ;
+ local f = @($(xml-file):E=$(contents)) ;
+ }
+
+ # Nothing to do here; the *real* actions happen in
+ # out-xml.generate-action.
+ actions quietly out-xml.generate { }
+
+ # Define the out-xml file target, which depends on all the targets so
+ # that it runs the collection after the targets have run.
+ out-xml $(.out-xml) : $(actual-targets) ;
+
+ # Set up a global __ACTION_RULE__ that records all the available
+ # statistics about each actual target in a variable "on" the --out-xml
+ # target.
+ #
+ rule out-xml.collect ( xml-file : target : command status start end user
+ system : output ? )
{
- local action = [ $(t).action ] ;
+ local nl = "
+" ;
+ # Open the action with some basic info.
+ .contents on $(xml-file) +=
+ "$(nl) <action status=\"$(status)\" start=\"$(start)\" end=\"$(end)\" user=\"$(user)\" system=\"$(system)\">" ;
+
+ # If we have an action object we can print out more detailed info.
+ local action = [ on $(target) return $(.action) ] ;
if $(action)
{
- # If a target has no action, it has
- # no dependencies.
+ local action-name = [ $(action).action-name ] ;
+ local action-sources = [ $(action).sources ] ;
+ local action-props = [ $(action).properties ] ;
- local name = [ full-target-name $(t) ] ;
- local sources = [ $(action).sources ] ;
- local dependencies ;
- for local s in $(sources)
+                # The qualified name of the action with which we created the target.
+ .contents on $(xml-file) +=
+ "$(nl) <name><![CDATA[$(action-name)]]></name>" ;
+
+ # The sources that made up the target.
+ .contents on $(xml-file) +=
+ "$(nl) <sources>" ;
+ for local source in $(action-sources)
{
- dependencies += [ full-target-name $(s) ] ;
+ local source-actual = [ $(source).actual-name ] ;
+ .contents on $(xml-file) +=
+ "$(nl) <source><![CDATA[$(source-actual)]]></source>" ;
}
+ .contents on $(xml-file) +=
+ "$(nl) </sources>" ;
- local path = [ $(t).path ] ;
- local jam-target = [ $(t).actual-name ] ;
-
+ # The properties that define the conditions under which the
+ # target was built.
.contents on $(xml-file) +=
- "$(nl) <target>"
- "$(nl) <name><![CDATA[$(name)]]></name>"
- "$(nl) <dependencies>"
- "$(nl) <dependency><![CDATA[$(dependencies)]]></dependency>"
- "$(nl) </dependencies>"
- "$(nl) <path><![CDATA[$(path)]]></path>"
- "$(nl) <jam-target><![CDATA[$(jam-target)]]></jam-target>"
- "$(nl) </target>"
- ;
- }
- }
- .contents on $(xml-file) +=
- "$(nl) </targets>"
- ;
-
- # Build $(xml-file) after $(constituents) and do so even if a
- # constituent action fails, and regenerate the xml on every bjam run.
- INCLUDES $(xml-file) : $(constituents) ;
- ALWAYS $(xml-file) ;
- __ACTION_RULE__ on $(xml-file) =
- build-system.out-xml.generate-action ;
- out-xml.generate $(xml-file) ;
- }
-
- # The actual build actions are here; if we did this work in the
- # actions clause we would have to form a valid command line
- # containing the result of @(...) below (the name of the XML file).
- rule out-xml.generate-action (
- xml-file args * :
- status : user : system : command : output ? )
- {
- local contents =
- [ on $(xml-file) return $(.header) $(.contents) $(.footer) ] ;
- local f = @($(xml-file):E=$(contents)) ;
- }
-
- # Nothing to do here; the *real* actions happen in
- # out-xml.generate-action
- actions quietly out-xml.generate { }
-
- # Define the out-xml file target, which depends on all the targets
- # so that it runs the collection after the targets have run.
- out-xml $(.out-xml) : $(actual-targets) ;
-
- # Set up a global __ACTION_RULE__ that records all the available
- # statistics about each actual target in a variable "on" the
- # --out-xml target.
- rule out-xml.collect (
- xml-file target :
- status : user : system : command : output ? )
- {
- local nl = "
-" ;
- # Open the action with some basic info.
- .contents on $(xml-file) +=
- "$(nl) <action status=\"$(status)\" user=\"$(user)\" system=\"$(system)\">"
- ;
-
- # If we have an action object we can print out more detailed info.
- local action = [ on $(target) return $(.action) ] ;
- if $(action)
- {
- local action-name = [ $(action).action-name ] ;
- local action-sources = [ $(action).sources ] ;
- local action-props = [ $(action).properties ] ;
-
- # The qualified name of the action which we created the target.
- .contents on $(xml-file) +=
- "$(nl) <name><![CDATA[$(action-name)]]></name>"
- ;
-
- # The sources that made up the target.
- .contents on $(xml-file) +=
- "$(nl) <sources>"
- ;
- for local source in $(action-sources)
- {
- local source-actual = [ $(source).actual-name ] ;
+ "$(nl) <properties>" ;
+ for local prop in [ $(action-props).raw ]
+ {
+ local prop-name = [ MATCH ^<(.*)>$ : $(prop:G) ] ;
+ .contents on $(xml-file) +=
+ "$(nl) <property name=\"$(prop-name)\"><![CDATA[$(prop:G=)]]></property>" ;
+ }
.contents on $(xml-file) +=
- "$(nl) <source><![CDATA[$(source-actual)]]></source>"
- ;
+ "$(nl) </properties>" ;
}
+
+ local locate = [ on $(target) return $(LOCATE) ] ;
+ locate ?= "" ;
.contents on $(xml-file) +=
- "$(nl) </sources>"
- ;
-
- # The properties that define the conditions under which the
- # target was built.
+ "$(nl) <jam-target><![CDATA[$(target)]]></jam-target>"
+ "$(nl) <path><![CDATA[$(target:G=:R=$(locate))]]></path>"
+ "$(nl) <command><![CDATA[$(command)]]></command>"
+ "$(nl) <output><![CDATA[$(output)]]></output>" ;
.contents on $(xml-file) +=
- "$(nl) <properties>"
- ;
- for local prop in [ $(action-props).raw ]
- {
- local prop-name = [ MATCH ^<(.*)>$ : $(prop:G) ] ;
- .contents on $(xml-file) +=
- "$(nl) <property name=\"$(prop-name)\"><![CDATA[$(prop:G=)]]></property>"
- ;
- }
- .contents on $(xml-file) +=
- "$(nl) </properties>"
- ;
+ "$(nl) </action>" ;
}
-
- local locate = [ on $(target) return $(LOCATE) ] ;
- locate ?= "" ;
- .contents on $(xml-file) +=
- "$(nl) <jam-target><![CDATA[$(target)]]></jam-target>"
- "$(nl) <path><![CDATA[$(target:G=:R=$(locate))]]></path>"
- "$(nl) <command><![CDATA[$(command)]]></command>"
- "$(nl) <output><![CDATA[$(output)]]></output>"
- ;
- .contents on $(xml-file) +=
- "$(nl) </action>"
- ;
- }
- # When no __ACTION_RULE__ is set "on" a target, the search falls
- # back to the global module
- module
- {
- __ACTION_RULE__ = build-system.out-xml.collect
- [ modules.peek build-system : .out-xml ] ;
+ # When no __ACTION_RULE__ is set "on" a target, the search falls back to
+ # the global module.
+ module
+ {
+ __ACTION_RULE__ = build-system.out-xml.collect
+ [ modules.peek build-system : .out-xml ] ;
+ }
}
-}
-NOTFILE all ;
-DEPENDS all : $(actual-targets) ;
-if $(bjam-targets)
-{
- UPDATE $(bjam-targets:G=e) $(.out-xml) ;
-}
-else if $(cleanall)
-{
- UPDATE clean-all ;
-}
-else if $(clean)
-{
- local to-clean ;
- for local t in [ virtual-target.all-targets ]
- {
- local p = [ $(t).project ] ;
+ # TODO: See if this 'NOTFILE all' statement can be moved below to the
+ # default case where the 'all' target is actually requested to be built.
+ # Check for other Jam scripts manually setting a dependency for this target.
+ NOTFILE all ;
- # Remove only derived targets.
- if [ $(t).action ]
- {
- if $(t) in $(targets-to-clean)
- || [ is-child [ $(p).project-module ] ] = true
- {
- to-clean += $(t) ;
- }
- }
+
+ # And now that all the actual raw Jam targets and all the dependencies
+ # between them have been prepared (or we have everything set so we can
+ # easily prepare them) all that is left is to tell Jam to update those
+ # targets.
+ if $(bjam-targets)
+ {
+ UPDATE $(bjam-targets:G=e) $(.out-xml) ;
}
- local to-clean-actual ;
- for local t in $(to-clean)
+ else if $(cleanall)
{
- to-clean-actual += [ $(t).actualize ] ;
+ UPDATE clean-all ;
+ }
+ else if $(clean)
+ {
+ common.Clean clean : [ actual-clean-targets ] ;
+ UPDATE clean ;
+ }
+ else
+ {
+ DEPENDS all : $(actual-targets) ;
+ UPDATE all $(.out-xml) ;
}
- common.Clean clean : $(to-clean-actual) ;
- UPDATE clean ;
-}
-else
-{
- UPDATE all $(.out-xml) ;
}
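
Putting the reworked main() block together, a single run that combines several
of the options discussed above might look like the following (the XML file name
is purely illustrative):

    bjam --debug-configuration --out-xml=build-report.xml toolset=gcc release
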
Modified: branches/release/tools/build/v2/build/alias.jam
==============================================================================
--- branches/release/tools/build/v2/build/alias.jam (original)
+++ branches/release/tools/build/v2/build/alias.jam 2008-03-15 14:55:28 EDT (Sat, 15 Mar 2008)
@@ -1,72 +1,69 @@
-# Copyright 2003, 2004, 2006 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+# Copyright 2003, 2004, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-# This module defines the 'alias' rule and associated class.
+# This module defines the 'alias' rule and the associated target class.
#
-# Alias is just a main target which returns its source targets without any
-# processing. For example::
+# Alias is just a main target which returns its source targets without any
+# processing. For example:
#
# alias bin : hello test_hello ;
# alias lib : helpers xml_parser ;
#
-# Another important use of 'alias' is to conveniently group source files::
+# Another important use of 'alias' is to conveniently group source files:
#
# alias platform-src : win.cpp : <os>NT ;
# alias platform-src : linux.cpp : <os>LINUX ;
# exe main : main.cpp platform-src ;
-#
+#
# Lastly, it's possible to create a local alias for some target, with different
# properties:
#
# alias big_lib : : @/external_project/big_lib/<link>static ;
#
-import targets ;
+
import "class" : new ;
-import property ;
-import errors : error ;
-import type : type ;
-import regex ;
import project ;
import property-set ;
+import targets ;
+
-class alias-target-class : basic-target
+class alias-target-class : basic-target
{
- rule __init__ ( name : project : sources * : requirements *
+ rule __init__ ( name : project : sources * : requirements *
: default-build * : usage-requirements * )
{
- basic-target.__init__ $(name) : $(project) : $(sources) : $(requirements)
- : $(default-build) : $(usage-requirements) ;
+ basic-target.__init__ $(name) : $(project) : $(sources) : $(requirements)
+ : $(default-build) : $(usage-requirements) ;
}
-
+
rule construct ( name : source-targets * : property-set )
{
return [ property-set.empty ] $(source-targets) ;
- }
-
- rule compute-usage-requirements ( subvariant )
+ }
+
+ rule compute-usage-requirements ( subvariant )
{
local base = [ basic-target.compute-usage-requirements $(subvariant) ] ;
# Add source's usage requirement. If we don't do this, "alias" does not
# look like 100% alias.
return [ $(base).add [ $(subvariant).sources-usage-requirements ] ] ;
}
-
}
# Declares the 'alias' target. It will build sources, and return them unaltered.
rule alias ( name : sources * : requirements * : default-build * : usage-requirements * )
{
local project = [ project.current ] ;
-
+
targets.main-target-alternative
- [ new alias-target-class $(name) : $(project)
- : [ targets.main-target-sources $(sources) : $(name) : no-renaming ]
- : [ targets.main-target-requirements $(requirements) : $(project) ]
- : [ targets.main-target-default-build $(default-build) : $(project) ]
- : [ targets.main-target-usage-requirements $(usage-requirements) : $(project) ]
- ] ;
+ [ new alias-target-class $(name) : $(project)
+ : [ targets.main-target-sources $(sources) : $(name) : no-renaming ]
+ : [ targets.main-target-requirements $(requirements) : $(project) ]
+ : [ targets.main-target-default-build $(default-build) : $(project) ]
+ : [ targets.main-target-usage-requirements $(usage-requirements) : $(project) ]
+ ] ;
}
IMPORT $(__name__) : alias : : alias ;
Modified: branches/release/tools/build/v2/build/build-request.jam
==============================================================================
--- branches/release/tools/build/v2/build/build-request.jam (original)
+++ branches/release/tools/build/v2/build/build-request.jam 2008-03-15 14:55:28 EDT (Sat, 15 Mar 2008)
@@ -2,16 +2,16 @@
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+import "class" : new ;
import sequence ;
import set ;
import regex ;
import feature ;
import property ;
-import numbers ;
import container ;
-import "class" : new ;
import string ;
+
# Transform property-set by applying f to each component property.
local rule apply-to-property-set ( f property-set )
{
@@ -19,22 +19,25 @@
return [ string.join [ $(f) $(properties) ] : / ] ;
}
-# expand the given build request by combining all property-sets which don't
-# specify conflicting non-free features.
+
+# Expand the given build request by combining all property-sets which don't
+# specify conflicting non-free features. Expects all the project files to
+# already be loaded.
rule expand-no-defaults ( property-sets * )
{
- # First make all features and subfeatures explicit
- local expanded-property-sets = [
- sequence.transform apply-to-property-set feature.expand-subfeatures
- : $(property-sets) ] ;
-
+ # First make all features and subfeatures explicit.
+ local expanded-property-sets = [ sequence.transform apply-to-property-set
+ feature.expand-subfeatures : $(property-sets) ] ;
+
# Now combine all of the expanded property-sets
local product = [ x-product $(expanded-property-sets) : $(feature-space) ] ;
-
+
return $(product) ;
}
-# implementaiton of x-product, below
+
+# Implementation of x-product, below. Expects all the project files to already
+# be loaded.
local rule x-product-aux ( property-sets + )
{
local result ;
@@ -46,58 +49,60 @@
{
local x-product-seen ;
{
- # don't mix in any conflicting features
+ # Don't mix in any conflicting features.
local x-product-used = $(x-product-used) $(f) ;
-
+
if $(property-sets[2])
{
local rest = [ x-product-aux $(property-sets[2-]) : $(feature-space) ] ;
result = $(property-sets[1])/$(rest) ;
}
-
+
result ?= $(property-sets[1]) ;
}
-
- # If we didn't encounter a conflicting feature lower down,
- # don't recurse again.
+
+ # If we didn't encounter a conflicting feature lower down, don't recurse
+ # again.
if ! [ set.intersection $(f) : $(x-product-seen) ]
{
property-sets = ;
}
-
+
seen = $(x-product-seen) ;
}
-
+
if $(property-sets[2])
{
result += [ x-product-aux $(property-sets[2-]) : $(feature-space) ] ;
}
-
- # Note that we've seen these features so that higher levels will
- # recurse again without them set.
+
+ # Note that we've seen these features so that higher levels will recurse
+ # again without them set.
x-product-seen += $(f) $(seen) ;
return $(result) ;
}
-# Return the cross-product of all elements of property-sets, less any
-# that would contain conflicting values for single-valued features.
+
+# Return the cross-product of all elements of property-sets, less any that would
+# contain conflicting values for single-valued features. Expects all the project
+# files to already be loaded.
local rule x-product ( property-sets * )
{
if $(property-sets).non-empty
{
- # prepare some "scoped globals" that can be used by the
- # implementation function, x-product-aux.
+ # Prepare some "scoped globals" that can be used by the implementation
+ # function, x-product-aux.
local x-product-seen x-product-used ;
return [ x-product-aux $(property-sets) : $(feature-space) ] ;
}
- # otherwise return empty
+ # Otherwise return empty.
}
-# Returns true if 'v' is either implicit value, or
-# the part before the first '-' symbol is implicit value
+
+# Returns true if either 'v' or the part of 'v' before the first '-' symbol is
+# an implicit value. Expects all the project files to already be loaded.
local rule looks-like-implicit-value ( v )
{
-
if [ feature.is-implicit-value $(v) ]
{
return true ;
@@ -108,16 +113,16 @@
if [ feature.is-implicit-value $(split[1]) ]
{
return true ;
- }
+ }
}
}
-# Takes the command line tokens (such as taken from ARGV rule) and constructs
-# build request from it.
-# Returns a vector of two vectors (where "vector" means container.jam's "vector").
-# First is the set of targets specified in the command line, and second is
-# the set of requested build properties.
+# Takes the command line tokens (such as taken from the ARGV rule) and
+# constructs a build request from them. Returns a vector of two vectors (where
+# "vector" means container.jam's "vector"). First is the set of targets
+# specified in the command line, and second is the set of requested build
+# properties. Expects all the project files to already be loaded.
rule from-command-line ( command-line * )
{
local targets ;
@@ -131,15 +136,16 @@
{
skip-next = ;
}
- else if ! [ MATCH "^(-).*" : $(e) ]
+ else if ! [ MATCH "^(-).*" : $(e) ]
{
- # Build request spec either has "=" in it, or completely
- # consists of implicit feature values.
+ # Build request spec either has "=" in it or completely consists of
+ # implicit feature values.
local fs = feature-space ;
- if [ MATCH "(.*=.*)" : $(e) ]
- || [ looks-like-implicit-value $(e:D=) : $(feature-space) ]
+ if [ MATCH "(.*=.*)" : $(e) ]
+ || [ looks-like-implicit-value $(e:D=) : $(feature-space) ]
{
- properties += [ convert-command-line-element $(e) : $(feature-space) ] ;
+ properties += [ convert-command-line-element $(e)
+ : $(feature-space) ] ;
}
else
{
@@ -151,24 +157,27 @@
skip-next = true ;
}
}
- return [ new vector [ new vector $(targets) ] [ new vector $(properties) ] ] ;
+ return [ new vector
+ [ new vector $(targets) ]
+ [ new vector $(properties) ] ] ;
}
-# Converts one element of command line build request specification into
-# internal form.
+
+# Converts one element of command line build request specification into internal
+# form. Expects all the project files to already be loaded.
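+#
+# A sketch of the expected conversion (see the __test__ rule below for the
+# checked cases): an element such as
+#
+#   gcc/runtime-link=dynamic,static
+#
+# is expected to become "gcc/<runtime-link>dynamic gcc/<runtime-link>static",
+# i.e. comma-separated values are split and recombined with the other
+# slash-separated components.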
local rule convert-command-line-element ( e )
{
local result ;
local parts = [ regex.split $(e) "/" ] ;
- for local p in $(parts)
+ for local p in $(parts)
{
local m = [ MATCH "([^=]*)=(.*)" : $(p) ] ;
local lresult ;
- if $(m)
+ if $(m)
{
local feature = $(m[1]) ;
- local values = [ regex.split $(m[2]) "," ] ;
- lresult = <$(feature)>$(values) ;
+ local values = [ regex.split $(m[2]) "," ] ;
+ lresult = <$(feature)>$(values) ;
}
else
{
@@ -176,17 +185,16 @@
}
if ! [ MATCH (.*-.*) : $(p) ]
- {
- # property.validate cannot handle subfeatures,
- # so we avoid the check here.
+ {
+ # property.validate cannot handle subfeatures, so we avoid the check
+ # here.
for local p in $(lresult)
{
property.validate $(p) : $(feature-space) ;
}
}
-
- if ! $(result)
+ if ! $(result)
{
result = $(lresult) ;
}
@@ -194,17 +202,19 @@
{
result = $(result)/$(lresult) ;
}
- }
-
+ }
+
return $(result) ;
}
+
rule __test__ ( )
{
- import assert feature ;
-
+ import assert ;
+ import feature ;
+
feature.prepare-test build-request-test-temp ;
-
+
import build-request ;
import build-request : expand-no-defaults : build-request.expand-no-defaults ;
import errors : try catch ;
@@ -222,56 +232,45 @@
feature runtime-link : dynamic static : symmetric ;
- # empty build requests should expand to empty.
+ # Empty build requests should expand to empty.
assert.result
- : build-request.expand-no-defaults
- ;
+ : build-request.expand-no-defaults ;
assert.result
- <toolset>gcc/<toolset-gcc:version>3.0.1/<stdlib>stlport/<variant>debug
- <toolset>msvc/<stdlib>stlport/<variant>debug
- <toolset>msvc/<variant>debug
-
- : build-request.expand-no-defaults gcc-3.0.1/stlport msvc/stlport msvc debug
- ;
+ <toolset>gcc/<toolset-gcc:version>3.0.1/<stdlib>stlport/<variant>debug
+ <toolset>msvc/<stdlib>stlport/<variant>debug
+ <toolset>msvc/<variant>debug
+ : build-request.expand-no-defaults gcc-3.0.1/stlport msvc/stlport msvc debug ;
assert.result
- <toolset>gcc/<toolset-gcc:version>3.0.1/<stdlib>stlport/<variant>debug
- <toolset>msvc/<variant>debug
- <variant>debug/<toolset>msvc/<stdlib>stlport
-
- : build-request.expand-no-defaults gcc-3.0.1/stlport msvc debug msvc/stlport
- ;
+ <toolset>gcc/<toolset-gcc:version>3.0.1/<stdlib>stlport/<variant>debug
+ <toolset>msvc/<variant>debug
+ <variant>debug/<toolset>msvc/<stdlib>stlport
+ : build-request.expand-no-defaults gcc-3.0.1/stlport msvc debug msvc/stlport ;
assert.result
- <toolset>gcc/<toolset-gcc:version>3.0.1/<stdlib>stlport/<variant>debug/<inlining>off
- <toolset>gcc/<toolset-gcc:version>3.0.1/<stdlib>stlport/<variant>release/<inlining>off
-
- : build-request.expand-no-defaults gcc-3.0.1/stlport debug release <inlining>off
- ;
+ <toolset>gcc/<toolset-gcc:version>3.0.1/<stdlib>stlport/<variant>debug/<inlining>off
+ <toolset>gcc/<toolset-gcc:version>3.0.1/<stdlib>stlport/<variant>release/<inlining>off
+ : build-request.expand-no-defaults gcc-3.0.1/stlport debug release <inlining>off ;
assert.result
- <include>a/b/c/<toolset>gcc/<toolset-gcc:version>3.0.1/<stdlib>stlport/<variant>debug/<include>x/y/z
- <include>a/b/c/<toolset>msvc/<stdlib>stlport/<variant>debug/<include>x/y/z
- <include>a/b/c/<toolset>msvc/<variant>debug/<include>x/y/z
-
- : build-request.expand-no-defaults <include>a/b/c gcc-3.0.1/stlport msvc/stlport msvc debug <include>x/y/z
- ;
+ <include>a/b/c/<toolset>gcc/<toolset-gcc:version>3.0.1/<stdlib>stlport/<variant>debug/<include>x/y/z
+ <include>a/b/c/<toolset>msvc/<stdlib>stlport/<variant>debug/<include>x/y/z
+ <include>a/b/c/<toolset>msvc/<variant>debug/<include>x/y/z
+ : build-request.expand-no-defaults <include>a/b/c gcc-3.0.1/stlport msvc/stlport msvc debug <include>x/y/z ;
local r ;
- r = [ build-request.from-command-line bjam debug runtime-link=dynamic ] ;
+ r = [ build-request.from-command-line bjam debug runtime-link=dynamic ] ;
assert.equal [ $(r).get-at 1 ] : ;
assert.equal [ $(r).get-at 2 ] : debug <runtime-link>dynamic ;
try ;
{
-
build-request.from-command-line bjam gcc/debug runtime-link=dynamic/static ;
}
catch \"static\" is not a value of an implicit feature ;
-
r = [ build-request.from-command-line bjam -d2 --debug debug target runtime-link=dynamic ] ;
assert.equal [ $(r).get-at 1 ] : target ;
assert.equal [ $(r).get-at 2 ] : debug <runtime-link>dynamic ;
@@ -282,13 +281,13 @@
r = [ build-request.from-command-line bjam debug gcc/runtime-link=dynamic,static ] ;
assert.equal [ $(r).get-at 1 ] : ;
- assert.equal [ $(r).get-at 2 ] : debug gcc/<runtime-link>dynamic
- gcc/<runtime-link>static ;
+ assert.equal [ $(r).get-at 2 ] : debug gcc/<runtime-link>dynamic
+ gcc/<runtime-link>static ;
r = [ build-request.from-command-line bjam msvc gcc,borland/runtime-link=static ] ;
assert.equal [ $(r).get-at 1 ] : ;
- assert.equal [ $(r).get-at 2 ] : msvc gcc/<runtime-link>static
- borland/<runtime-link>static ;
+ assert.equal [ $(r).get-at 2 ] : msvc gcc/<runtime-link>static
+ borland/<runtime-link>static ;
r = [ build-request.from-command-line bjam gcc-3.0 ] ;
assert.equal [ $(r).get-at 1 ] : ;
@@ -296,5 +295,3 @@
feature.finish-test build-request-test-temp ;
}
-
-
Modified: branches/release/tools/build/v2/build/feature.jam
==============================================================================
--- branches/release/tools/build/v2/build/feature.jam (original)
+++ branches/release/tools/build/v2/build/feature.jam 2008-03-15 14:55:28 EDT (Sat, 15 Mar 2008)
@@ -1,52 +1,53 @@
-# Copyright 2001, 2002, 2003 Dave Abrahams
-# Copyright 2002, 2006 Rene Rivera
-# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+# Copyright 2001, 2002, 2003 Dave Abrahams
+# Copyright 2002, 2006 Rene Rivera
+# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+import assert : * ;
import "class" : * ;
-
-import errors : error lol->list ;
-import sequence ;
+import errors : lol->list ;
+import indirect ;
+import modules ;
import regex ;
+import sequence ;
import set ;
import utility ;
-import modules indirect ;
-import assert : * ;
+
local rule setup ( )
{
.all-attributes =
-
- implicit
- executed
- composite
- optional
- symmetric
- free
- incidental
- path
- dependency
- propagated
- link-incompatible
- subfeature
- order-sensitive
- ;
+ implicit
+ executed
+ composite
+ optional
+ symmetric
+ free
+ incidental
+ path
+ dependency
+ propagated
+ link-incompatible
+ subfeature
+ order-sensitive
+ ;
.all-features = ;
- .all-subfeatures = ; # non-subfeatures
- .all-top-features = ; # non-subfeatures
+ .all-subfeatures = ;
+ .all-top-features = ; # non-subfeatures
.all-implicit-values = ;
}
setup ;
-# prepare a fresh space to test in by moving all global variable
-# settings into the given temporary module and erasing them here.
+
+# Prepare a fresh space to test in by moving all global variable settings into
+# the given temporary module and erasing them here.
rule prepare-test ( temp-module )
{
DELETE_MODULE $(temp-module) ;
-
- # transfer globals to temp-module
+
+ # Transfer globals to temp-module.
for local v in [ VARNAMES feature ]
{
if [ MATCH (\\.) : $(v) ]
@@ -58,11 +59,12 @@
setup ;
}
-# clear out all global variables and recover all variables from the
-# given temporary module
+
+# Clear out all global variables and recover all variables from the given
+# temporary module.
rule finish-test ( temp-module )
{
- # clear globals
+ # Clear globals.
for local v in [ VARNAMES feature ]
{
if [ MATCH (\\.) : $(v) ]
@@ -70,7 +72,7 @@
$(v) = ;
}
}
-
+
for local v in [ VARNAMES $(temp-module) ]
{
$(v) = [ modules.peek $(temp-module) : $(v) ] ;
@@ -79,33 +81,31 @@
}
-# Transform features by bracketing any elements which aren't already
-# bracketed by "<>"
+# Transform features by bracketing any elements which aren't already bracketed
+# by "<>".
local rule grist ( features * )
{
local empty = "" ;
- local r = $(empty:G=$(features)) ;
- return $(r) ;
+ return $(empty:G=$(features)) ;
}
-empty = "" ;
-# declare a new feature with the given name, values, and attributes.
-rule feature (
- name # feature name
- : values * # the allowable values - may be extended later with feature.extend
- : attributes * # The feature's attributes (e.g. implicit, free, propagated...)
+# Declare a new feature with the given name, values, and attributes.
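+#
+# For example (illustrative only; similar declarations are exercised in the
+# __test__ rule below):
+#
+#   feature toolset : gcc : implicit ;
+#   feature define : : free ;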
+rule feature (
+ name # Feature name.
+ : values * # Allowable values - may be extended later using feature.extend.
+ : attributes * # Feature attributes (e.g. implicit, free, propagated...).
)
{
name = [ grist $(name) ] ;
local error ;
- # if there are any unknown attributes...
+ # Check for any unknown attributes.
if ! ( $(attributes) in $(.all-attributes) )
{
error = unknown attributes:
- [ set.difference $(attributes) : $(.all-attributes) ] ;
+ [ set.difference $(attributes) : $(.all-attributes) ] ;
}
else if $(name) in $(.all-features)
{
@@ -116,16 +116,15 @@
error = free features cannot also be implicit ;
}
else if free in $(attributes) && propagated in $(attributes)
- {
+ {
error = free features cannot be propagated ;
- }
-
+ }
if $(error)
{
- error $(error)
- : "in" feature declaration:
- : feature [ lol->list $(1) : $(2) : $(3) ] ;
+ errors.error $(error)
+ : "in" feature declaration:
+ : feature [ lol->list $(1) : $(2) : $(3) ] ;
}
$(name).values ?= ;
@@ -142,24 +141,24 @@
{
.all-top-features += $(name) ;
}
- extend $(name) : $(values) ;
+ extend $(name) : $(values) ;
}
-# set default value of the given feature, overriding any previous
-# default.
+
+# Sets the default value of the given feature, overriding any previous default.
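+#
+# Example (as used in the __test__ rule below):
+#
+#   set-default <runtime-link> : static ;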
rule set-default ( feature : value )
{
local f = [ grist $(feature) ] ;
- if ! $(value) in $($(f).values)
+ if ! $(value) in $($(f).values)
{
- errors.error "The specified default value, '$(value)' is invalid"
+ errors.error "The specified default value, '$(value)' is invalid"
: "allowed values are: " $($(f).values) ;
- }
+ }
$(f).default = $(value) ;
}
-# return the default property values for the given features.
+# Returns the default property values for the given features.
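+#
+# For example, assuming <runtime-link> was declared with "dynamic" listed first
+# and <optimization> with "on" listed first, while <define> is a free feature
+# without a default:
+#
+#   defaults <runtime-link> <define> <optimization>
+#
+# is expected to return "<runtime-link>dynamic <optimization>on".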
rule defaults ( features * )
{
local result ;
@@ -178,7 +177,8 @@
return $(result) ;
}
-# returns true iff all elements of names are valid features.
+
+# Returns true iff all 'names' elements are valid features.
rule valid ( names + )
{
if $(names) in $(.all-features)
@@ -187,31 +187,33 @@
}
}
-# return the attibutes of the given feature
+
+# Returns the attributes of the given feature.
rule attributes ( feature )
{
return $($(:E=:G=$(feature)).attributes) ;
}
-# return the values of the given feature
+
+# Returns the values of the given feature.
rule values ( feature )
{
return $($(:E=:G=$(feature)).values) ;
}
-# returns true iff 'value-string' is a value-string of an implicit feature
+
+# Returns true iff 'value-string' is a value-string of an implicit feature.
rule is-implicit-value ( value-string )
{
local v = [ regex.split $(value-string) - ] ;
local failed ;
- if ! $(v[1]) in $(.all-implicit-values)
+ if ! $(v[1]) in $(.all-implicit-values)
{
failed = true ;
}
- else
+ else
{
local feature = $($(v[1]).implicit-feature) ;
-
for local subvalue in $(v[2-])
{
if ! [ find-implied-subfeature $(feature) $(subvalue) : $(v[1]) ]
@@ -220,82 +222,87 @@
}
}
}
-
- if ! $(failed)
+
+ if ! $(failed)
{
return true ;
}
}
-# return the implicit feature associated with the given implicit value.
+
+# Returns the implicit feature associated with the given implicit value.
rule implied-feature ( implicit-value )
{
local components = [ regex.split $(implicit-value) "-" ] ;
-
+
local feature = $($(components[1]).implicit-feature) ;
if ! $(feature)
{
- error \"$(implicit-value)\" is not a value of an implicit feature ;
- feature = "" ; # keep testing happy; it expects a result.
+ errors.error \"$(implicit-value)\" is not a value of an implicit feature ;
+ feature = "" ; # Keep testing happy; it expects a result.
}
return $(feature) ;
}
+
local rule find-implied-subfeature ( feature subvalue : value-string ? )
{
- # feature should be of the form <feature-name>
+ # Feature should be of the form <feature-name>.
if $(feature) != $(feature:G)
{
- error invalid feature $(feature) ;
+ errors.error invalid feature $(feature) ;
}
return $($(feature)$(value-string:E="")<>$(subvalue).subfeature) ;
}
-# Given a feature and a value of one of its subfeatures, find the name
-# of the subfeature. If value-string is supplied, looks for implied
-# subfeatures that are specific to that value of feature
-rule implied-subfeature (
- feature # The main feature name
- subvalue # The value of one of its subfeatures
- : value-string ? # The value of the main feature
+
+# Given a feature and a value of one of its subfeatures, find the name of the
+# subfeature. If value-string is supplied, looks for implied subfeatures that
+# are specific to that value of the feature.
+rule implied-subfeature (
+ feature # The main feature name.
+ subvalue # The value of one of its subfeatures.
+ : value-string ? # The value of the main feature.
)
{
local subfeature = [ find-implied-subfeature $(feature) $(subvalue)
- : $(value-string) ] ;
-
+ : $(value-string) ] ;
if ! $(subfeature)
{
value-string ?= "" ;
- error \"$(subvalue)\" is not a known subfeature value of
- $(feature)$(value-string) ;
+ errors.error \"$(subvalue)\" is not a known subfeature value of
+ $(feature)$(value-string) ;
}
-
return $(subfeature) ;
}
-# generate an error if the feature is unknown
+
+# Generate an error if the feature is unknown.
local rule validate-feature ( feature )
{
if ! $(feature) in $(.all-features)
{
- error unknown feature \"$(feature)\" ;
+ errors.error unknown feature \"$(feature)\" ;
}
}
-# Given a feature and value, or just a value corresponding to an
-# implicit feature, returns a property set consisting of all component
-# subfeatures and their values. For example:
+
+# Given a feature and its value or just a value corresponding to an implicit
+# feature, returns a property set consisting of all component subfeatures and
+# their values. For example, both of the following calls:
+#
+# expand-subfeatures-aux <toolset>gcc-2.95.2-linux-x86
+# expand-subfeatures-aux gcc-2.95.2-linux-x86
#
-# expand-subfeatures <toolset>gcc-2.95.2-linux-x86
-# -> <toolset>gcc <toolset-version>2.95.2 <toolset-os>linux <toolset-cpu>x86
+# return:
#
-# equivalent to:
-# expand-subfeatures gcc-2.95.2-linux-x86
-local rule expand-subfeatures-aux (
- feature ? # The name of the feature, or empty if value corresponds to an implicit property
- : value # The value of the feature.
- : dont-validate ? # If set, no validation of value string will be done
+# <toolset>gcc <toolset-version>2.95.2 <toolset-os>linux <toolset-cpu>x86
+local rule expand-subfeatures-aux (
+ feature ? # Feature name or empty if value corresponds to an
+ # implicit property.
+ : value # Feature value.
+ : dont-validate ? # If set, no value string validation will be done.
)
{
if $(feature)
@@ -312,31 +319,31 @@
validate-feature $(feature) ;
}
if ! $(dont-validate)
- {
+ {
validate-value-string $(feature) $(value) ;
}
-
+
local components = [ regex.split $(value) "-" ] ;
-
- # get the top-level feature's value
+
+ # Get the top-level feature's value.
local value = $(components[1]:G=) ;
local result = $(components[1]:G=$(feature)) ;
-
+
local subvalues = $(components[2-]) ;
while $(subvalues)
{
- local subvalue = $(subvalues[1]) ; # pop the head off of subvalues
+ local subvalue = $(subvalues[1]) ; # Pop the head off of subvalues.
subvalues = $(subvalues[2-]) ;
-
+
local subfeature = [ find-implied-subfeature $(feature) $(subvalue) : $(value) ] ;
-
- # If no subfeature was found, reconstitute the value string and use that
+
+ # If no subfeature was found, reconstitute the value string and use that.
if ! $(subfeature)
{
result = $(components:J=-) ;
result = $(result:G=$(feature)) ;
- subvalues = ; # stop looping
+ subvalues = ; # Stop looping.
}
else
{
@@ -344,25 +351,27 @@
result += $(subvalue:G=$(f)-$(subfeature)) ;
}
}
-
+
return $(result) ;
}
-# Make all elements of properties corresponding to implicit features
-# explicit, and express all subfeature values as separate properties
-# in their own right. For example, the property
+
+# Make all elements of properties corresponding to implicit features explicit,
+# and express all subfeature values as separate properties in their own right.
+# For example, both of the following properties
#
# gcc-2.95.2-linux-x86
+# <toolset>gcc-2.95.2-linux-x86
#
# might expand to
#
# <toolset>gcc <toolset-version>2.95.2 <toolset-os>linux <toolset-cpu>x86
#
-rule expand-subfeatures (
- properties * # property set with elements of the form
- # <feature>value-string or just value-string in the
- # case of implicit features.
- : dont-validate ?
+rule expand-subfeatures (
+ properties * # Property set with elements of the form
+ # <feature>value-string or just value-string in the case
+ # of implicit features.
+ : dont-validate ?
)
{
local result ;
@@ -370,17 +379,18 @@
{
# Don't expand subfeatures in subfeatures
if ! [ MATCH "(:)" : $(p:G) ]
- {
+ {
result += [ expand-subfeatures-aux $(p:G) : $(p:G=) : $(dont-validate) ] ;
}
else
{
result += $(p) ;
- }
+ }
}
return $(result) ;
}
+
# Helper for extend, below. Handles the feature case.
local rule extend-feature ( feature : values * )
{
@@ -392,7 +402,7 @@
{
if $($(v).implicit-feature)
{
- error $(v) is already associated with the \"$($(v).implicit-feature)\" feature ;
+ errors.error $(v) is already associated with the \"$($(v).implicit-feature)\" feature ;
}
$(v).implicit-feature = $(feature) ;
}
@@ -404,20 +414,21 @@
# This is the first value specified for this feature,
# take it as default value
$(feature).default = $(values[1]) ;
- }
+ }
$(feature).values += $(values) ;
}
+
# Checks that value-string is a valid value-string for the given feature.
rule validate-value-string ( feature value-string )
-{
- if ! (
- free in $($(feature).attributes)
- || ( $(value-string) in $(feature).values )
+{
+ if ! (
+ free in $($(feature).attributes)
+ || ( $(value-string) in $(feature).values )
)
{
local values = $(value-string) ;
-
+
if $($(feature).subfeatures)
{
values = [ regex.split $(value-string) - ] ;
@@ -425,38 +436,33 @@
if ! ( $(values[1]) in $($(feature).values) ) &&
- # An empty value is allowed for optional features
- ( $(values[1]) || ! ( optional in $($(feature).attributes) ) )
+ # An empty value is allowed for optional features.
+ ( $(values[1]) || ! ( optional in $($(feature).attributes) ) )
{
- error \"$(values[1])\" is not a known value of feature $(feature)
- : legal values: \"$($(feature).values)\" ;
+ errors.error \"$(values[1])\" is not a known value of feature $(feature)
+ : legal values: \"$($(feature).values)\" ;
}
for local v in $(values[2-])
{
- # this will validate any subfeature values in value-string
+ # This will validate any subfeature values in value-string.
implied-subfeature $(feature) $(v) : $(values[1]) ;
}
}
}
+
# A helper that computes:
-# * the name(s) of the module-local variable(s) used to record the
-# correspondence between subvalue(s) and a subfeature
-#
-# * the value of that variable when such a subfeature/subvalue has
-# been defined
-#
-# Returns a list consisting of the latter followed by the former
-local rule subvalue-var (
- feature # Main feature name
-
- value-string ? # If supplied, specifies a specific value of the
- # main feature for which the subfeature values
- # are valid
-
- : subfeature # The name of the subfeature
- : subvalues * # The subfeature values
+# * the name(s) of the module-local variable(s) used to record the
+#   correspondence between subvalue(s) and a subfeature
+# * the value of that variable when such a subfeature/subvalue has been defined
+#
+# Returns a list consisting of the latter followed by the former.
+local rule subvalue-var (
+ feature # Main feature name.
+ value-string ? # If supplied, specifies a specific value of the main
+ # feature for which the subfeature values are valid.
+ : subfeature # Subfeature name.
+ : subvalues * # Subfeature values.
)
{
feature = [ grist $(feature) ] ;
@@ -472,68 +478,68 @@
$(feature)$(value-string:E="")<>$(subvalues).subfeature ;
}
-# Extends the given subfeature with the subvalues. If the optional
-# value-string is provided, the subvalues are only valid for the given
-# value of the feature. Thus, you could say that
-# <target-platform>mingw is specifc to <toolset>gcc-2.95.2 as follows:
+
+# Extends the given subfeature with the subvalues. If the optional value-string
+# is provided, the subvalues are only valid for the given value of the feature.
+# Thus, you could say that <target-platform>mingw is specific to
+# <toolset>gcc-2.95.2 as follows:
#
# extend-subfeature toolset gcc-2.95.2 : target-platform : mingw ;
#
-rule extend-subfeature (
- feature # The feature whose subfeature is being extended
-
- value-string ? # If supplied, specifies a specific value of the
- # main feature for which the new subfeature values
- # are valid
-
- : subfeature # The name of the subfeature
- : subvalues * # The additional values of the subfeature being defined.
+rule extend-subfeature (
+ feature # The feature whose subfeature is being extended.
+
+ value-string ? # If supplied, specifies a specific value of the main
+ # feature for which the new subfeature values are valid.
+
+ : subfeature # Subfeature name.
+ : subvalues * # Additional subfeature values.
)
{
- local subfeature-vars = [
- subvalue-var $(feature) $(value-string) : $(subfeature) : $(subvalues) ] ;
-
+ local subfeature-vars = [ subvalue-var $(feature) $(value-string)
+ : $(subfeature) : $(subvalues) ] ;
+
local f = [ utility.ungrist [ grist $(feature) ] ] ;
extend $(f)-$(subfeature-vars[1]) : $(subvalues) ;
-
- # provide a way to get from the given feature or property and
- # subfeature value to the subfeature name.
+
+ # Provide a way to get from the given feature or property and subfeature
+ # value to the subfeature name.
$(subfeature-vars[2-]) = $(subfeature-vars[1]) ;
}
-# Returns true iff the subvalues are valid for the feature. When the
-# optional value-string is provided, returns true iff the subvalues
-# are valid for the given value of the feature.
+
+# Returns true iff the subvalues are valid for the feature. When the optional
+# value-string is provided, returns true iff the subvalues are valid for the
+# given value of the feature.
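+#
+# For example (mirroring the checks in the __test__ rule below), after a
+# "subfeature toolset gcc : version : 2.95.2 2.95.3 ;" declaration one would
+# expect:
+#
+#   is-subvalue toolset : gcc : version : 2.95.3    # true
+#   is-subvalue toolset : msvc : version : 2.95.3   # empty (false)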
rule is-subvalue ( feature : value-string ? : subfeature : subvalue )
{
- local subfeature-vars = [
- subvalue-var $(feature) $(value-string) : $(subfeature) : $(subvalue) ] ;
-
- if $($(subfeature-vars[2])) = $(subfeature-vars[1])
+ local subfeature-vars = [ subvalue-var $(feature) $(value-string)
+ : $(subfeature) : $(subvalue) ] ;
+
+ if $($(subfeature-vars[2])) = $(subfeature-vars[1])
{
return true ;
}
}
+
# Can be called three ways:
#
# 1. extend feature : values *
# 2. extend <feature> subfeature : values *
# 3. extend <feature>value-string subfeature : values *
#
-# * Form 1 adds the given values to the given feature
-# * Forms 2 and 3 add subfeature values to the given feature
-# * Form 3 adds the subfeature values as specific to the given
-# property value-string.
+# * Form 1 adds the given values to the given feature.
+# * Forms 2 and 3 add subfeature values to the given feature.
+# * Form 3 adds the subfeature values as specific to the given property
+# value-string.
#
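+# For illustration only (these exact calls are not part of this module's
+# tests), form 1 might look like
+#
+#   extend toolset : msvc metrowerks ;
+#
+# while form 3, adding a subfeature value specific to <toolset>gcc, might look
+# like
+#
+#   extend <toolset>gcc platform : darwin ;
+#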
rule extend ( feature-or-property subfeature ? : values * )
{
- local
- feature # If a property was specified this is its feature
- value-string # E.G., the gcc-2.95-2 part of <toolset>gcc-2.95.2
- ;
+ local feature ; # If a property was specified this is its feature.
+ local value-string ; # E.g., the gcc-2.95-2 part of <toolset>gcc-2.95.2.
- # if a property was specified
+ # If a property was specified.
if $(feature-or-property:G) && $(feature-or-property:G=)
{
# Extract the feature and value-string, if any.
@@ -545,140 +551,145 @@
feature = [ grist $(feature-or-property) ] ;
}
- # Dispatch to the appropriate handler
+ # Dispatch to the appropriate handler.
if $(subfeature)
{
- extend-subfeature $(feature) $(value-string)
- : $(subfeature) : $(values) ;
+ extend-subfeature $(feature) $(value-string) : $(subfeature)
+ : $(values) ;
}
else
{
# If no subfeature was specified, we didn't expect to see a
- # value-string
+ # value-string.
if $(value-string)
{
- error can only be specify a property as the first argument
- when extending a subfeature
- : usage:
- : " extend" feature ":" values...
- : " | extend" <feature>value-string subfeature ":" values...
- ;
+ errors.error can only specify a property as the first argument when
+ extending a subfeature
+ : usage:
+ : " extend" feature ":" values...
+ : " | extend" <feature>value-string subfeature ":" values...
+ ;
}
extend-feature $(feature) : $(values) ;
}
}
+
local rule get-subfeature-name ( subfeature value-string ? )
{
local prefix = $(value-string): ;
return $(prefix:E="")$(subfeature) ;
}
-# Declares a subfeature
-rule subfeature (
- feature # Root feature that is not a subfeature
- value-string ? # A value-string specifying which feature or
- # subfeature values this subfeature is specific to,
- # if any
-
- : subfeature # The name of the subfeature being declared
- : subvalues * # The allowed values of this subfeature
- : attributes * # The attributes of the subfeature
+
+# Declares a subfeature.
+rule subfeature (
+ feature # Root feature that is not a subfeature.
+ value-string ? # A value-string specifying which feature or subfeature
+ # values this subfeature is specific to, if any.
+ : subfeature # The name of the subfeature being declared.
+ : subvalues * # The allowed values of this subfeature.
+ : attributes * # The attributes of the subfeature.
)
{
feature = [ grist $(feature) ] ;
validate-feature $(feature) ;
-
- # Add grist to the subfeature name if a value-string was supplied
+
+ # Add grist to the subfeature name if a value-string was supplied.
local subfeature-name = [ get-subfeature-name $(subfeature) $(value-string) ] ;
-
+
if $(subfeature-name) in $($(feature).subfeatures)
{
- error \"$(subfeature)\" already declared as a subfeature of \"$(feature)\"
- "specific to "$(value-string) ;
+ errors.error \"$(subfeature)\" already declared as a subfeature of \"$(feature)\"
+ "specific to "$(value-string) ;
}
$(feature).subfeatures += $(subfeature-name) ;
-
- # First declare the subfeature as a feature in its own right
+
+ # First declare the subfeature as a feature in its own right.
local f = [ utility.ungrist $(feature) ] ;
feature $(f)-$(subfeature-name) : $(subvalues) : $(attributes) subfeature ;
-
+
# Now make sure the subfeature values are known.
extend-subfeature $(feature) $(value-string) : $(subfeature) : $(subvalues) ;
}
-# Set the components of the given composite property
+
+# Set components of the given composite property.
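+#
+# A typical use (illustrative; the actual variant definitions live elsewhere)
+# would be something like
+#
+#   compose <variant>debug : <define>_DEBUG <optimization>off ;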
rule compose ( composite-property : component-properties * )
{
local feature = $(composite-property:G) ;
if ! ( composite in [ attributes $(feature) ] )
{
- error "$(feature)" is not a composite feature ;
+ errors.error "$(feature)" is not a composite feature ;
}
$(composite-property).components ?= ;
if $($(composite-property).components)
{
- error components of "$(composite-property)" already set:
- $($(composite-property).components) ;
+ errors.error components of "$(composite-property)" already set:
+ $($(composite-property).components) ;
}
if $(composite-property) in $(component-properties)
{
- error composite property "$(composite-property)" cannot have itself as a component ;
+ errors.error composite property "$(composite-property)" cannot have itself as a component ;
}
$(composite-property).components = $(component-properties) ;
}
+
local rule expand-composite ( property )
{
return $(property)
- [ sequence.transform expand-composite : $($(property).components) ] ;
+ [ sequence.transform expand-composite : $($(property).components) ] ;
}
-# return all values of the given feature specified by the given property set.
+
+# Return all values of the given feature specified by the given property set.
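+#
+# For example (this case is checked in the __test__ rule below):
+#
+#   get-values <x> : <x>a <y>b <x>c <y>d <x>e
+#
+# is expected to return "a c e".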
rule get-values ( feature : properties * )
{
local result ;
-
- feature = $(:E=:G=$(feature)) ; # add <> if necessary.
+
+ feature = $(:E=:G=$(feature)) ; # Add <> if necessary.
for local p in $(properties)
{
if $(p:G) = $(feature)
{
- #~ Use MATCH instead if :G= to get the value, in order to preserve
- #~ the value intact instead of having bjam treat it as a decompossible
- #~ path.
+ # Use MATCH instead of :G= to get the value, in order to preserve
+ # the value intact instead of having bjam treat it as a decomposable
+ # path.
result += [ MATCH ">(.*)" : $(p) ] ;
}
}
return $(result) ;
}
+
rule free-features ( )
{
return $(free.features) ;
}
-# Expand all composite properties in the set so that all components
-# are explicitly expressed.
+
+# Expand all composite properties in the set so that all components are
+# explicitly expressed.
rule expand-composites ( properties * )
{
local explicit-features = $(properties:G) ;
-
local result ;
- # now expand composite features
+
+ # Now expand composite features.
for local p in $(properties)
{
local expanded = [ expand-composite $(p) ] ;
-
+
for local x in $(expanded)
{
if ! $(x) in $(result)
{
local f = $(x:G) ;
-
+
if $(f) in $(free.features)
{
result += $(x) ;
@@ -689,21 +700,21 @@
{
if $(f) in $(result:G)
{
- error expansions of composite features result in conflicting
- values for $(f)
- : values: [ get-values $(f) : $(result) ] $(x:G=)
- : one contributing composite property was $(p) ;
+ errors.error expansions of composite features result
+ in conflicting values for $(f)
+ : values: [ get-values $(f) : $(result) ] $(x:G=)
+ : one contributing composite property was $(p) ;
}
else
{
result += $(x) ;
}
}
- }
+ }
else if $(f) in $(result:G)
{
- error explicitly-specified values of non-free feature
- $(f) conflict :
+ errors.error explicitly-specified values of non-free feature
+ $(f) conflict :
"existing values:" [ get-values $(f) : $(properties) ] :
"value from expanding " $(p) ":" $(x:G=) ;
}
@@ -711,15 +722,16 @@
{
result += $(x) ;
}
- }
+ }
}
}
return $(result) ;
}
-# Return true iff f is an ordinary subfeature of the parent-property's
-# feature, or if f is a subfeature fo the parent-property's feature
-# specific to the parent-property's value
+
+# Return true iff f is an ordinary subfeature of the parent-property's feature,
+# or if f is a subfeature of the parent-property's feature specific to the
+# parent-property's value.
local rule is-subfeature-of ( parent-property f )
{
if subfeature in $($(f).attributes)
@@ -727,9 +739,8 @@
local specific-subfeature = [ MATCH <(.*):(.*)> : $(f) ] ;
if $(specific-subfeature)
{
- # The feature has the form
- # <topfeature-topvalue:subfeature>,
- # e.g. <toolset-msvc:version>
+ # The feature has the form <topfeature-topvalue:subfeature>, e.g.
+ # <toolset-msvc:version>.
local feature-value = [ split-top-feature $(specific-subfeature[1]) ] ;
if <$(feature-value[1])>$(feature-value[2]) = $(parent-property)
{
@@ -738,10 +749,9 @@
}
else
{
- # The feature has the form <topfeature-subfeature>,
- # e.g. <toolset-version>
+ # The feature has the form <topfeature-subfeature>, e.g.
+ # <toolset-version>
local top-sub = [ split-top-feature [ utility.ungrist $(f) ] ] ;
-
if $(top-sub[2]) && <$(top-sub[1])> = $(parent-property:G)
{
return true ;
@@ -750,46 +760,47 @@
}
}
-# as above, for subproperties
+
+# As for is-subfeature-of but for subproperties.
local rule is-subproperty-of ( parent-property p )
{
return [ is-subfeature-of $(parent-property) $(p:G) ] ;
}
-# Given a property, return the subset of features consisting of all
-# ordinary subfeatures of the property's feature, and all specific
-# subfeatures of the property's feature which are conditional on the
-# property's value.
+
+# Given a property, return the subset of features consisting of all ordinary
+# subfeatures of the property's feature, and all specific subfeatures of the
+# property's feature which are conditional on the property's value.
local rule select-subfeatures ( parent-property : features * )
{
return [ sequence.filter is-subfeature-of $(parent-property) : $(features) ] ;
}
-
-# as above, for subproperties
+
+
+# As for select-subfeatures but for subproperties.
local rule select-subproperties ( parent-property : properties * )
{
return [ sequence.filter is-subproperty-of $(parent-property) : $(properties) ] ;
}
-# Given a property set which may consist of composite and implicit
-# properties and combined subfeature values, returns an expanded,
-# normalized property set with all implicit features expressed
-# explicitly, all subfeature values individually expressed, and all
-# components of composite properties expanded. Non-free features
-# directly expressed in the input properties cause any values of
-# those features due to composite feature expansion to be dropped. If
-# two values of a given non-free feature are directly expressed in the
-# input, an error is issued.
+
+# Given a property set which may consist of composite and implicit properties
+# and combined subfeature values, returns an expanded, normalized property set
+# with all implicit features expressed explicitly, all subfeature values
+# individually expressed, and all components of composite properties expanded.
+# Non-free features directly expressed in the input properties cause any values
+# of those features due to composite feature expansion to be dropped. If two
+# values of a given non-free feature are directly expressed in the input, an
+# error is issued.
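+#
+# For example (see the __test__ rule below for the feature declarations this
+# assumes), expanding
+#
+#   gcc-3.0.1 debug <optimization>on
+#
+# is expected to produce
+#
+#   <toolset>gcc <toolset-gcc:version>3.0.1 <variant>debug <define>_DEBUG
+#   <optimization>on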
rule expand ( properties * )
{
local expanded = [ expand-subfeatures $(properties) ] ;
-
return [ expand-composites $(expanded) ] ;
}
-# Helper rule for minimize, below - return true iff property's feature
-# is present in the contents of the variable named by feature-set-var.
+# Helper rule for minimize. Returns true iff property's feature is present in
+# the contents of the variable named by feature-set-var.
local rule in-features ( feature-set-var property )
{
if $(property:G) in $($(feature-set-var))
@@ -798,9 +809,9 @@
}
}
-# Helper for minimize, below - returns the list with
-# the same properties, but where all subfeatures
-# are in the end of the list
+
+# Helper rule for minimize. Returns the list with the same properties, but with
+# all subfeatures moved to the end of the list.
local rule move-subfeatures-to-the-end ( properties * )
{
local x1 ;
@@ -814,53 +825,53 @@
else
{
x1 += $(p) ;
- }
+ }
}
- return $(x1) $(x2) ;
+ return $(x1) $(x2) ;
}
-# Given an expanded property set, eliminate all redundancy: properties
-# which are elements of other (composite) properties in the set will
-# be eliminated. Non-symmetric properties equal to default values will be
-# eliminated, unless the override a value from some composite property.
-# Implicit properties will be expressed without feature
-# grist, and sub-property values will be expressed as elements joined
-# to the corresponding main property.
+# Given an expanded property set, eliminate all redundancy: properties that are
+# elements of other (composite) properties in the set will be eliminated.
+# Non-symmetric properties equal to default values will be eliminated unless
+# they override a value from some composite property. Implicit properties will
+# be expressed without feature grist, and sub-property values will be expressed
+# as elements joined to the corresponding main property.
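+#
+# For example (mirroring the __test__ rule below), minimizing the expansion of
+# "gcc-3.0.1 debug <optimization>on <stdlib>native" is expected to give back
+#
+#   gcc-3.0.1 debug <optimization>on
+#
+# with the default <stdlib> value dropped and the subfeature value folded back
+# into gcc-3.0.1.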
rule minimize ( properties * )
{
# Precondition checking
local implicits = [ set.intersection $(p:G=) : $(p:G) ] ;
if $(implicits)
{
- error minimize requires an expanded property set, but \"$(implicits[1])\"
- appears to be the value of an un-expanded implicit feature ;
+ errors.error minimize requires an expanded property set, but
+ \"$(implicits[1])\" appears to be the value of an un-expanded
+ implicit feature ;
}
-
- # remove properties implied by composite features
+
+ # Remove properties implied by composite features.
local components = $($(properties).components) ;
local x = [ set.difference $(properties) : $(components) ] ;
-
- # handle subfeatures and implicit features
- x = [ move-subfeatures-to-the-end $(x) ] ;
+
+ # Handle subfeatures and implicit features.
+ x = [ move-subfeatures-to-the-end $(x) ] ;
local result ;
while $(x)
{
local p fullp = $(x[1]) ;
local f = $(p:G) ;
local v = $(p:G=) ;
-
- # eliminate features in implicit properties.
+
+ # Eliminate features in implicit properties.
if implicit in [ attributes $(f) ]
{
p = $(v) ;
}
- # locate all subproperties of $(x[1]) in the property set
+ # Locate all subproperties of $(x[1]) in the property set.
local subproperties = [ select-subproperties $(fullp) : $(x) ] ;
if $(subproperties)
{
- # reconstitute the joined property name
+ # Reconstitute the joined property name.
local sorted = [ sequence.insertion-sort $(subproperties) ] ;
result += $(p)-$(sorted:G="":J=-) ;
@@ -868,16 +879,16 @@
}
else
{
- # eliminate properties whose value is equal to feature's
- # default and which are not symmetric and which do not
- # contradict values implied by composite properties.
-
- # since all component properties of composites in the set
- # have been eliminated, any remaining property whose
- # feature is the same as a component of a composite in the
- # set must have a non-redundant value.
+ # Eliminate properties whose value is equal to the feature's default,
+ # which are not symmetric and which do not contradict values implied
+ # by composite properties.
+
+ # Since all component properties of composites in the set have been
+ # eliminated, any remaining property whose feature is the same as a
+ # component of a composite in the set must have a non-redundant
+ # value.
if $(fullp) != [ defaults $(f) ]
- || symmetric in [ attributes $(f) ]
+ || symmetric in [ attributes $(f) ]
|| $(fullp:G) in $(components:G)
{
result += $(p) ;
@@ -889,33 +900,33 @@
return $(result) ;
}
+
# Combine all subproperties into their parent properties
#
-# Requires: for every subproperty, there is a parent property. All
-# features are explicitly expressed.
+# Requires: for every subproperty, there is a parent property. All features are
+# explicitly expressed.
#
-# This rule probably shouldn't be needed, but
-# build-request.expand-no-defaults is being abused for unintended
-# purposes and it needs help
+# This rule probably shouldn't be needed, but build-request.expand-no-defaults
+# is being abused for unintended purposes and it needs help.
rule compress-subproperties ( properties * )
{
- local all-subs matched-subs result ;
-
+ local all-subs ;
+ local matched-subs ;
+ local result ;
+
for local p in $(properties)
{
if ! $(p:G)
{
- assert.nonempty-variable p:G ; # expecting fully-gristed properties
+ # Expecting fully-gristed properties.
+ assert.nonempty-variable p:G ;
}
-
-
+
if ! subfeature in $($(p:G).attributes)
{
- local subs = [
- sequence.insertion-sort
- [ sequence.filter is-subproperty-of $(p) : $(properties) ]
- ] ;
-
+ local subs = [ sequence.insertion-sort
+ [ sequence.filter is-subproperty-of $(p) : $(properties) ] ] ;
+
matched-subs += $(subs) ;
local subvalues = -$(subs:G=:J=-) ;
@@ -930,17 +941,16 @@
assert.result true : set.equal $(all-subs) : $(matched-subs) ;
return $(result) ;
}
-
-# given an ungristed string, finds the longest prefix which is a
-# top-level feature name followed by a dash, and return a pair
-# consisting of the parts before and after that dash. More
-# interesting than a simple split because feature names can contain
-# dashes.
+
+
+# Given an ungristed string, finds the longest prefix which is a top-level
+# feature name followed by a dash, and returns a pair consisting of the parts
+# before and after that dash. More interesting than a simple split because
+# feature names may contain dashes.
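+#
+# For example (illustrative only), assuming "toolset" and "runtime-link" are
+# top-level feature names:
+#
+#   split-top-feature toolset-msvc         ->  toolset msvc
+#   split-top-feature runtime-link-static  ->  runtime-link static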
local rule split-top-feature ( feature-plus )
{
local e = [ regex.split $(feature-plus) - ] ;
local f = $(e[1]) ;
-
local v ;
while $(e)
{
@@ -953,22 +963,24 @@
}
return $(v) ;
}
-
-# Given a set of properties, add default values for features not
-# represented in the set.
-# Note: if there's there's ordinary feature F1 and composite feature
-# F2, which includes some value for F1, and both feature have default values,
-# then the default value of F1 will be added, not the value in F2. This might
-# not be right idea: consider
+
+
+# Given a set of properties, add default values for features not represented in
+# the set.
+#
+# Note: if there's an ordinary feature F1 and a composite feature F2 which
+# includes some value for F1 and both features have default values, then the
+# default value of F1 will be added (as opposed to the value in F2). This might
+# not be the right idea, e.g. consider:
#
# feature variant : debug ... ;
# <variant>debug : .... <runtime-debugging>on
# feature <runtime-debugging> : off on ;
-#
+#
# Here, when adding default for an empty property set, we'll get
#
# <variant>debug <runtime_debugging>off
-#
+#
# and that's kind of strange.
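+#
+# For example (a simplified version of the checks in the __test__ rule below):
+#
+#   add-defaults <runtime-link>static <optimization>on
+#
+# is expected to return those two properties plus the default values of every
+# other declared top-level feature and of any applicable subfeatures.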
rule add-defaults ( properties * )
{
@@ -976,22 +988,23 @@
{
if $(v) in $(properties)
{
- error add-defaults requires explicitly specified features,
- but \"$(v)\" appears to be the value of an un-expanded implicit feature ;
+ errors.error add-defaults requires explicitly specified features,
+ but \"$(v)\" appears to be the value of an un-expanded implicit
+ feature ;
}
}
# We don't add default for elements with ":" inside. This catches:
# 1. Conditional properties --- we don't want <variant>debug:<define>DEBUG
# to be takes as specified value for <variant>
- # 2. Free properties with ":" in values. We don't care, since free properties
- # don't have defaults.
+ # 2. Free properties with ":" in values. We don't care, since free
+ # properties don't have defaults.
local xproperties = [ MATCH "^([^:]+)$" : $(properties) ] ;
local missing-top = [ set.difference $(.all-top-features) : $(xproperties:G) ] ;
local more = [ defaults $(missing-top) ] ;
properties += $(more) ;
xproperties += $(more) ;
-
- # Add defaults for subfeatures of features which are present
+
+ # Add defaults for subfeatures of features which are present.
for local p in $(xproperties)
{
local s = $($(p:G).subfeatures) ;
@@ -999,19 +1012,20 @@
local missing-subs = [ set.difference <$(f)-$(s)> : $(properties:G) ] ;
properties += [ defaults [ select-subfeatures $(p) : $(missing-subs) ] ] ;
}
-
+
return $(properties) ;
}
+
# Given a property-set of the form
# v1/v2/...vN-1/<fN>vN/<fN+1>vN+1/...<fM>vM
#
# Returns
# v1 v2 ... vN-1 <fN>vN <fN+1>vN+1 ... <fM>vM
#
-# Note that vN...vM may contain slashes. This is resilient to the
-# substitution of backslashes for slashes, since Jam, unbidden,
-# sometimes swaps slash direction on NT.
+# Note that vN...vM may contain slashes. This needs to be resilient to the
+# substitution of backslashes for slashes, since Jam, unbidden, sometimes swaps
+# slash direction on NT.
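+#
+# For example (these cases are checked in the __test__ rule below):
+#
+#   split <x>y/z/<a>b/c/<d>e/f      ->  <x>y/z <a>b/c <d>e/f
+#   split a/b/c/<d>e/f/g/<h>i/j/k   ->  a b c <d>e/f/g <h>i/j/k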
rule split ( property-set )
{
local pieces = [ regex.split $(property-set) [\\/] ] ;
@@ -1032,18 +1046,19 @@
return $(result) ;
}
-# tests of module feature
+
+# Tests of module feature.
local rule __test__ ( )
{
- # use a fresh copy of the feature module
+ # Use a fresh copy of the feature module.
prepare-test feature-test-temp ;
- # These are local rules and so must be explicitly reimported into
- # the testing module
- import feature : extend-feature validate-feature select-subfeatures ;
-
- import errors : try catch ;
import assert ;
+ import errors : try catch ;
+
+ # These are local rules and so must be explicitly reimported into the
+ # testing module.
+ import feature : extend-feature validate-feature select-subfeatures ;
feature toolset : gcc : implicit ;
feature define : : free ;
@@ -1063,84 +1078,74 @@
{
compose <variant>profile : <variant>profile ;
}
- catch composite property <variant>profile cannot have itself as a component ;
+ catch composite property <variant>profile cannot have itself as a component ;
extend-feature toolset : msvc metrowerks ;
- subfeature toolset gcc : version : 2.95.2 2.95.3 2.95.4
- 3.0 3.0.1 3.0.2 ;
-
+ subfeature toolset gcc : version : 2.95.2 2.95.3 2.95.4 3.0 3.0.1 3.0.2 ;
+
assert.true is-subvalue toolset : gcc : version : 2.95.3 ;
assert.false is-subvalue toolset : gcc : version : 1.1 ;
assert.false is-subvalue toolset : msvc : version : 2.95.3 ;
assert.false is-subvalue toolset : : version : yabba ;
-
+
feature yabba ;
subfeature yabba : version : dabba ;
assert.true is-subvalue yabba : : version : dabba ;
-
-
+
subfeature toolset gcc : platform : linux cygwin : optional ;
-
+
assert.result <toolset-gcc:version>
- : select-subfeatures <toolset>gcc
- : <toolset-gcc:version>
- <toolset-msvc:version>
- <toolset-version>
- <stdlib>
- ;
-
+ : select-subfeatures <toolset>gcc
+ : <toolset-gcc:version>
+ <toolset-msvc:version>
+ <toolset-version>
+ <stdlib> ;
+
subfeature stdlib : version : 3 4 : optional ;
assert.result <stdlib-version>
- : select-subfeatures <stdlib>native
- : <toolset-gcc:version>
- <toolset-msvc:version>
- <toolset-version>
- <stdlib-version>
- ;
-
+ : select-subfeatures <stdlib>native
+ : <toolset-gcc:version>
+ <toolset-msvc:version>
+ <toolset-version>
+ <stdlib-version> ;
+
assert.result <toolset>gcc <toolset-gcc:version>3.0.1
- : expand-subfeatures <toolset>gcc-3.0.1 ;
-
+ : expand-subfeatures <toolset>gcc-3.0.1 ;
+
assert.result <toolset>gcc <toolset-gcc:version>3.0.1 <toolset-gcc:platform>linux
- : expand-subfeatures <toolset>gcc-3.0.1-linux ;
+ : expand-subfeatures <toolset>gcc-3.0.1-linux ;
-
assert.result <toolset>gcc <toolset-gcc:version>3.0.1
- : expand <toolset>gcc <toolset-gcc:version>3.0.1 ;
-
+ : expand <toolset>gcc <toolset-gcc:version>3.0.1 ;
+
assert.result <define>foo=x-y
- : expand-subfeatures <define>foo=x-y ;
+ : expand-subfeatures <define>foo=x-y ;
assert.result <toolset>gcc <toolset-gcc:version>3.0.1
- : expand-subfeatures gcc-3.0.1 ;
-
+ : expand-subfeatures gcc-3.0.1 ;
+
assert.result a c e
- : get-values <x> : <x>a <y>b <x>c <y>d <x>e ;
+ : get-values <x> : <x>a <y>b <x>c <y>d <x>e ;
assert.result <toolset>gcc <toolset-gcc:version>3.0.1
- <variant>debug <define>_DEBUG <optimization>on
- : expand gcc-3.0.1 debug <optimization>on
- ;
-
+ <variant>debug <define>_DEBUG <optimization>on
+ : expand gcc-3.0.1 debug <optimization>on ;
+
assert.result <variant>debug <define>_DEBUG <optimization>on
- : expand debug <optimization>on
- ;
+ : expand debug <optimization>on ;
- assert.result <optimization>on <variant>debug <define>_DEBUG
- : expand <optimization>on debug
- ;
+ assert.result <optimization>on <variant>debug <define>_DEBUG
+ : expand <optimization>on debug ;
assert.result <runtime-link>dynamic <optimization>on
- : defaults <runtime-link> <define> <optimization>
- ;
-
- # make sure defaults is resilient to missing grist.
+ : defaults <runtime-link> <define> <optimization> ;
+
+ # Make sure defaults is resilient to missing grist.
assert.result <runtime-link>dynamic <optimization>on
- : defaults runtime-link define optimization
- ;
-
+ : defaults runtime-link define optimization ;
+
feature dummy : dummy1 dummy2 ;
subfeature dummy : subdummy : x y z : optional ;
@@ -1150,68 +1155,54 @@
assert.result optional : attributes <fu> ;
assert.result optional : attributes fu ;
-
- assert.result <runtime-link>static <define>foobar <optimization>on <toolset>gcc:<define>FOO
- <toolset>gcc <variant>debug <stdlib>native <dummy>dummy1 <toolset-gcc:version>2.95.2
-
- : add-defaults <runtime-link>static <define>foobar
- <optimization>on <toolset>gcc:<define>FOO
- ;
-
- assert.result <runtime-link>static <define>foobar <optimization>on <toolset>gcc:<define>FOO
- <fu>fu1 <toolset>gcc <variant>debug <stdlib>native <dummy>dummy1 <fu-subfu2>q
- <toolset-gcc:version>2.95.2
-
- : add-defaults <runtime-link>static <define>foobar
- <optimization>on <toolset>gcc:<define>FOO <fu>fu1
- ;
-
+
+ assert.result <runtime-link>static <define>foobar <optimization>on
+ <toolset>gcc:<define>FOO <toolset>gcc <variant>debug <stdlib>native
+ <dummy>dummy1 <toolset-gcc:version>2.95.2
+ : add-defaults <runtime-link>static <define>foobar <optimization>on
+ <toolset>gcc:<define>FOO ;
+
+ assert.result <runtime-link>static <define>foobar <optimization>on
+ <toolset>gcc:<define>FOO <fu>fu1 <toolset>gcc <variant>debug
+ <stdlib>native <dummy>dummy1 <fu-subfu2>q <toolset-gcc:version>2.95.2
+ : add-defaults <runtime-link>static <define>foobar <optimization>on
+ <toolset>gcc:<define>FOO <fu>fu1 ;
+
set-default <runtime-link> : static ;
- assert.result <runtime-link>static
- : defaults <runtime-link>
- ;
-
+ assert.result <runtime-link>static : defaults <runtime-link> ;
+
assert.result gcc-3.0.1 debug <optimization>on
- : minimize [ expand gcc-3.0.1 debug <optimization>on <stdlib>native ]
- ;
+ : minimize [ expand gcc-3.0.1 debug <optimization>on <stdlib>native ] ;
assert.result gcc-3.0.1 debug <runtime-link>dynamic
- : minimize [ expand gcc-3.0.1 debug <optimization>off <runtime-link>dynamic ]
- ;
+ : minimize
+ [ expand gcc-3.0.1 debug <optimization>off <runtime-link>dynamic ] ;
assert.result gcc-3.0.1 debug
- : minimize [ expand gcc-3.0.1 debug <optimization>off ]
- ;
+ : minimize [ expand gcc-3.0.1 debug <optimization>off ] ;
assert.result debug <optimization>on
- : minimize [ expand debug <optimization>on ]
- ;
+ : minimize [ expand debug <optimization>on ] ;
assert.result gcc-3.0
- : minimize <toolset>gcc <toolset-gcc:version>3.0
- ;
+ : minimize <toolset>gcc <toolset-gcc:version>3.0 ;
assert.result gcc-3.0
- : minimize <toolset-gcc:version>3.0 <toolset>gcc
- ;
+ : minimize <toolset-gcc:version>3.0 <toolset>gcc ;
assert.result <x>y/z <a>b/c <d>e/f
- : split <x>y/z/<a>b/c/<d>e/f
- ;
+ : split <x>y/z/<a>b/c/<d>e/f ;
assert.result <x>y/z <a>b/c <d>e/f
- : split <x>y\\z\\<a>b\\c\\<d>e\\f
- ;
+ : split <x>y\\z\\<a>b\\c\\<d>e\\f ;
assert.result a b c <d>e/f/g <h>i/j/k
- : split a/b/c/<d>e/f/g/<h>i/j/k
- ;
+ : split a/b/c/<d>e/f/g/<h>i/j/k ;
assert.result a b c <d>e/f/g <h>i/j/k
- : split a\\b\\c\\<d>e\\f\\g\\<h>i\\j\\k
- ;
+ : split a\\b\\c\\<d>e\\f\\g\\<h>i\\j\\k ;
- # test error checking
+ # Test error checking.
try ;
{
@@ -1269,16 +1260,14 @@
{
implied-subfeature <toolset> 3.0.1 ;
}
- catch \"3.0.1\" is not a known subfeature value of
- <toolset> ;
+ catch \"3.0.1\" is not a known subfeature value of <toolset> ;
try ;
{
implied-subfeature <toolset> not-a-version : gcc ;
}
- catch \"not-a-version\" is not a known subfeature value of
- <toolset>gcc ;
+ catch \"not-a-version\" is not a known subfeature value of <toolset>gcc ;
- # leave a clean copy of the features module behind
+ # Leave a clean copy of the features module behind.
finish-test feature-test-temp ;
}
Modified: branches/release/tools/build/v2/build/generators.jam
==============================================================================
--- branches/release/tools/build/v2/build/generators.jam (original)
+++ branches/release/tools/build/v2/build/generators.jam 2008-03-15 14:55:28 EDT (Sat, 15 Mar 2008)
@@ -5,65 +5,72 @@
# (See accompanying file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)
-# Manages 'generators' --- objects which can do transformation between different
-# target types and contain algorithm for finding transformation from sources
-# to targets.
+# Manages 'generators' --- objects which can transform between different target
+# types and contain the algorithm for finding a transformation from sources to
+# targets.
#
# The main entry point to this module is generators.construct rule. It is given
-# a list of source targets, desired target type and a set of properties.
-# It starts by selecting 'viable generators', which have any chances of producing
-# the desired target type with the required properties. Generators are ranked and
-# a set of most specific ones is selected.
-#
-# The most specific generators have their 'run' methods called, with the properties
-# and list of sources. Each one selects target which can be directly consumed, and
-# tries to convert the remaining ones to the types it can consume. This is done
-# by recursively calling 'construct' with all consumable types.
+# a list of source targets, a desired target type and a set of properties. It
+# starts by selecting 'viable generators', which have any chance of producing
+# the desired target type with the required properties. Generators are ranked
+# and a set of the most specific ones is selected.
#
-# If the generator has collected all the targets it needs, it creates targets
+# The most specific generators have their 'run' methods called, with the
+# properties and list of sources. Each one selects a target which can be
+# directly consumed, and tries to convert the remaining ones to the types it
+# can consume. This is done by recursively calling 'construct' with all
+# consumable types.
+#
+# If the generator has collected all the targets it needs, it creates targets
# corresponding to result, and returns it. When all generators have been run,
-# results of one of them are selected and returned as result.
+# results of one of them are selected and returned as a result.
#
-# It's quite possible that 'construct' returns more targets that it was asked for.
-# For example, it was asked to target type EXE, but the only found generators produces
-# both EXE and TDS (file with debug) information. The extra target will be returned.
-#
-# Likewise, when generator tries to convert sources to consumable types, it can get
-# more targets that it was asked for. The question is what to do with extra targets.
-# Boost.Build attempts to convert them to requested types, and attempts as early as
-# possible. Specifically, this is done after invoking each generator. (Later I'll
-# document the rationale for trying extra target conversion at that point).
-#
-# That early conversion is not always desirable. Suppose a generator got a source of
-# type Y and must consume one target of type X_1 and one target of type X_2.
-# When converting Y to X_1 extra target of type Y_2 is created. We should not try to
-# convert it to type X_1, because if we do so, the generator will get two targets
-# of type X_1, and will be at loss as to which one to use. Because of that, the
-# 'construct' rule has a parameter, telling if multiple targets can be returned. If
-# the parameter is false, conversion of extra targets is not performed.
-
-import "class" : is-a new ;
-import container ;
-import utility : str equal ;
-import set sequence ;
-import assert ;
-import virtual-target ;
+# It's quite possible that 'construct' returns more targets than it was asked
+# for. For example, it may be asked for a target of type EXE, but the only
+# generator found produces both EXE and TDS (a file with debug information).
+# The extra target will be returned.
+#
+# Likewise, when a generator tries to convert sources to consumable types, it can
+# get more targets than it was asked for. The question is what to do with extra
+# targets. Boost.Build attempts to convert them to requested types, and
+# attempts that as early as possible. Specifically, this is done after invoking
+# each generator. (Later I'll document the rationale for trying extra target
+# conversion at that point).
+#
+# That early conversion is not always desirable. Suppose a generator got a
+# source of type Y and must consume one target of type X_1 and one target of
+# type X_2. When converting Y to X_1, an extra target of type Y_2 is created.
+# We should not try to convert it to type X_1, because if we do so, the
+# generator will get two targets of type X_1 and will be at a loss as to which
+# one to use. Because of that, the 'construct' rule has a parameter telling
+# whether multiple targets can be returned. If the parameter is false,
+# conversion of extra targets is not performed.
+
+import "class" : new ;
+import errors ;
import property-set ;
+import sequence ;
+import set ;
+import utility ;
+import virtual-target ;
+
-if "--debug-generators" in [ modules.peek : ARGV ]
-{
+if "--debug-generators" in [ modules.peek : ARGV ]
+{
.debug = true ;
}
-# Outputs a debug message if generators debugging is on.
-# Each element of 'message' is checked to see if it's class instance.
-# If so, instead of the value, the result of 'str' call is output.
+
+# Outputs a debug message if generators debugging is on. Each element of
+# 'message' is checked to see if it is a class instance. If so, the result of
+# calling 'str' on it is output instead of the value itself.
+#
local rule generators.dout ( message * )
{
if $(.debug)
- {
+ {
ECHO [ sequence.transform utility.str : $(message) ] ;
- }
+ }
}
@@ -72,76 +79,82 @@
return $(.indent:J="") ;
}
+
local rule increase-indent ( )
{
.indent += " " ;
}
+
local rule decrease-indent ( )
{
.indent = $(.indent[2-]) ;
}
+
# Takes a vector of 'virtual-target' instances and makes a normalized
# representation, which is the same for a given set of targets,
# regardless of their order.
+#
rule normalize-target-list ( targets )
{
local v = [ $(targets).get ] ;
$(targets).set $(v[1]) [ sequence.insertion-sort $(v[2-]) : utility.less ] ;
}
+
# Creates a generator
-class generator
+class generator
{
- import generators ;
- import assert ;
import generators : indent increase-indent decrease-indent generators.dout ;
- import generators ;
import set ;
- import utility : equal ;
+ import utility ;
import feature ;
- import errors : error ;
+ import errors ;
import sequence ;
import type ;
import virtual-target ;
import "class" : new ;
import property ;
-
+
EXPORT class_at_generator : indent increase-indent decrease-indent generators.dout ;
-
- rule __init__ (
- id # identifies the generator - should be name of the rule which
- # sets up build actions
- composing ? # whether generator processes each source target in
- # turn, converting it to required types.
- # Ordinary generators pass all sources together to
- # recusrive generators.construct-types call.
-
- : source-types * # types that this generator can handle. If
- # empty, the generator can consume anything.
-
- : target-types-and-names +
- # types the generator will create and, optionally, names for
- # created targets. Each element should have the form
- # type["(" name-pattern ")"]
- # for example, obj(%_x). Name of generated target will be found
- # by replacing % with the name of source, provided explicit name
- # was not specified.
-
- : requirements *
- )
- {
+
+ rule __init__ (
+        id                          # Identifies the generator - should be the
+                                    # name of the rule which sets up the build
+                                    # actions.
+
+        composing ?                 # Whether the generator processes each
+                                    # source target in turn, converting it to
+                                    # the required types. Ordinary generators
+                                    # pass all sources together to the
+                                    # recursive generators.construct-types
+                                    # call.
+
+        : source-types *            # Types that this generator can handle. If
+                                    # empty, the generator can consume anything.
+
+        : target-types-and-names +  # Types the generator will create and,
+                                    # optionally, names for the created
+                                    # targets. Each element should have the
+                                    # form type["(" name-pattern ")"], for
+                                    # example, obj(%_x). The generated target
+                                    # name will be found by replacing % with
+                                    # the name of the source, provided an
+                                    # explicit name was not specified.
+
+ : requirements *
+ )
+ {
self.id = $(id) ;
self.composing = $(composing) ;
self.source-types = $(source-types) ;
self.target-types-and-names = $(target-types-and-names) ;
self.requirements = $(requirements) ;
-
+
for local e in $(target-types-and-names)
- {
+ {
# Create three parallel lists: one with the list of target types,
- # and two other with prefixes and postfixes to be added to target
+            # and two others with prefixes and postfixes to be added to target
# name. We use parallel lists for prefix and postfix (as opposed
            # to mapping), because a given target type might occur several times,
# for example "H H(%_symbols)".
@@ -150,20 +163,21 @@
self.name-prefix += $(m[3]:E="") ;
self.name-postfix += $(m[4]:E="") ;
}
-
+
# Note that 'transform' here, is the same as 'for_each'.
sequence.transform type.validate : $(self.source-types) ;
sequence.transform type.validate : $(self.target-types) ;
}
-
+
############## End of constructor #################
-
+
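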
rule id ( )
{
return $(self.id) ;
}
    # Returns the list of target types the generator accepts.
+ #
rule source-types ( )
{
return $(self.source-types) ;
@@ -171,24 +185,27 @@
# Returns the list of target types that this generator produces.
# It is assumed to be always the same -- i.e. it cannot change depending
- # list of sources.
+ # list of sources.
+    # on the list of sources.
rule target-types ( )
{
return $(self.target-types) ;
}
-
+
# Returns the required properties for this generator. Properties
- # in returned set must be present in build properties if this
+    # in the returned set must be present in the build properties if this
    # generator is to be used. If the result has a grist-only element, the
    # build properties must include some value of that feature.
# XXX: remove this method?
+ #
rule requirements ( )
{
return $(self.requirements) ;
}
-
- # Returns a true value if the generator can be run with the specified
+
+ # Returns a true value if the generator can be run with the specified
# properties.
+ #
rule match-rank ( property-set-to-match )
{
# See if generator's requirements are satisfied by
@@ -196,7 +213,7 @@
# (i.e. grist-only element), as matching any value of the
# feature.
local all-requirements = [ requirements ] ;
-
+
local property-requirements feature-requirements ;
for local r in $(all-requirements)
{
@@ -207,11 +224,11 @@
else
{
feature-requirements += $(r) ;
- }
+ }
}
local properties-to-match = [ $(property-set-to-match).raw ] ;
- if $(property-requirements) in $(properties-to-match)
+ if $(property-requirements) in $(properties-to-match)
&& $(feature-requirements) in $(properties-to-match:G)
{
return true ;
@@ -221,25 +238,27 @@
return ;
}
}
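    # An illustration of the matching behaviour above, using made-up feature
    # values: a generator whose requirements are <toolset>foo <bar> (the second
    # element being grist-only) matches any property set that contains
    # <toolset>foo together with some value of <bar>. For example:
    #
    #   local ps = [ property-set.create <toolset>foo <bar>baz <variant>debug ] ;
    #   [ $(g).match-rank $(ps) ]   # returns "true"
    #
    # Dropping <bar>baz from the property set makes the same call return
    # nothing.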
-
+
    # Returns another generator which differs from $(self) in
    # - id
    # - value of the <toolset> feature in properties
+ #
rule clone ( new-id : new-toolset-properties + )
{
return [ new $(__class__) $(new-id) $(self.composing)
: $(self.source-types)
- : $(self.target-types-and-names)
+ : $(self.target-types-and-names)
# Note: this does not remove any subfeatures of <toolset>
# which might cause problems
: [ property.change $(self.requirements) : <toolset> ]
$(new-toolset-properties)
] ;
}
-
- # Creates another generator that is the same as $(self), except that
- # if 'base' is in target types of $(self), 'type' will in target types
- # of the new generator.
+
+    # Creates another generator that is the same as $(self), except that if
+    # 'base' is in the target types of $(self), 'type' will be in the target
+    # types of the new generator.
+ #
rule clone-and-change-target-type ( base : type )
{
local target-types ;
@@ -253,104 +272,99 @@
else
{
target-types += $(t) ;
- }
+ }
}
-
+
return [ new $(__class__) $(self.id) $(self.composing)
: $(self.source-types)
- : $(target-types)
+ : $(target-types)
: $(self.requirements)
] ;
}
-
- # Tries to invoke this generator on the given sources. Returns a
- # list of generated targets (instances of 'virtual-target').
- # Returning nothing from run indicates that the generator was
- # unable to create the target.
+ # Tries to invoke this generator on the given sources. Returns a list of
+ # generated targets (instances of 'virtual-target'). Returning nothing from
+ # run indicates that the generator was unable to create the target.
+ #
rule run ( project # Project for which the targets are generated
- name ? # Determines the name of 'name' attribute for
+ name ? # Determines the name of 'name' attribute for
# all generated targets. See 'generated-targets' method.
: property-set # Desired properties for generated targets.
: sources + # Source targets.
)
- {
+ {
generators.dout [ indent ] " ** generator" $(self.id) ;
generators.dout [ indent ] " multiple:" $(mutliple) ;
- generators.dout [ indent ] " composing:" $(self.composing) ;
-
+ generators.dout [ indent ] " composing:" $(self.composing) ;
+
if ! $(self.composing) && $(sources[2]) && $(self.source-types[2])
{
errors.error "Unsupported source/source-type combination" ;
}
-
+
# We don't run composing generators if no name is specified. The reason
# is that composing generator combines several targets, which can have
# different names, and it cannot decide which name to give for produced
# target. Therefore, the name must be passed.
#
- # This in effect, means that composing generators are runnable only
- # at top-level of transofrmation graph, or if name is passed explicitly.
- # Thus, we dissallow composing generators in the middle. For example, the
- # transofrmation CPP -> OBJ -> STATIC_LIB -> RSP -> EXE won't be allowed
- # (the OBJ -> STATIC_LIB generator is composing)
+        # This, in effect, means that composing generators are runnable only
+        # at the top level of a transformation graph, or if their name is
+        # passed explicitly. Thus, we disallow composing generators in the
+        # middle. For example, the transformation
+        # CPP -> OBJ -> STATIC_LIB -> RSP -> EXE won't be allowed (the
+        # OBJ -> STATIC_LIB generator is composing).
if ! $(self.composing) || $(name)
- {
+ {
run-really $(project) $(name) : $(property-set) : $(sources) ;
- }
+ }
}
-
-
+
rule run-really ( project name ? : property-set : sources + )
{
# Targets that this generator will consume directly.
local consumed = ;
# Targets that can't be consumed and will be returned as-is.
local bypassed = ;
-
+
if $(self.composing)
{
convert-multiple-sources-to-consumable-types $(project)
- : $(property-set) : $(sources) : consumed bypassed ;
+ : $(property-set) : $(sources) : consumed bypassed ;
}
else
- {
- convert-to-consumable-types $(project) $(name) :
- $(property-set) : $(sources)
- :
- : consumed bypassed ;
+ {
+ convert-to-consumable-types $(project) $(name) : $(property-set)
+ : $(sources) : : consumed bypassed ;
}
-
+
local result ;
- if $(consumed)
- {
- result = [ construct-result $(consumed) : $(project) $(name)
+ if $(consumed)
+ {
+ result = [ construct-result $(consumed) : $(project) $(name)
: $(property-set) ] ;
}
-
-
+
if $(result)
{
- generators.dout [ indent ] " SUCCESS: " $(result) ;
+ generators.dout [ indent ] " SUCCESS: " $(result) ;
}
else
{
generators.dout [ indent ] " FAILURE" ;
}
generators.dout ;
- return $(result) ;
+ return $(result) ;
}
- # Constructs the dependency graph that will be returned by this
- # generator
- rule construct-result (
+ # Constructs the dependency graph to be returned by this generator.
+ #
+ rule construct-result (
consumed + # Already prepared list of consumable targets
- # If generator requires several source files will contain
+                     # If the generator requires several source files, it will contain
# exactly len $(self.source-types) targets with matching types
- # Otherwise, might contain several targets with the type of
- # $(self.source-types[1])
- : project name ?
- : property-set # Properties to be used for all actions create here
+ # Otherwise, might contain several targets with the type of
+ # $(self.source-types[1])
+ : project name ?
+        : property-set # Properties to be used for all actions created here.
)
{
local result ;
@@ -358,23 +372,23 @@
if ! $(self.source-types[2]) && ! $(self.composing)
{
for local r in $(consumed)
- {
+ {
result += [ generated-targets $(r) : $(property-set) : $(project) $(name) ] ; #(targets) ;
}
}
else
{
- if $(consumed)
+ if $(consumed)
{
- result += [ generated-targets $(consumed) : $(property-set)
+ result += [ generated-targets $(consumed) : $(property-set)
: $(project) $(name) ] ;
- }
+ }
}
return $(result) ;
}
-
- # Determine the name of the produced target from the
- # names of the sources.
+
+ # Determine the name of the produced target from the names of the sources.
+ #
rule determine-output-name ( sources + )
{
        # The simple case is when a name
@@ -387,126 +401,127 @@
        # dot. In the second case -- not sure, but for now take
# the part till the last dot too.
name = [ utility.basename [ $(sources[1]).name ] ] ;
-
+
for local s in $(sources[2])
{
local n2 = [ utility.basename [ $(s).name ] ] ;
if $(n2) != $(name)
{
- error "$(self.id): source targets have different names: cannot determine target name" ;
+ errors.error "$(self.id): source targets have different names: cannot determine target name" ;
}
}
-
+
# Names of sources might include directory. We should strip it.
name = $(name:D=) ;
-
+
return $(name) ;
}
-
- # Constructs targets that are created after consuming 'sources'.
- # The result will be the list of virtual-target, which the same length
- # as 'target-types' attribute and with corresponding types.
- #
- # When 'name' is empty, all source targets must have the same value of
- # the 'name' attribute, which will be used instead of the 'name' argument.
+
+    # Constructs targets that are created after consuming 'sources'. The
+    # result will be a list of virtual-targets, of the same length as the
+    # 'target-types' attribute and with corresponding types.
+ #
+ # When 'name' is empty, all source targets must have the same value of the
+ # 'name' attribute, which will be used instead of the 'name' argument.
#
# The value of 'name' attribute for each generated target will be equal to
# the 'name' parameter if there's no name pattern for this type. Otherwise,
- # the '%' symbol in the name pattern will be replaced with the 'name' parameter
- # to obtain the 'name' attribute.
+ # the '%' symbol in the name pattern will be replaced with the 'name'
+ # parameter to obtain the 'name' attribute.
#
- # For example, if targets types are T1 and T2(with name pattern "%_x"), suffixes
- # for T1 and T2 are .t1 and t2, and source if foo.z, then created files would
- # be "foo.t1" and "foo_x.t2". The 'name' attribute actually determined the
- # basename of a file.
+    # For example, if target types are T1 and T2 (with name pattern "%_x"),
+    # suffixes for T1 and T2 are .t1 and .t2, and the source is foo.z, then the
+    # created files would be "foo.t1" and "foo_x.t2". The 'name' attribute
+    # actually determines the basename of a file.
#
# Note that this pattern mechanism has nothing to do with implicit patterns
- # in make. It's a way to produce target which name is different for name of
+    # in make. It's a way to produce a target whose name differs from that of the
# source.
+ #
rule generated-targets ( sources + : property-set : project name ? )
{
if ! $(name)
{
name = [ determine-output-name $(sources) ] ;
}
-
+
# Assign an action for each target
local action = [ action-class ] ;
- local a = [ class.new $(action) $(sources) : $(self.id) :
+ local a = [ class.new $(action) $(sources) : $(self.id) :
$(property-set) ] ;
-
+
# Create generated target for each target type.
local targets ;
local pre = $(self.name-prefix) ;
local post = $(self.name-postfix) ;
- for local t in $(self.target-types)
- {
+ for local t in $(self.target-types)
+ {
local generated-name = $(pre[1])$(name)$(post[1]) ;
pre = $(pre[2-]) ;
post = $(post[2-]) ;
-
- targets += [ class.new file-target $(generated-name)
+
+ targets += [ class.new file-target $(generated-name)
: $(t) : $(project) : $(a) ] ;
- }
-
+ }
+
return [ sequence.transform virtual-target.register : $(targets) ] ;
- }
-
+ }
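    # A sketch of the T1/T2 example from the comment above, as it could be
    # written in a tool module; the types, the suffixes and the generator id
    # are all illustrative:
    #
    #   type.register T1 : t1 ;
    #   type.register T2 : t2 ;
    #   type.register Z : z ;
    #   generators.register-standard hypothetical.t : Z : T1 "T2(%_x)" ;
    #
    # For a source foo.z this yields foo.t1 and foo_x.t2, the "%_x" pattern
    # supplying the basename of the second target.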
+
# Attempts to convert 'source' to the types that this generator can
    # handle. The intention is to produce the set of targets that should be
    # used when the generator is run.
- rule convert-to-consumable-types ( project name ? :
- property-set : sources +
- : only-one ? # convert 'source' to only one of source types
- # if there's more that one possibility, report an
- # error
- : consumed-var # name of variable which recieves all targets which
- # can be consumed.
- bypassed-var # name variable which recieves all targets which
- # cannot be consumed
+ #
+ rule convert-to-consumable-types ( project name ? :
+ property-set : sources +
+        : only-one ?   # Convert 'source' to only one of the source types. If
+                       # there's more than one possibility, report an error.
+        : consumed-var # Name of the variable which receives all targets that
+                       # can be consumed.
+          bypassed-var # Name of the variable which receives all targets that
+                       # cannot be consumed.
)
- {
+ {
# We're likely to be passed 'consumed' and 'bypassed'
# var names. Use "_" to avoid name conflicts.
local _consumed ;
local _bypassed ;
- local missing-types ;
+ local missing-types ;
if $(sources[2])
{
- # Don't know how to handle several sources yet. Just try
+ # Don't know how to handle several sources yet. Just try
            # to pass the request to other generators.
missing-types = $(self.source-types) ;
}
else
- {
+ {
consume-directly $(sources) : _consumed : missing-types ;
}
-
+
# No need to search for transformation if
# some source type has consumed source and
# no more source types are needed.
- if $(only-one) && $(_consumed)
+ if $(only-one) && $(_consumed)
{
missing-types = ;
}
-
+
        # TODO: we should check that only one source type is created if the
        # value of 'only-one' is true.
        # TODO: consider if consumed/bypassed separation should be done by
        # 'construct-types'.
-
+
if $(missing-types)
- {
+ {
local transformed = [ generators.construct-types $(project) $(name)
: $(missing-types) : $(property-set) : $(sources) ] ;
-
- # Add targets of right type to 'consumed'. Add others to
- # 'bypassed'. The 'generators.construct' rule has done
- # its best to convert everything to the required type.
- # There's no need to rerun it on targets of different types.
-
- # NOTE: ignoring usage requirements
+
+ # Add targets of right type to 'consumed'. Add others to 'bypassed'.
+ # The 'generators.construct' rule has done its best to convert
+ # everything to the required type. There's no need to rerun it on
+ # targets of different types.
+
+ # NOTE: ignoring usage requirements.
for local t in $(transformed[2-])
{
if [ $(t).type ] in $(missing-types)
@@ -517,35 +532,36 @@
{
_bypassed += $(t) ;
}
- }
- }
-
- _consumed = [ sequence.unique $(_consumed) ] ;
+ }
+ }
+
+ _consumed = [ sequence.unique $(_consumed) ] ;
_bypassed = [ sequence.unique $(_bypassed) ] ;
-
+
# remove elements of '_bypassed' that are in '_consumed'
-
- # Suppose the target type of current generator, X is produced from
+
+ # Suppose the target type of current generator, X is produced from
# X_1 and X_2, which are produced from Y by one generator.
# When creating X_1 from Y, X_2 will be added to 'bypassed'
# Likewise, when creating X_2 from Y, X_1 will be added to 'bypassed'
# But they are also in 'consumed'. We have to remove them from
# bypassed, so that generators up the call stack don't try to convert
- # them.
-
+ # them.
+
# In this particular case, X_1 instance in 'consumed' and X_1 instance
# in 'bypassed' will be the same: because they have the same source and
# action name, and 'virtual-target.register' won't allow two different
# instances. Therefore, it's OK to use 'set.difference'.
-
+
_bypassed = [ set.difference $(_bypassed) : $(_consumed) ] ;
-
-
+
+
$(consumed-var) += $(_consumed) ;
$(bypassed-var) += $(_bypassed) ;
}
-
+
# Converts several files to consumable types.
+ #
rule convert-multiple-sources-to-consumable-types
( project : property-set : sources * : consumed-var bypassed-var )
{
@@ -562,23 +578,23 @@
{
generators.dout [ indent ] " failed to convert " $(source) ;
}
- $(consumed-var) += $(_c) ;
+ $(consumed-var) += $(_c) ;
$(bypassed-var) += $(_b) ;
- }
+ }
}
-
+
rule consume-directly ( source : consumed-var : missing-types-var )
{
local real-source-type = [ $(source).type ] ;
-
+
# If there are no source types, we can consume anything
local source-types = $(self.source-types) ;
source-types ?= $(real-source-type) ;
-
+
for local st in $(source-types)
{
            # The 'source' is of the right type already.
- if $(real-source-type) = $(st) ||
+ if $(real-source-type) = $(st) ||
[ type.is-derived $(real-source-type) $(st) ]
{
$(consumed-var) += $(source) ;
@@ -587,44 +603,45 @@
{
$(missing-types-var) += $(st) ;
}
- }
+ }
}
-
-
- # Returns the class to be used to actions. Default implementation
+
+    # Returns the class to be used for actions. The default implementation
# returns "action".
+ #
rule action-class ( )
{
return "action" ;
- }
+ }
}
-import errors : error ;
.generators = ;
-# Registers new generator instance 'g'.
+
+# Registers a new generator instance 'g'.
+#
rule register ( g )
{
.generators += $(g) ;
-
+
# A generator can produce several targets of the
    # same type. We want a unique occurrence of that generator
# in .generators.$(t) in that case, otherwise, it will
# be tried twice and we'll get false ambiguity.
for local t in [ sequence.unique [ $(g).target-types ] ]
- {
+ {
.generators.$(t) += $(g) ;
- }
-
+ }
+
# Update the set of generators for toolset
-
+
# TODO: should we check that generator with this id
# is not already registered. For example, the fop.jam
# module intentionally declared two generators with the
# same id, so such check will break it.
local id = [ $(g).id ] ;
-
+
# Some generators have multiple periods in their name, so the
# normal $(id:S=) won't generate the right toolset name.
# e.g. if id = gcc.compile.c++, then
@@ -640,87 +657,88 @@
}
.generators-for-toolset.$(base) += $(g) ;
}
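# A sketch of the id-to-toolset mapping discussed above; the regular expression
# is chosen here for illustration and is not necessarily the exact one used by
# 'register':
#
#   local id = gcc.compile.c++ ;
#   local base = [ MATCH "^([^.]+)[.]" : $(id) ] ;  # "gcc", not "gcc.compile"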
-
-# Creates new instance of the 'generator' class and registers it.
-# Retursn the creates instance.
-# Rationale: the instance is returned so that it's possible to first register
-# a generator and then call 'run' method on that generator, bypassing all
-# generator selection.
+
+
+# Creates a new instance of the 'generator' class and registers it. Returns
+# the created instance. Rationale: the instance is returned so that it's
+# possible to first register a generator and then call its 'run' method,
+# bypassing all generator selection.
+#
rule register-standard ( id : source-types * : target-types + : requirements * )
{
local g = [ new generator $(id) : $(source-types) : $(target-types)
- : $(requirements) ] ;
- register $(g) ;
+ : $(requirements) ] ;
+ register $(g) ;
return $(g) ;
}
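# A sketch of the rationale above; the id, the types and the later call site
# are hypothetical:
#
#   local g = [ generators.register-standard hypothetical.compile : CPP : OBJ ] ;
#   ...
#   [ $(g).run $(project) $(name) : $(property-set) : $(sources) ]
#
# Keeping the returned instance lets code that already knows which generator it
# wants invoke it directly, bypassing all generator selection.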
-# Creates new instance of the 'composing-generator' class and
-# registers it.
+
+# Creates a new instance of the 'composing-generator' class and registers it.
+#
rule register-composing ( id : source-types * : target-types + : requirements * )
{
- local g = [ new generator $(id) true : $(source-types)
- : $(target-types) : $(requirements) ] ;
+ local g = [ new generator $(id) true : $(source-types) : $(target-types)
+ : $(requirements) ] ;
register $(g) ;
return $(g) ;
}
-# Returns all generators which belong to 'toolset', i.e. which
-# ids are $(toolset).<something>
+
+# Returns all generators which belong to 'toolset', i.e. whose ids are
+# '$(toolset).<something>'.
+#
rule generators-for-toolset ( toolset )
{
return $(.generators-for-toolset.$(toolset)) ;
}
-# Make generator 'overrider-id' be preferred to
-# 'overridee-id'. If, when searching for generators
-# that could produce a target of certain type,
-# both those generators are amoung viable generators,
-# the overridden generator is immediately discarded.
-#
-# The overridden generators are discarded immediately
-# after computing the list of viable generators, before
-# running any of them.
+
+# Makes the generator 'overrider-id' be preferred to 'overridee-id'. If, when
+# searching for generators that could produce a target of a certain type, both
+# of those generators are among the viable generators, the overridden generator
+# is immediately discarded.
+#
+# The overridden generators are discarded immediately after computing the list
+# of viable generators, before running any of them.
+#
rule override ( overrider-id : overridee-id )
{
- .override.$(overrider-id) += $(overridee-id) ;
+ .override.$(overrider-id) += $(overridee-id) ;
}
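# A sketch of how 'override' is used from a tool module; both generator ids
# here are hypothetical:
#
#   import generators ;
#   generators.override hypothetical.fast-compile : hypothetical.compile ;
#
# Whenever both generators are viable for the same target type, the specialized
# one is kept and the generic one is discarded immediately.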
-
-
-# Set if results of the current generators search are going to be cached
-# This means no futher attempts to cache generators search should be
-# made.
+# Set if the results of the current generator search are going to be cached.
+# This means no further attempts to cache generator searches should be made.
.caching = ;
-# Returns a list of source type which can possibly be converted
-# to 'target-type' by some chain of generator invocation.
+
+# Returns a list of source types which can possibly be converted to
+# 'target-type' by some chain of generator invocations.
+#
+# More formally, takes all generators for 'target-type' and returns the union
+# of source types for those generators and the result of calling itself
+# recursively on those source types.
#
-# More formally, takes all generators for 'target-type' and
-# returns union of source types for those generators and result
-# of calling itself recusrively on source types.
local rule viable-source-types-real ( target-type )
{
local generators ;
local t = [ type.all-bases $(target-type) ] ;
-
+
local result ;
- # 't' is the list of types which are not yet processed
+ # 't' is the list of types which have not yet been processed.
while $(t)
{
- # Find all generators for current type.
+ # Find all generators for current type.
# Unlike 'find-viable-generators' we don't care about property-set.
local generators = $(.generators.$(t[1])) ;
t = $(t[2-]) ;
-
-
while $(generators)
{
local g = $(generators[1]) ;
generators = $(generators[2-]) ;
-
+
if ! [ $(g).source-types ]
{
# Empty source types -- everything can be accepted
@@ -730,10 +748,10 @@
# This will terminate outer loop.
t = ;
}
-
- for local source-type in [ $(g).source-types ]
+
+ for local source-type in [ $(g).source-types ]
{
- if ! $(source-type) in $(result)
+ if ! $(source-type) in $(result)
{
# If generator accepts 'source-type' it
# will happily accept any type derived from it
@@ -741,22 +759,22 @@
for local n in $(all)
{
if ! $(n) in $(result)
- {
+ {
t += $(n) ;
result += $(n) ;
- }
- }
- }
- }
- }
- }
-
- result = [ sequence.unique $(result) ] ;
-
- return $(result) ;
+ }
+ }
+ }
+ }
+ }
+ }
+
+ return [ sequence.unique $(result) ] ;
}
+
# Helper rule, caches the result of 'viable-source-types-real'.
+#
rule viable-source-types ( target-type )
{
local key = .vst.$(target-type) ;
@@ -769,46 +787,49 @@
}
$(key) = $(v) ;
}
-
+
if $($(key)) != none
{
return $($(key)) ;
- }
+ }
}
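# An illustration of the kind of answer the rule above produces, assuming the
# usual C++ types and generators have been registered by the loaded toolsets:
#
#   local types = [ generators.viable-source-types EXE ] ;
#
# would typically include OBJ, CPP, C and other types reachable through chained
# generators; the exact list depends entirely on what has been registered.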
-# Returns the list of source types, which, when passed to 'run'
-# method of 'generator', has some change of being eventually used
-# (probably after conversion by other generators)
-rule viable-source-types-for-generator-real ( generator )
-{
+
+# Returns the list of source types which, when passed to the 'run' method of
+# 'generator', have some chance of being eventually used (probably after
+# conversion by other generators).
+#
+rule viable-source-types-for-generator-real ( generator )
+{
local source-types = [ $(generator).source-types ] ;
if ! $(source-types)
{
- # If generator does not specify any source types,
- # it might be special generator like builtin.lib-generator
- # which just relays to other generators. Return '*' to
- # indicate that any source type is possibly OK, since we don't
- # know for sure.
+        # If the generator does not specify any source types, it might be a
+        # special generator like builtin.lib-generator which just relays to
+        # other generators. Return '*' to indicate that any source type is
+        # possibly OK, since we don't know for sure.
return * ;
}
else
- {
+ {
local result ;
for local s in $(source-types)
{
- result += [ type.all-derived $(s) ]
+ result += [ type.all-derived $(s) ]
[ generators.viable-source-types $(s) ] ;
}
- result = [ sequence.unique $(result) ] ;
+ result = [ sequence.unique $(result) ] ;
if * in $(result)
{
result = * ;
- }
+ }
return $(result) ;
- }
-}
+ }
+}
+
-# Helper rule, caches the result of 'viable-source-types-for-genrator'.
+# Helper rule, caches the result of 'viable-source-types-for-generator'.
+#
local rule viable-source-types-for-generator ( generator )
{
local key = .vstg.$(generator) ;
@@ -819,36 +840,33 @@
{
v = none ;
}
- $(key) = $(v) ;
+ $(key) = $(v) ;
}
-
+
if $($(key)) != none
{
return $($(key)) ;
- }
+ }
}
-
-# Returns usage requirements + list of created targets
-local rule try-one-generator-really ( project name ? : generator :
- target-type : property-set : sources * )
+# Returns usage requirements + list of created targets.
+#
+local rule try-one-generator-really ( project name ? : generator : target-type
+ : property-set : sources * )
{
local targets =
- [ $(generator).run $(project) $(name)
- : $(property-set)
- : $(sources)
- ] ;
-
+ [ $(generator).run $(project) $(name) : $(property-set) : $(sources) ] ;
+
local usage-requirements ;
local success ;
-
+
generators.dout [ indent ] returned $(targets) ;
-
+
if $(targets)
{
success = true ;
-
+
if [ class.is-a $(targets[1]) : property-set ]
{
usage-requirements = $(targets[1]) ;
@@ -859,57 +877,56 @@
usage-requirements = [ property-set.empty ] ;
}
}
-
+
generators.dout [ indent ] " generator" [ $(generator).id ] " spawned " ;
- generators.dout [ indent ] " " $(targets) ;
+ generators.dout [ indent ] " " $(targets) ;
if $(usage-requirements)
{
        generators.dout [ indent ] "  with usage requirements:" $(usage-requirements) ;
}
-
-
+
if $(success)
{
return $(usage-requirements) $(targets) ;
}
}
-# Checks if generator invocation can be pruned, because it's guaranteed
-# to fail. If so, quickly returns empty list. Otherwise, calls
-# try-one-generator-really.
-local rule try-one-generator ( project name ? : generator :
- target-type : property-set : sources * )
-{
+
+# Checks if a generator invocation can be pruned because it's guaranteed to
+# fail. If so, quickly returns an empty list. Otherwise, calls
+# try-one-generator-really.
+#
+local rule try-one-generator ( project name ? : generator : target-type
+ : property-set : sources * )
+{
local source-types ;
for local s in $(sources)
{
source-types += [ $(s).type ] ;
}
- local viable-source-types =
+ local viable-source-types =
[ viable-source-types-for-generator $(generator) ] ;
-
- if $(source-types) && $(viable-source-types) != * &&
- ! [ set.intersection $(source-types) : $(viable-source-types) ]
+
+ if $(source-types) && $(viable-source-types) != * &&
+ ! [ set.intersection $(source-types) : $(viable-source-types) ]
{
local id = [ $(generator).id ] ;
generators.dout [ indent ] " ** generator '$(id)' pruned" ;
#generators.dout [ indent ] "source-types" '$(source-types)' ;
#generators.dout [ indent ] "viable-source-types" '$(viable-source-types)' ;
- }
+ }
else {
return [ try-one-generator-really $(project) $(name)
: $(generator)
: $(target-type) : $(property-set) : $(sources) ] ;
- }
+ }
}
-
-rule construct-types ( project name ? : target-types + :
- property-set : sources + )
+rule construct-types ( project name ? : target-types + : property-set
+ : sources + )
{
local result ;
- local matched-types ;
+ local matched-types ;
local usage-requirements = [ property-set.empty ] ;
for local t in $(target-types)
{
@@ -922,14 +939,12 @@
matched-types += $(t) ;
}
}
- # TODO: have to introduce parameter controlling if
- # several types can be matches and add appropriate
- # checks
-
- # TODO: need to review the documentation for
- # 'construct' to see if it should return $(source) even
- # if nothing can be done with it. Currents docs seem to
- # imply that, contrary to the behaviour.
+    # TODO: have to introduce a parameter controlling if several types can be
+    # matched and add appropriate checks.
+
+    # TODO: need to review the documentation for 'construct' to see if it
+    # should return $(source) even if nothing can be done with it. The current
+    # docs seem to imply that, contrary to the behaviour.
if $(result)
{
return $(usage-requirements) $(result) ;
@@ -940,8 +955,9 @@
}
}
-# Ensures all 'targets' have types. If this is not so, exists with
-# error.
+
+# Ensures all 'targets' have their type. If this is not so, exits with an error.
+#
local rule ensure-type ( targets * )
{
for local t in $(targets)
@@ -949,20 +965,22 @@
if ! [ $(t).type ]
{
errors.error "target" [ $(t).str ] "has no type" ;
- }
- }
+ }
+ }
}
-
+
+
# Returns generators which can be used to construct a target of the specified
# type with the specified properties. Uses the following algorithm:
-# - iterates over requested target-type and all it's bases (in the order returned bt
-# type.all-bases.
-# - for each type find all generators that generate that type and which requirements
-# are satisfied by properties.
+# - iterates over the requested target-type and all its bases (in the order
+#   returned by type.all-bases).
+# - for each type, finds all generators that generate that type and whose
+#   requirements are satisfied by the properties.
# - if the set of generators is not empty, returns that set.
#
# Note: this algorithm explicitly ignores generators for base classes if there's
-# at least one generator for requested target-type.
+# at least one generator for the requested target-type.
+#
local rule find-viable-generators-aux ( target-type : property-set )
{
# Select generators that can create the required target type.
@@ -971,52 +989,51 @@
import type ;
local t = [ type.all-bases $(target-type) ] ;
-
- generators.dout [ indent ] find-viable-generators target-type= $(target-type)
- property-set= [ $(property-set).as-path ]
- ;
-
- # Get the lit of generators for the requested type.
- # If no generator is registered, try base type, and so on.
+
+ generators.dout [ indent ] find-viable-generators target-type= $(target-type)
+ property-set= [ $(property-set).as-path ] ;
+
+ # Get the list of generators for the requested type. If no generator is
+ # registered, try base type, and so on.
local generators ;
while $(t[1])
{
- generators.dout [ indent ] "trying type" $(t[1]) ;
+ generators.dout [ indent ] "trying type" $(t[1]) ;
if $(.generators.$(t[1]))
{
generators.dout [ indent ] "there are generators for this type" ;
generators = $(.generators.$(t[1])) ;
-
+
if $(t[1]) != $(target-type)
{
- # We're here, when no generators for target-type are found,
- # but there are some generators for a base type.
- # We'll try to use them, but they will produce targets of
- # base type, not of 'target-type'. So, we clone the generators
- # and modify the list of target types.
+                # We get here when no generators for target-type are found,
+                # but there are some generators for a base type. We'll try to
+                # use them, but they will produce targets of the base type,
+                # not of 'target-type'. So, we clone the generators and modify
+                # the list of target types.
local generators2 ;
for local g in $(generators)
{
- # generators.register adds generator to the list of generators
- # for toolsets, which is a bit strange, but should work.
- # That list is only used when inheriting toolset, which
- # should have being done before generators are run.
- generators2 += [
+ # generators.register adds a generator to the list of
+ # generators for toolsets, which is a bit strange, but
+ # should work. That list is only used when inheriting a
+ # toolset, which should have been done before running
+ # generators.
+ generators2 += [
$(g).clone-and-change-target-type $(t[1]) : $(target-type) ] ;
generators.register $(generators2[-1]) ;
- }
+ }
generators = $(generators2) ;
- }
+ }
t = ;
- }
- t = $(t[2-]) ;
+ }
+ t = $(t[2-]) ;
}
-
-
+
for local g in $(generators)
{
generators.dout [ indent ] "trying generator" [ $(g).id ] "(" [ $(g).source-types ] -> [ $(g).target-types ] ")" ;
-
+
local m = [ $(g).match-rank $(property-set) ] ;
if $(m)
{
@@ -1024,10 +1041,11 @@
viable-generators += $(g) ;
}
}
-
+
return $(viable-generators) ;
}
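# An illustration of the base-type fallback described above, with hypothetical
# types (named with trailing underscores to avoid clashing with the real
# LIB/SHARED_LIB types):
#
#   type.register LIB_ ;
#   type.register SHARED_LIB_ : : LIB_ ;   # SHARED_LIB_ is derived from LIB_
#   generators.register-standard hypothetical.lib : OBJ : LIB_ ;
#
# A request to construct a SHARED_LIB_ finds no SHARED_LIB_ generator, so the
# LIB_ generator above is cloned via clone-and-change-target-type to produce
# SHARED_LIB_ targets instead.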
+
rule find-viable-generators ( target-type : property-set )
{
local key = $(target-type).$(property-set) ;
@@ -1038,29 +1056,29 @@
if ! $(l)
{
l = none ;
- }
+ }
.fv.$(key) = $(l) ;
}
-
+
if $(l) = none
{
l = ;
}
-
+
local viable-generators ;
for local g in $(l)
{
- # Avoid trying the same generator twice on different levels.
- if ! $(g) in $(.active-generators)
+ # Avoid trying the same generator twice on different levels.
+ if ! $(g) in $(.active-generators)
{
viable-generators += $(g) ;
- }
+ }
}
-
+
# Generators which override 'all'.
local all-overrides ;
    # Generators which are overridden.
- local overriden-ids ;
+ local overriden-ids ;
for local g in $(viable-generators)
{
local id = [ $(g).id ] ;
@@ -1069,8 +1087,8 @@
if all in $(this-overrides)
{
all-overrides += $(g) ;
- }
- }
+ }
+ }
if $(all-overrides)
{
viable-generators = $(all-overrides) ;
@@ -1081,34 +1099,37 @@
if ! [ $(g).id ] in $(overriden-ids)
{
result += $(g) ;
- }
+ }
}
-
+
return $(result) ;
-}
-
+}
+
+
.construct-stack = ;
-# Attempts to construct target by finding viable generators, running them
-# and selecting the dependency graph
+
+# Attempts to construct a target by finding viable generators, running them and
+# selecting the dependency graph.
+#
local rule construct-really (
project name ? : target-type : property-set : sources * )
{
viable-generators = [ find-viable-generators $(target-type) : $(property-set) ] ;
-
- generators.dout [ indent ] "*** " [ sequence.length $(viable-generators) ]
- " viable generators" ;
-
+
+ generators.dout [ indent ] "*** " [ sequence.length $(viable-generators) ]
+ " viable generators" ;
+
local result ;
local generators-that-succeeded ;
for local g in $(viable-generators)
{
# This variable will be restored on exit from this scope.
local .active-generators = $(g) $(.active-generators) ;
-
+
local r = [ try-one-generator $(project) $(name) : $(g) : $(target-type) :
$(property-set) : $(sources) ] ;
-
+
if $(r)
{
generators-that-succeeded += $(g) ;
@@ -1140,75 +1161,73 @@
else
{
result = $(r) ;
- }
+ }
}
}
-
+
return $(result) ;
-}
-
-
-# Attempts to create target of 'target-type' with 'properties'
-# from 'sources'. The 'sources' are treated as a collection of
-# *possible* ingridients -- i.e. it is not required to consume
-# them all. If 'multiple' is true, the rule is allowed to return
-# several targets of 'target-type'.
-#
+}
+
+
+# Attempts to create a target of 'target-type' with 'properties' from
+# 'sources'. The 'sources' are treated as a collection of *possible*
+# ingredients, i.e. it is not required to consume them all. If 'multiple' is
+# true, the rule is allowed to return several targets of 'target-type'.
#
-# Returns a list of target. When this invocation is first instance of
+# Returns a list of targets. When this invocation is the first instance of
# 'construct' in the stack, it returns only targets of the requested 'target-type',
-# otherwise, returns also unused sources and additionally generated
-# targets.
+# otherwise, it also returns unused sources and additionally generated targets.
+#
rule construct ( project name ? : target-type : property-set * : sources * )
{
if (.construct-stack)
{
ensure-type $(sources) ;
}
-
+
.construct-stack += 1 ;
increase-indent ;
if $(.debug)
- {
+ {
generators.dout [ indent ] "*** construct" $(target-type) ;
-
+
for local s in $(sources)
{
generators.dout [ indent ] " from" $(s) ;
}
- generators.dout [ indent ] " properties:" [ $(property-set).raw ] ;
+ generators.dout [ indent ] " properties:" [ $(property-set).raw ] ;
}
-
-
- local result = [ construct-really $(project) $(name)
+
+ local result = [ construct-really $(project) $(name)
: $(target-type) : $(property-set) : $(sources) ] ;
-
+
decrease-indent ;
-
+
.construct-stack = $(.construct-stack[2-]) ;
-
-
- return $(result) ;
+
+ return $(result) ;
}
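# A sketch of a top-level call to the rule above; the project, the name and the
# source list are hypothetical. The first element of the result is a
# property-set holding usage requirements, the remaining elements are the
# constructed virtual targets:
#
#   local result = [ generators.construct $(project) hello : EXE
#       : [ property-set.create <variant>debug ] : $(cpp-sources) ] ;
#   local usage-requirements = $(result[1]) ;
#   local targets = $(result[2-]) ;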
-# Given 'result', obtained from some generator or
-# generators.construct, adds 'raw-properties' as usage requirements
-# to it. If result already contains usage requirements -- that is
-# the first element of result of an instance of the property-set class,
-# the existing usage requirements and 'raw-properties' are combined.
+
+# Given 'result', obtained from some generator or generators.construct, adds
+# 'raw-properties' as usage requirements to it. If the result already contains
+# usage requirements -- that is, the first element of the result is an instance
+# of the property-set class -- the existing usage requirements and
+# 'raw-properties' are combined.
+#
rule add-usage-requirements ( result * : raw-properties * )
{
if $(result)
{
- if [ class.is-a $(result[1]) : property-set ]
+ if [ class.is-a $(result[1]) : property-set ]
{
return [ $(result[1]).add-raw $(raw-properties) ] $(result[2-]) ;
}
else
{
return [ property-set.create $(raw-properties) ] $(result) ;
- }
- }
+ }
+ }
}
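# A sketch of the rule above in use; the <include> property and the 'result'
# variable are illustrative:
#
#   result = [ generators.add-usage-requirements $(result) : <include>foo ] ;
#
# If $(result) already starts with a property-set, <include>foo is merged into
# it; otherwise a new property-set holding <include>foo is prepended.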
Modified: branches/release/tools/build/v2/build/modifiers.jam
==============================================================================
--- branches/release/tools/build/v2/build/modifiers.jam (original)
+++ branches/release/tools/build/v2/build/modifiers.jam 2008-03-15 14:55:28 EDT (Sat, 15 Mar 2008)
@@ -125,9 +125,10 @@
return $(target) ;
}
- # Utility, clones a file-target with optional changes to the name, type, and project
- # of the target.
- # NOTE: This functionality should be moved, and generalized, to virtual-targets.
+ # Utility, clones a file-target with optional changes to the name, type and
+ # project of the target.
+ # NOTE: This functionality should be moved, and generalized, to
+ # virtual-targets.
#
rule clone-file-target ( target : new-name ? : new-type ? : new-project ? )
{
@@ -153,10 +154,11 @@
}
}
-# A modifier that changes the name of a target, after it's generated, given
-# a regular expression to slpit the name, and a set of token to insert
-# between the split tokens of the name. This also exposes the target for other
-# uses with a symlink to the original name (optionally).
+
+# A modifier that changes the name of a target, after it's generated, given a
+# regular expression to split the name and a set of tokens to insert between
+# the split tokens of the name. This also exposes the target for other uses
+# with a symlink to the original name (optionally).
#
class name-modifier : modifier
{
Modified: branches/release/tools/build/v2/build/project.jam
==============================================================================
--- branches/release/tools/build/v2/build/project.jam (original)
+++ branches/release/tools/build/v2/build/project.jam 2008-03-15 14:55:28 EDT (Sat, 15 Mar 2008)
@@ -1,86 +1,82 @@
-# Copyright 2002, 2003 Dave Abrahams
-# Copyright 2002, 2005, 2006 Rene Rivera
-# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Implements project representation and loading.
-# Each project is represented by
-# - a module where all the Jamfile content live.
+# Copyright 2002, 2003 Dave Abrahams
+# Copyright 2002, 2005, 2006 Rene Rivera
+# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Implements project representation and loading. Each project is represented
+# by:
+#   - a module where all the Jamfile content lives.
# - an instance of 'project-attributes' class.
-# (given module name, can be obtained by 'attributes' rule)
+# (given a module name, can be obtained using the 'attributes' rule)
# - an instance of 'project-target' class (from targets.jam)
-# (given a module name, can be obtained by 'target' rule)
+# (given a module name, can be obtained using the 'target' rule)
#
-# Typically, projects are created as result of loading Jamfile, which is
-# do by rules 'load' and 'initialize', below. First, module for Jamfile
-# is loaded and new project-attributes instance is created. Some rules
-# necessary for project are added to the module (see 'project-rules' module)
-# at the bottom of this file.
-# Default project attributes are set (inheriting attributes of parent project, if
-# it exists). After that, Jamfile is read. It can declare its own attributes,
-# via 'project' rule, which will be combined with already set attributes.
-#
-#
-# The 'project' rule can also declare project id, which will be associated with
-# the project module.
-#
-# There can also be 'standalone' projects. They are created by calling 'initialize'
-# on arbitrary module, and not specifying location. After the call, the module can
-# call 'project' rule, declare main target and behave as regular projects. However,
-# since it's not associated with any location, it's better declare only prebuilt
-# targets.
-#
-# The list of all loaded Jamfile is stored in variable .project-locations. It's possible
-# to obtain module name for a location using 'module-name' rule. The standalone projects
-# are not recorded, the only way to use them is by project id.
-
+# Typically, projects are created as a result of loading a Jamfile, which is
+# done by the rules 'load' and 'initialize', below. First, a module for the
+# Jamfile is loaded and a new project-attributes instance is created. Some
+# rules necessary for the project are added to the module (see the
+# 'project-rules' module) at the bottom of this file. Default project
+# attributes are set (inheriting attributes of the parent project, if it
+# exists). After that the Jamfile is read. It can declare its own attributes
+# using the 'project' rule, which will be combined with any already set
+# attributes.
+#
+# The 'project' rule can also declare a project id which will be associated
+# with the project module.
+#
+# There can also be 'standalone' projects. They are created by calling
+# 'initialize' on an arbitrary module and not specifying their location. After
+# the call, the module can call the 'project' rule, declare main targets and
+# behave as a regular project except that, since it's not associated with any
+# location, it should not declare targets that are not prebuilt.
+#
+# The list of all loaded Jamfiles is stored in the .project-locations variable.
+# It's possible to obtain a module name for a location using the 'module-name'
+# rule. Standalone projects are not recorded and can only be referenced using
+# their project id.
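# A sketch of the Jamfile-side usage described above; the project id, the
# attributes and the referenced path are illustrative:
#
#   project my-lib
#       : requirements <include>include
#       : default-build release
#       ;
#   use-project /boost : ../boost ;
#
# 'project' combines the declared attributes with those inherited from the
# parent, and 'use-project' only records the id/location pair; the referenced
# project is loaded later, as described above.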
-import modules : peek poke ;
-import numbers ;
-import path ;
-import sequence ;
-import errors : error ;
-
-import print ;
import "class" : new ;
import errors ;
-import assert ;
+import modules ;
+import path ;
+import print ;
import property-set ;
+import sequence ;
-#
-# Loads jamfile at the given location. After loading, project global
-# file and jamfile needed by the loaded one will be loaded recursively.
-# If the jamfile at that location is loaded already, does nothing.
-# Returns the project module for the Jamfile.
+
+# Loads the Jamfile at the given location. After loading, the project global
+# file and any Jamfiles needed by the requested one will be loaded recursively.
+# If the Jamfile at that location is already loaded, does nothing. Returns the
+# project module for the Jamfile.
#
rule load ( jamfile-location )
-{
+{
if --debug-loading in [ modules.peek : ARGV ]
{
ECHO "Loading Jamfile at" '$(jamfile-location)' ;
- }
-
- local module-name = [ module-name $(jamfile-location) ] ;
+ }
+
+ local module-name = [ module-name $(jamfile-location) ] ;
# If Jamfile is already loaded, don't try again.
if ! $(module-name) in $(.jamfile-modules)
- {
+ {
load-jamfile $(jamfile-location) ;
-
- # We want to make sure that child project are loaded only
- # after parent projects. In particular, because parent projects
- # define attributes whch are inherited by children, and we don't
- # want children to be loaded before parents has defined everything.
+
+        # We want to make sure that child projects are loaded only after parent
+ # projects. In particular, because parent projects define attributes
+ # which are inherited by children, and we don't want children to be
+ # loaded before parent has defined everything.
#
- # While "build-project" and "use-project" can potentially refer
- # to child projects from parent projects, we don't immediately
- # loading child projects when seing those attributes. Instead,
- # we record the minimal information that will be used only later.
+ # While "build-project" and "use-project" can potentially refer to child
+ # projects from parent projects, we don't immediately load child
+ # projects when seeing those attributes. Instead, we record the minimal
+ # information to be used only later.
load-used-projects $(module-name) ;
- }
- return $(module-name) ;
+ }
+ return $(module-name) ;
}
+
rule load-used-projects ( module-name )
{
local used = [ modules.peek $(module-name) : .used-projects ] ;
@@ -90,39 +86,40 @@
{
local id = $(used[1]) ;
local where = $(used[2]) ;
-
- project.use $(id) : [ path.root
- [ path.make $(where) ] $(location) ] ;
+
+ project.use $(id) : [ path.root [ path.make $(where) ] $(location) ] ;
used = $(used[3-]) ;
- }
+ }
}
-# Note the use of character groups, as opposed to listing
-# 'Jamroot' and 'jamroot'. With the latter, we'd get duplicate
-# matches on windows and would have to eliminate duplicates.
-JAMROOT ?= [ peek : JAMROOT ] ;
+# Note the use of character groups, as opposed to listing 'Jamroot' and
+# 'jamroot'. With the latter, we'd get duplicate matches on Windows and would
+# have to eliminate duplicates.
+JAMROOT ?= [ modules.peek : JAMROOT ] ;
JAMROOT ?= project-root.jam [Jj]amroot [Jj]amroot.jam ;
+
# Loads the parent of the Jamfile at 'location'. Issues an error if nothing is found.
rule load-parent ( location )
{
- local found = [ path.glob-in-parents $(location) :
+ local found = [ path.glob-in-parents $(location) :
$(JAMROOT) $(JAMFILE) ] ;
-
+
if ! $(found)
{
ECHO "error: Could not find parent for project at '$(location)'" ;
- ECHO "error: Did not find Jamfile or project-root.jam in any parent directory." ;
+ ECHO "error: Did not find Jamfile.jam or Jamroot.jam in any parent directory." ;
EXIT ;
}
-
- return [ load $(found[1]:D) ] ;
+
+ return [ load $(found[1]:D) ] ;
}
-# Makes the specified 'module' act as if it were a regularly loaded Jamfile
-# at 'location'. If Jamfile is already located for that location, it's an
-# error.
+
+# Makes the specified 'module' act as if it were a regularly loaded Jamfile at
+# 'location'. Reports an error if a Jamfile has already been loaded for that
+# location.
rule act-as-jamfile ( module : location )
{
if [ module-name $(location) ] in $(.jamfile-modules)
@@ -130,83 +127,83 @@
errors.error "Jamfile was already loaded for '$(location)'" ;
}
# Set up non-default mapping from location to module.
- .module.$(location) = $(module) ;
-
- # Add the location to the list of project locations
- # so that we don't try to load Jamfile in future
+ .module.$(location) = $(module) ;
+
+ # Add the location to the list of project locations so that we don't try to
+ # reload the same Jamfile in the future.
.jamfile-modules += [ module-name $(location) ] ;
-
+
initialize $(module) : $(location) ;
}
-# Given 'name' which can be project-id or plain directory name,
-# return project module corresponding to that id or directory.
-# Returns nothing of project is not found.
+# Returns the project module corresponding to the given project-id or plain
+# directory name. Returns nothing if such a project cannot be found.
rule find ( name : current-location )
{
local project-module ;
-
+
# Try interpreting name as project id.
if [ path.is-rooted $(name) ]
- {
- project-module = $($(name).jamfile-module) ;
- }
-
+ {
+ project-module = $($(name).jamfile-module) ;
+ }
+
if ! $(project-module)
- {
- local location = [ path.root
+ {
+ local location = [ path.root
[ path.make $(name) ] $(current-location) ] ;
- # If no project is registered for the given location, try to
- # load it. First see if we have Jamfile. If not we might have project
- # root, willing to act as Jamfile. In that case, project-root
+
+ # If no project is registered for the given location, try to load it.
+ # First see if we have a Jamfile. If not, then see if we might have a
+        # project root willing to act as a Jamfile. In that case, the project
+        # root must be placed in the directory referred to by the id.
-
+
project-module = [ module-name $(location) ] ;
- if ! $(project-module) in $(.jamfile-modules)
+ if ! $(project-module) in $(.jamfile-modules)
{
if [ path.glob $(location) : $(JAMROOT) $(JAMFILE) ]
{
- project-module = [ load $(location) ] ;
- }
+ project-module = [ load $(location) ] ;
+ }
else
{
project-module = ;
}
- }
+ }
}
-
+
return $(project-module) ;
}
-#
-# Returns the name of module corresponding to 'jamfile-location'.
-# If no module corresponds to location yet, associates default
-# module name with that location.
+
+# Returns the name of the module corresponding to 'jamfile-location'. If no
+# module corresponds to that location yet, associates the default module name
+# with that location.
#
rule module-name ( jamfile-location )
{
if ! $(.module.$(jamfile-location))
{
- # Root the path, so that locations are always umbiguious.
- # Without this, we can't decide if '../../exe/program1' and '.'
- # are the same paths, or not.
+ # Root the path, so that locations are always unambiguous. Without this,
+ # we can't decide if '../../exe/program1' and '.' are the same paths.
jamfile-location = [ path.root $(jamfile-location) [ path.pwd ] ] ;
- .module.$(jamfile-location) = Jamfile<$(jamfile-location)> ;
+ .module.$(jamfile-location) = Jamfile<$(jamfile-location)> ;
}
return $(.module.$(jamfile-location)) ;
}
-# Default patterns to search for the Jamfiles to use for build
-# declarations.
+
+# Default patterns to search for the Jamfiles to use for build declarations.
#
JAMFILE = [ modules.peek : JAMFILE ] ;
JAMFILE ?= [Bb]uild.jam [Jj]amfile.v2 [Jj]amfile [Jj]amfile.jam ;
-# Find the Jamfile at the given location. This returns the exact names of
-# all the Jamfiles in the given directory. The optional parent-root argument
-# causes this to search not the given directory but the ones above it up
-# to the directory given in it.
+
+# Find the Jamfile at the given location. This returns the exact names of all
+# the Jamfiles in the given directory. The optional parent-root argument causes
+# this to search not the given directory but the ones above it up to the
+# directory given in it.
#
rule find-jamfile (
dir # The directory(s) to look for a Jamfile.
@@ -220,27 +217,26 @@
if $(parent-root)
{
if ! $(.parent-jamfile.$(dir))
- {
- .parent-jamfile.$(dir) =
- [ path.glob-in-parents $(dir) : $(JAMFILE) ] ;
- }
- jamfile-glob = $(.parent-jamfile.$(dir)) ;
+ {
+ .parent-jamfile.$(dir) =
+ [ path.glob-in-parents $(dir) : $(JAMFILE) ] ;
+ }
+ jamfile-glob = $(.parent-jamfile.$(dir)) ;
}
else
{
if ! $(.jamfile.$(dir))
- {
- .jamfile.$(dir) = [ path.glob $(dir) : $(JAMFILE) ] ;
- }
+ {
+ .jamfile.$(dir) = [ path.glob $(dir) : $(JAMFILE) ] ;
+ }
jamfile-glob = $(.jamfile.$(dir)) ;
-
+
}
-
+
local jamfile-to-load = $(jamfile-glob) ;
- # Multiple Jamfiles found in the same place. Warn about this.
- # And ensure we use only one of them.
- # As a temporary convenience measure, if there's Jamfile.v2 amount
- # found files, suppress the warning and use it.
+ # Multiple Jamfiles found in the same place. Warn about this and ensure we
+ # use only one of them. As a temporary convenience measure, if there's
+ # Jamfile.v2 among found files, suppress the warning and use it.
#
if $(jamfile-to-load[2-])
{
@@ -249,24 +245,24 @@
if $(v2-jamfiles) && ! $(v2-jamfiles[2])
{
jamfile-to-load = $(v2-jamfiles) ;
- }
+ }
else
- {
- ECHO
- "warning: Found multiple Jamfiles at '"$(dir)"'!"
- "Loading the first one: '" [ path.basename $(jamfile-to-load[1]) ] "'." ;
+ {
+ local jamfile = [ path.basename $(jamfile-to-load[1]) ] ;
+ ECHO "warning: Found multiple Jamfiles at '"$(dir)"'!"
+ "Loading the first one: '$(jamfile)'." ;
}
-
+
jamfile-to-load = $(jamfile-to-load[1]) ;
- }
-
+ }
+
# Could not find it, error.
#
if ! $(no-errors) && ! $(jamfile-to-load)
{
errors.error
"Unable to load Jamfile." :
- "Could not find a Jamfile in directory '$(dir)'". :
+ "Could not find a Jamfile in directory '$(dir)'". :
"Attempted to find it with pattern '"$(JAMFILE:J=" ")"'." :
"Please consult the documentation at 'http://www.boost.org'." ;
}
@@ -274,12 +270,13 @@
return $(jamfile-to-load) ;
}
-# Load a Jamfile at the given directory. Returns nothing.
-# Will attempt to load the file as indicated by the JAMFILE patterns.
-# Effect of calling this rule twice with the same 'dir' is underfined.
+# Load a Jamfile at the given directory. Returns nothing. Will attempt to load
+# the file as indicated by the JAMFILE patterns. The effect of calling this
+# rule twice with the same 'dir' is undefined.
+#
local rule load-jamfile (
- dir # The directory of the project Jamfile.
+ dir # The directory of the project Jamfile.
)
{
# See if the Jamfile is where it should be.
@@ -289,76 +286,71 @@
{
jamfile-to-load = [ find-jamfile $(dir) ] ;
}
-
-
- # The module of the jamfile.
+
+ # The module of the Jamfile.
#
- local jamfile-module = [ module-name [ path.parent $(jamfile-to-load) ] ] ;
+ local jamfile-module = [ module-name [ path.parent $(jamfile-to-load) ] ] ;
- # Initialize the jamfile module before loading.
- #
- initialize $(jamfile-module) : [ path.parent $(jamfile-to-load) ]
- : $(jamfile-to-load:BS) ;
+ # Initialize the Jamfile module before loading.
+ #
+ initialize $(jamfile-module) : [ path.parent $(jamfile-to-load) ]
+ : $(jamfile-to-load:BS) ;
local saved-project = $(.current-project) ;
- # Now load the Jamfile in it's own context.
- # Initialization might have load parent Jamfiles, which might have
- # loaded the current Jamfile with use-project. Do a final check to make
- # sure it's not loaded already.
+    # Now load the Jamfile in its own context. Initialization might have loaded
+ # parent Jamfiles, which might have loaded the current Jamfile with
+ # use-project. Do a final check to make sure it's not loaded already.
if ! $(jamfile-module) in $(.jamfile-modules)
- {
- .jamfile-modules += $(jamfile-module) ;
+ {
+ .jamfile-modules += $(jamfile-module) ;
mark-as-user $(jamfile-module) ;
- modules.load $(jamfile-module) : [ path.native $(jamfile-to-load) ] : . ;
+ modules.load $(jamfile-module) : [ path.native $(jamfile-to-load) ] : . ;
if [ MATCH ($(JAMROOT)) : $(jamfile-to-load:BS) ]
{
jamfile = [ find-jamfile $(dir) : no-errors ] ;
if $(jamfile)
- {
+ {
load-aux $(jamfile-module) : [ path.native $(jamfile) ] ;
- }
- }
+ }
+ }
}
- # Now do some checks
+ # Now do some checks.
if $(.current-project) != $(saved-project)
{
errors.error "The value of the .current-project variable"
: "has magically changed after loading a Jamfile."
- : "This means some of the targets might be defined a the wrong project."
- : "after loading " $(jamfile-module)
- : "expected value " $(saved-project)
- : "actual value " $(.current-project)
+ : "This means some of the targets might be defined in the wrong project."
+ : "after loading" $(jamfile-module)
+ : "expected value" $(saved-project)
+ : "actual value" $(.current-project)
;
}
if $(.global-build-dir)
- {
- local id = [ attribute $(jamfile-module) id ] ;
- local project-root = [ attribute $(jamfile-module) project-root ] ;
- local location = [ attribute $(jamfile-module) location ] ;
-
+ {
+ local id = [ attribute $(jamfile-module) id ] ;
+ local project-root = [ attribute $(jamfile-module) project-root ] ;
+ local location = [ attribute $(jamfile-module) location ] ;
+
if $(location) && $(project-root) = $(dir)
{
- # This is Jamroot
+ # This is Jamroot.
if ! $(id)
{
ECHO "warning: the --build-dir option was specified" ;
ECHO "warning: but Jamroot at '$(dir)'" ;
ECHO "warning: specified no project id" ;
ECHO "warning: the --build-dir option will be ignored" ;
- }
- }
- }
-
-
-
-
+ }
+ }
+ }
}
+
rule mark-as-user ( module-name )
{
if USER_MODULE in [ RULENAMES ]
- {
+ {
USER_MODULE $(module-name) ;
}
}
@@ -367,30 +359,31 @@
rule load-aux ( module-name : file )
{
mark-as-user $(module-name) ;
-
+
module $(module-name)
{
include $(2) ;
local rules = [ RULENAMES $(1) ] ;
IMPORT $(1) : $(rules) : $(1) : $(1).$(rules) ;
- }
+ }
}
+
.global-build-dir = [ MATCH --build-dir=(.*) : [ modules.peek : ARGV ] ] ;
if $(.global-build-dir)
-{
- # If the option is specified several times, take the last value.
+{
+ # If the option is specified several times, take the last value.
.global-build-dir = [ path.make $(.global-build-dir[-1]) ] ;
}
-# Initialize the module for a project.
+# Initialize the module for a project.
#
rule initialize (
- module-name # The name of the project module.
- : location ? # The location (directory) of the project to initialize.
- # If not specified, stanalone project will be initialized.
- : basename ?
+ module-name # The name of the project module.
+ : location ? # The location (directory) of the project to initialize. If
+ # not specified, a standalone project will be initialized.
+ : basename ?
)
{
if --debug-loading in [ modules.peek : ARGV ]
@@ -398,77 +391,80 @@
ECHO "Initializing project '$(module-name)'" ;
}
- # TODO: need to consider if standalone projects can do anything but defining
- # prebuilt targets. If so, we need to give more sensible "location", so that
- # source paths are correct.
+ # TODO: need to consider if standalone projects can do anything but define
+ # prebuilt targets. If so, we need to give it a more sensible "location", so
+ # that source paths are correct.
location ?= "" ;
- # Create the module for the Jamfile first.
+ # Create the module for the Jamfile first.
module $(module-name)
- {
- }
- $(module-name).attributes = [ new project-attributes $(location)
- $(module-name) ] ;
+ {
+ }
+ $(module-name).attributes = [ new project-attributes $(location)
+ $(module-name) ] ;
local attributes = $($(module-name).attributes) ;
-
+
if $(location)
- {
- $(attributes).set source-location : [ path.make $(location) ] : exact ;
+ {
+ $(attributes).set source-location : [ path.make $(location) ] : exact ;
}
else
{
- $(attributes).set source-location : "" : exact ;
+ $(attributes).set source-location : "" : exact ;
}
-
- $(attributes).set requirements : [ property-set.empty ] : exact ;
- $(attributes).set usage-requirements : [ property-set.empty ] : exact ;
+
+ $(attributes).set requirements : [ property-set.empty ] : exact ;
+ $(attributes).set usage-requirements : [ property-set.empty ] : exact ;
# Import rules common to all project modules from project-rules module,
# defined at the end of this file.
modules.clone-rules project-rules $(module-name) ;
-
+
local jamroot ;
local parent-module ;
- if $(module-name) = site-config
+ if $(module-name) = test-config
{
# No parent.
- }
+ }
+ else if $(module-name) = site-config
+ {
+ parent-module = test-config ;
+ }
else if $(module-name) = user-config
{
parent-module = site-config ;
}
- else
+ else
{
- # We search for parent/project-root only if jamfile was specified
- # --- i.e
+ # We search for parent/project-root only if a Jamfile was specified, i.e.
# if the project is not standalone.
- if $(location) && ! [ MATCH ($(JAMROOT)) : $(basename) ]
+ if $(location) && ! [ MATCH ($(JAMROOT)) : $(basename) ]
{
parent-module = [ load-parent $(location) ] ;
- }
+ }
else
{
- # It's either jamroot, or standalone project.
- # If it's jamroot, inherit from user-config.
+ # It's either jamroot or a standalone project. If it's jamroot,
+ # inherit from user-config.
if $(location)
{
- parent-module = user-config ;
+ parent-module = user-config ;
jamroot = true ;
- }
- }
+ }
+ }
}
-
+
if $(parent-module)
- {
+ {
inherit-attributes $(module-name) : $(parent-module) ;
$(attributes).set parent-module : $(parent-module) : exact ;
}
-
+
if $(jamroot)
- {
+ {
$(attributes).set project-root : $(location) : exact ;
}
-
+
local parent ;
if $(parent-module)
{
@@ -477,70 +473,72 @@
if ! $(.target.$(module-name))
{
- .target.$(module-name) = [ new project-target $(module-name)
- : $(module-name) $(parent)
+ .target.$(module-name) = [ new project-target $(module-name)
+ : $(module-name) $(parent)
: [ attribute $(module-name) requirements ] ] ;
-
+
if --debug-loading in [ modules.peek : ARGV ]
{
- ECHO "Assigned project target" $(.target.$(module-name))
- "to '$(module-name)'" ;
+ ECHO "Assigned project target" $(.target.$(module-name))
+ "to '$(module-name)'" ;
}
- }
-
+ }
+
.current-project = [ target $(module-name) ] ;
}
+
# Make 'project-module' inherit attributes of project root and parent module.
rule inherit-attributes ( project-module : parent-module )
{
- local attributes = $($(project-module).attributes) ;
+ local attributes = $($(project-module).attributes) ;
local pattributes = [ attributes $(parent-module) ] ;
- # Parent module might be locationless user-config.
+ # Parent module might be a locationless configuration module.
if [ modules.binding $(parent-module) ]
- {
- $(attributes).set parent : [ path.parent
+ {
+ $(attributes).set parent : [ path.parent
[ path.make [ modules.binding $(parent-module) ] ] ] ;
- }
+ }
local v = [ $(pattributes).get project-root ] ;
$(attributes).set project-root : $(v) : exact ;
- $(attributes).set default-build
- : [ $(pattributes).get default-build ] ;
+ $(attributes).set default-build
+ : [ $(pattributes).get default-build ] ;
$(attributes).set requirements
- : [ $(pattributes).get requirements ] : exact ;
+ : [ $(pattributes).get requirements ] : exact ;
$(attributes).set usage-requirements
- : [ $(pattributes).get usage-requirements ] : exact ;
-
+ : [ $(pattributes).get usage-requirements ] : exact ;
+
local parent-build-dir = [ $(pattributes).get build-dir ] ;
if $(parent-build-dir)
- {
- # Have to compute relative path from parent dir to our dir
- # Convert both paths to absolute, since we cannot
- # find relative path from ".." to "."
-
+ {
+ # Have to compute relative path from parent dir to our dir. Convert both
+ # paths to absolute, since we cannot find relative path from ".." to
+ # ".".
+
local location = [ attribute $(project-module) location ] ;
local parent-location = [ attribute $(parent-module) location ] ;
-
+
local pwd = [ path.pwd ] ;
local parent-dir = [ path.root $(parent-location) $(pwd) ] ;
local our-dir = [ path.root $(location) $(pwd) ] ;
- $(attributes).set build-dir : [ path.join $(parent-build-dir)
- [ path.relative $(our-dir) $(parent-dir) ] ] : exact ;
- }
+ $(attributes).set build-dir : [ path.join $(parent-build-dir)
+ [ path.relative $(our-dir) $(parent-dir) ] ] : exact ;
+ }
}
-# Associate the given id with the given project module
+# Associate the given id with the given project module.
rule register-id ( id : module )
{
$(id).jamfile-module = $(module) ;
}
+
# Class keeping all the attributes of a project.
#
# The standard attributes are "id", "location", "project-root", "parent"
# "requirements", "default-build", "source-location" and "projects-to-build".
-class project-attributes
+class project-attributes
{
import property ;
import property-set ;
@@ -549,28 +547,28 @@
import print ;
import sequence ;
import project ;
-
+
rule __init__ ( location project-module )
- {
+ {
self.location = $(location) ;
self.project-module = $(project-module) ;
}
-
- # Set the named attribute from the specification given by the user.
- # The value actually set may be different.
- rule set ( attribute : specification *
- : exact ? # Sets value from 'specification' without any processing
- )
+
+ # Set the named attribute from the specification given by the user. The
+ # value actually set may be different.
+ rule set ( attribute : specification *
+ : exact ? # Sets value from 'specification' without any processing.
+ )
{
if $(exact)
{
self.$(attribute) = $(specification) ;
}
- else if $(attribute) = "requirements"
+ else if $(attribute) = "requirements"
{
local result = [ property-set.refine-from-user-input
$(self.requirements) : $(specification)
- : $(self.project-module) : $(self.location) ] ;
+ : $(self.project-module) : $(self.location) ] ;
if $(result[1]) = "@error"
{
@@ -593,28 +591,28 @@
split ?= nothing $(p) ;
unconditional += $(split[2]) ;
}
-
+
local non-free = [ property.remove free : $(unconditional) ] ;
if $(non-free)
{
errors.error "usage-requirements" $(specification) "have non-free properties" $(non-free) ;
- }
+ }
local t = [ property.translate-paths $(specification)
: $(self.location) ] ;
if $(self.usage-requirements)
{
- self.usage-requirements = [ property-set.create
+ self.usage-requirements = [ property-set.create
[ $(self.usage-requirements).raw ] $(t) ] ;
}
- else
+ else
{
self.usage-requirements = [ property-set.create $(t) ] ;
- }
- }
+ }
+ }
else if $(attribute) = "default-build"
{
self.default-build = [ property.make $(specification) ] ;
- }
+ }
else if $(attribute) = "source-location"
{
self.source-location = ;
@@ -623,17 +621,17 @@
self.source-location += [ path.root
[ path.make $(src-path) ] $(self.location) ] ;
}
- }
+ }
else if $(attribute) = "build-dir"
{
- self.build-dir = [ path.root
+ self.build-dir = [ path.root
[ path.make $(specification) ] $(self.location) ] ;
- }
- else if ! $(attribute) in "id" "default-build" "location" "source-location"
- "parent" "projects-to-build" "project-root"
+ }
+ else if ! $(attribute) in "id" "default-build" "location"
+ "source-location" "parent" "projects-to-build" "project-root"
{
- errors.error "Invalid project attribute '$(attribute)' specified "
- "for project at '$(self.location)'" ;
+ errors.error "Invalid project attribute '$(attribute)' specified"
+ "for project at '$(self.location)'" ;
}
else
{
@@ -658,27 +656,29 @@
print.list-item "Requirements:" [ $(self.requirements).raw ] ;
print.list-item "Default build:" $(self.default-build) ;
print.list-item "Source location:" $(self.source-location) ;
- print.list-item "Projects to build:"
- [ sequence.insertion-sort $(self.projects-to-build) ] ;
+ print.list-item "Projects to build:"
+ [ sequence.insertion-sort $(self.projects-to-build) ] ;
print.list-end ;
}
-
}
-# Returns the project which is currently being loaded
+
+# Returns the project which is currently being loaded.
rule current ( )
{
return $(.current-project) ;
}
-# Temporary changes the current project to 'project'. Should
-# be followed by 'pop-current'.
+
+# Temporarily changes the current project to 'project'. Should be followed by
+# 'pop-current'.
rule push-current ( project )
{
.saved-current-project += $(.current-project) ;
.current-project = $(project) ;
}
+
rule pop-current ( )
{
.current-project = $(.saved-current-project[-1]) ;
@@ -686,68 +686,71 @@
}
-
-# Returns the project-attribute instance for the specified jamfile module.
+# Returns the project-attribute instance for the specified Jamfile module.
rule attributes ( project )
{
return $($(project).attributes) ;
}
-# Returns the value of the specified attribute in the specified jamfile module.
+
+# Returns the value of the specified attribute in the specified Jamfile module.
rule attribute ( project attribute )
{
- return [ $($(project).attributes).get $(attribute) ] ;
+ return [ $($(project).attributes).get $(attribute) ] ;
}
+
# Returns the project target corresponding to the 'project-module'.
rule target ( project-module )
{
if ! $(.target.$(project-module))
{
- .target.$(project-module) = [ new project-target $(project-module)
- : $(project-module)
- : [ attribute $(project-module) requirements ] ] ;
+ .target.$(project-module) = [ new project-target $(project-module)
+ : $(project-module)
+ : [ attribute $(project-module) requirements ] ] ;
}
- return $(.target.$(project-module)) ;
+ return $(.target.$(project-module)) ;
}
+
# Use/load a project.
rule use ( id : location )
{
local saved-project = $(.current-project) ;
local project-module = [ project.load $(location) ] ;
local declared-id = [ project.attribute $(project-module) id ] ;
-
+
if ! $(declared-id) || $(declared-id) != $(id)
{
- # The project at 'location' either have no id or
- # that id is not equal to the 'id' parameter.
- if $($(id).jamfile-module)
- && $($(id).jamfile-module) != $(project-module)
- {
- errors.user-error
- "Attempt to redeclare already existing project id '$(id)'" ;
- }
+ # The project at 'location' either has no id or that id is not equal to
+ # the 'id' parameter.
+ if $($(id).jamfile-module)
+ && $($(id).jamfile-module) != $(project-module)
+ {
+ errors.user-error
+ "Attempt to redeclare already existing project id '$(id)'" ;
+ }
$(id).jamfile-module = $(project-module) ;
}
.current-project = $(saved-project) ;
}
-# Defines a Boost.Build extension project. Such extensions usually
-# contain library targets and features that can be used by many people.
-# Even though extensions are really projects, they can be initialize as
-# a module would be with the "using" (project.project-rules.using)
-# mechanism.
+
+# Defines a Boost.Build extension project. Such extensions usually contain
+# library targets and features that can be used by many people. Even though
+# extensions are really projects, they can be initialized as a module would be
+# with the "using" (project.project-rules.using) mechanism.
+#
rule extension ( id : options * : * )
{
# The caller is a standalone module for the extension.
local mod = [ CALLER_MODULE ] ;
-
+
# We need to do the rest within the extension module.
module $(mod)
{
import path ;
-
+
# Find the root project.
local root-project = [ project.current ] ;
root-project = [ $(root-project).project-module ] ;
@@ -757,170 +760,167 @@
{
root-project = [ project.attribute $(root-project) parent-module ] ;
}
-
- # Create the project data, and bring in the project rules
- # into the module.
+
+ # Create the project data, and bring in the project rules into the
+ # module.
project.initialize $(__name__) :
[ path.join [ project.attribute $(root-project) location ] ext $(1:L) ] ;
-
- # Create the project itself, i.e. the attributes.
- # All extensions are created in the "/ext" project space.
+
+ # Create the project itself, i.e. the attributes. All extensions are
+ # created in the "/ext" project space.
project /ext/$(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
local attributes = [ project.attributes $(__name__) ] ;
-
+
# Inherit from the root project of whomever is defining us.
project.inherit-attributes $(__name__) : $(root-project) ;
$(attributes).set parent-module : $(root-project) : exact ;
}
}
+
rule glob-internal ( project : wildcards + : excludes * : rule-name )
{
local location = [ $(project).get source-location ] ;
-
+
local result ;
- local paths = [ path.$(rule-name) $(location)
- : [ sequence.transform path.make : $(wildcards) ]
+ local paths = [ path.$(rule-name) $(location)
+ : [ sequence.transform path.make : $(wildcards) ]
: [ sequence.transform path.make : $(excludes) ] ] ;
if $(wildcards:D) || $(rule-name) != glob
{
- # The paths we've found are relative to current directory,
- # but the names specified in sources list are assumed to
- # be relative to source directory of the corresponding
- # prject. So, just make the name absolute.
+ # The paths we've found are relative to the current directory, but the
+ # names specified in the sources list are assumed to be relative to the
+ # source directory of the corresponding project. So, just make the names
+ # absolute.
for local p in $(paths)
{
result += [ path.root $(p) [ path.pwd ] ] ;
- }
+ }
}
- else
+ else
{
- # There were not directory in wildcard, so the files are all
- # in the source directory of the project. Just drop the
- # directory, instead of making paths absolute.
+ # There were no wildcards in the directory path, so the files are all in
+ # the source directory of the project. Just drop the directory, instead
+ # of making paths absolute.
result = $(paths:D="") ;
}
-
- return $(result) ;
+
+ return $(result) ;
}
-# This module defines rules common to all projects
+# This module defines rules common to all projects.
module project-rules
-{
+{
rule using ( toolset-module : * )
{
import toolset ;
import modules ;
import project ;
-
- # The module referred by 'using' can be placed in
- # the same directory as Jamfile, and the user
- # will expect the module to be found even though
- # the directory is not in BOOST_BUILD_PATH.
- # So temporary change the search path.
+
+ # Temporarily change the search path so that the module referred to by
+ # 'using' can be placed in the same directory as the Jamfile. The user
+ # will expect the module to be found even though the directory is not in
+ # BOOST_BUILD_PATH.
local x = [ modules.peek : BOOST_BUILD_PATH ] ;
local caller = [ modules.binding $(__name__) ] ;
modules.poke : BOOST_BUILD_PATH : $(caller:D) $(x) ;
toolset.using $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
modules.poke : BOOST_BUILD_PATH : $(x) ;
-
- # The above might have clobbered .current-project
- # Restore the the right value.
- modules.poke project : .current-project
- : [ project.target $(__name__) ] ;
+
+ # The above might have clobbered .current-project. Restore the correct
+ # value.
+ modules.poke project : .current-project
+ : [ project.target $(__name__) ] ;
}
-
+
import modules ;
-
+
rule import ( * : * : * )
{
modules.import project ;
-
+
local caller = [ CALLER_MODULE ] ;
local saved = [ modules.peek project : .current-project ] ;
module $(caller)
{
modules.import $(1) : $(2) : $(3) ;
}
- modules.poke project : .current-project : $(saved) ;
+ modules.poke project : .current-project : $(saved) ;
}
-
rule project ( id ? : options * : * )
{
- import project ;
- import path ;
import errors ;
-
+ import path ;
+ import project ;
+
local attributes = [ project.attributes $(__name__) ] ;
- if $(id)
+ if $(id)
{
id = [ path.root $(id) / ] ;
project.register-id $(id) : $(__name__) ;
$(attributes).set id : $(id) ;
}
-
+
local explicit-build-dir ;
-
+
for n in 2 3 4 5 6 7 8 9
{
local option = $($(n)) ;
- if $(option)
+ if $(option)
{
$(attributes).set $(option[1]) : $(option[2-]) ;
}
if $(option[1]) = "build-dir"
{
explicit-build-dir = [ path.make $(option[2-]) ] ;
- }
+ }
}
-
+
# If '--build-dir' is specified, change the build dir for the project.
- local global-build-dir =
- [ modules.peek project : .global-build-dir ] ;
-
+ local global-build-dir =
+ [ modules.peek project : .global-build-dir ] ;
+
if $(global-build-dir)
- {
+ {
local location = [ $(attributes).get location ] ;
- # Project with empty location is 'standalone' project, like
- # user-config, or qt. It has no build dir.
- # If we try to set build dir for user-config, we'll then
- # try to inherit it, with either weird, or wrong consequences.
+ # Project with an empty location is a 'standalone' project such as
+ # user-config or qt. It has no build dir. If we try to set build dir
+ # for user-config, we'll then try to inherit it, with either weird
+ # or wrong consequences.
if $(location) && $(location) = [ $(attributes).get project-root ]
{
# This is Jamroot.
if $(id)
- {
- if $(explicit-build-dir)
+ {
+ if $(explicit-build-dir)
&& [ path.is-rooted $(explicit-build-dir) ]
{
- errors.user-error "Absolute directory specified via 'build-dir' project attribute"
- : "Don't know how to combine that with the --build-dir option."
- ;
+ errors.user-error "Absolute directory specified via 'build-dir' project attribute"
+ : "Don't know how to combine that with the --build-dir option."
+ ;
}
# Strip the leading slash from id.
- local rid = [ MATCH /(.*) : $(id) ] ;
- local p = [ path.join
+ local rid = [ MATCH /(.*) : $(id) ] ;
+ local p = [ path.join
$(global-build-dir) $(rid) $(explicit-build-dir) ] ;
$(attributes).set build-dir : $(p) : exact ;
- }
- }
- else
+ }
+ }
+ else
{
# Not Jamroot
if $(explicit-build-dir)
- {
- errors.user-error "When --build-dir is specified, the 'build-project'"
- : "attribute is allowed only for top-level 'project' invocations" ;
- }
- }
+ {
+ errors.user-error "When --build-dir is specified, the 'build-dir' project"
+ : "attribute is allowed only for top-level 'project' invocations" ;
+ }
+ }
}
-
-
}
-
+
# Declare and set a project global constant. Project global constants are
# normal variables but should not be changed. They are applied to every
# child Jamfile.
@@ -932,13 +932,12 @@
{
import project ;
local p = [ project.target $(__name__) ] ;
- $(p).add-constant $(name) : $(value) ;
+ $(p).add-constant $(name) : $(value) ;
}
-
- # Declare and set a project global constant, whose value is a path. The
- # path is adjusted to be relative to the invocation directory. The given
- # value path is taken to be either absolute, or relative to this project
- # root.
+
+ # Declare and set a project global constant, whose value is a path. The path
+ # is adjusted to be relative to the invocation directory. The given value
+ # path is taken to be either absolute, or relative to this project root.
rule path-constant (
name # Variable name of the constant.
: value + # Value of the constant.
@@ -949,7 +948,6 @@
$(p).add-constant $(name) : $(value) : path ;
}
-
rule use-project ( id : where )
{
# See comment in 'load' for explanation.
@@ -964,43 +962,42 @@
local now = [ $(attributes).get projects-to-build ] ;
$(attributes).set projects-to-build : $(now) $(dir) ;
}
-
+
rule explicit ( target-names * )
{
import project ;
- # If 'explicit' is used in a helper rule defined in Jamroot,
- # and inherited by children, then most of the time
- # we want 'explicit' to operate on the Jamfile where
- # the helper rule is invoked.
+ # If 'explicit' is used in a helper rule defined in Jamroot and
+ # inherited by children, then most of the time we want 'explicit' to
+ # operate on the Jamfile where the helper rule is invoked.
local t = [ project.current ] ;
for local n in $(target-names)
- {
+ {
$(t).mark-target-as-explicit $(n) ;
- }
- }
-
+ }
+ }
+
rule glob ( wildcards + : excludes * )
{
import project ;
- return [ project.glob-internal [ project.current ]
+ return [ project.glob-internal [ project.current ]
: $(wildcards) : $(excludes) : glob ] ;
}
rule glob-tree ( wildcards + : excludes * )
{
import project ;
-
+
if $(wildcards:D) || $(excludes:D)
{
errors.user-error "The patterns to 'glob-tree' may not include directory" ;
}
- return [ project.glob-internal [ project.current ]
+ return [ project.glob-internal [ project.current ]
: $(wildcards) : $(excludes) : glob-tree ] ;
}
- # Calculates conditional requirements for multiple requirements
- # at once. This is a shorthand to be reduce duplication and to
- # keep an inline declarative syntax. For example:
+ # Calculates conditional requirements for multiple requirements at once.
+ # This is a shorthand to reduce duplication and to keep an inline
+ # declarative syntax. For example:
#
# lib x : x.cpp : [ conditional <toolset>gcc <variant>debug :
# <define>DEBUG_EXCEPTION <define>DEBUG_TRACE ] ;
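
For readers following the project.jam changes above, here is a small illustrative
Jamroot sketch, not part of this patch, exercising the rules whose documentation
was reworked: 'project' with an id and a 'build-dir' attribute, 'path-constant',
'use-project', 'explicit' and 'conditional'. All project ids, target names and
directories below are invented for the example.

    # Hypothetical Jamroot.
    project example                     # registers the project id /example
        : build-dir bin                 # relative build dir; with --build-dir=DIR the
                                        # effective dir becomes DIR/example/bin, and an
                                        # absolute value here is rejected in that case
        : requirements <threading>multi ;

    path-constant TOP : . ;             # path constant, made relative to the
                                        # invocation directory

    use-project /example-util : util ;  # bind an id to the project in util/

    exe app : app.cpp /example-util//util
        : [ conditional <toolset>gcc <variant>debug :
              <define>DEBUG_EXCEPTION <define>DEBUG_TRACE ] ;

    explicit app ;                      # built only when explicitly requested
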
Modified: branches/release/tools/build/v2/build/property-set.jam
==============================================================================
--- branches/release/tools/build/v2/build/property-set.jam (original)
+++ branches/release/tools/build/v2/build/property-set.jam 2008-03-15 14:55:28 EDT (Sat, 15 Mar 2008)
@@ -1,7 +1,7 @@
-# Copyright 2003 Dave Abrahams
-# Copyright 2003, 2004, 2005, 2006 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+# Copyright 2003 Dave Abrahams
+# Copyright 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
import "class" : new ;
import feature ;
@@ -10,46 +10,45 @@
import set ;
# Class for storing a set of properties.
-# - there's 1<->1 correspondence between identity and value. No
-# two instances of the class are equal. To maintain this property,
-# the 'property-set.create' rule should be used to create new instances.
-# Instances are immutable.
-#
-# - each property is classified with regard to it's effect on build
-# results. Incidental properties have no effect on build results, from
-# Boost.Build point of view. Others are either free, or non-free, which we
-# call 'base'. Each property belong to exactly one of those categories and
-# it's possible to get list of properties in each category.
+# - there's 1<->1 correspondence between identity and value. No two instances of
+# the class are equal. To maintain this property, the 'property-set.create'
+# rule should be used to create new instances. Instances are immutable.
#
-# In addition, it's possible to get list of properties with specific
+# - each property is classified with regard to its effect on build results.
+# Incidental properties have no effect on build results, from Boost.Build's
+# point of view. Others are either free, or non-free, which we call 'base'.
+# Each property belongs to exactly one of those categories and it's possible
+# to get a list of properties in each category.
+#
+# In addition, it's possible to get a list of properties with a specific
# attribute.
#
# - several operations, like and refine and as-path are provided. They all use
# caching whenever possible.
#
-class property-set
+class property-set
{
+ import errors ;
import feature ;
- import property-set ;
+ import path ;
import property ;
+ import property-set ;
import set ;
- import path ;
- import errors ;
-
+
rule __init__ ( raw-properties * )
- {
+ {
self.raw = $(raw-properties) ;
-
+
for local p in $(raw-properties)
{
if ! $(p:G)
{
- errors.error "Invalid property: '$(p)'" ;
+ errors.error "Invalid property: '$(p)'" ;
}
-
- local att = [ feature.attributes $(p:G) ] ;
- # A feature can be both incidental and free,
- # in which case we add it to incidental.
+
+ local att = [ feature.attributes $(p:G) ] ;
+ # A feature can be both incidental and free, in which case we add it
+ # to incidental.
if incidental in $(att)
{
self.incidental += $(p) ;
@@ -58,11 +57,11 @@
{
self.free += $(p) ;
}
- else
+ else
{
self.base += $(p) ;
}
-
+
if dependency in $(att)
{
self.dependency += $(p) ;
@@ -71,8 +70,8 @@
{
self.non-dependency += $(p) ;
}
-
- if [ MATCH (:) : $(p:G=) ]
+
+ if [ MATCH (:) : $(p:G=) ]
{
self.conditional += $(p) ;
}
@@ -80,80 +79,76 @@
{
self.non-conditional += $(p) ;
}
-
-
+
if propagated in $(att)
{
self.propagated += $(p) ;
- }
+ }
if link-incompatible in $(att)
{
self.link-incompatible += $(p) ;
- }
+ }
}
-
}
-
-
- # Returns Jam list of stored properties
+
+ # Returns Jam list of stored properties.
rule raw ( )
{
return $(self.raw) ;
}
-
+
rule str ( )
{
return "[" $(self.raw) "]" ;
}
-
- # Returns properties that are neither incidental nor free
+
+ # Returns properties that are neither incidental nor free.
rule base ( )
{
return $(self.base) ;
}
-
-
- # Returns free properties which are not dependency properties
+
+ # Returns free properties which are not dependency properties.
rule free ( )
{
return $(self.free) ;
}
-
- # Returns dependency properties
+
+ # Returns dependency properties.
rule dependency ( )
{
return $(self.dependency) ;
}
-
+
rule non-dependency ( )
{
return $(self.non-dependency) ;
}
-
+
rule conditional ( )
{
return $(self.conditional) ;
}
-
+
rule non-conditional ( )
{
return $(self.non-conditional) ;
}
-
- # Returns incidental properties
+
+ # Returns incidental properties.
rule incidental ( )
{
return $(self.incidental) ;
}
-
+
rule refine ( ps )
{
if ! $(self.refined.$(ps))
{
- local r = [ property.refine $(self.raw) : [ $(ps).raw ] ] ;
+ local r = [ property.refine $(self.raw) : [ $(ps).raw ] ] ;
if $(r[1]) != "@error"
{
- self.refined.$(ps) = [ property-set.create $(r) ] ;
+ self.refined.$(ps) = [ property-set.create $(r) ] ;
}
else
{
@@ -162,7 +157,7 @@
}
return $(self.refined.$(ps)) ;
}
-
+
rule expand ( )
{
if ! $(self.expanded)
@@ -171,8 +166,7 @@
}
return $(self.expanded) ;
}
-
-
+
rule expand-composites ( )
{
if ! $(self.composites)
@@ -182,18 +176,18 @@
}
return $(self.composites) ;
}
-
+
rule evaluate-conditionals ( context ? )
{
context ?= $(__name__) ;
if ! $(self.evaluated.$(context))
{
- self.evaluated.$(context) = [ property-set.create
+ self.evaluated.$(context) = [ property-set.create
[ property.evaluate-conditionals-in-context $(self.raw) : [ $(context).raw ] ] ] ;
}
- return $(self.evaluated.$(context)) ;
+ return $(self.evaluated.$(context)) ;
}
-
+
rule propagated ( )
{
if ! $(self.propagated-ps)
@@ -201,19 +195,18 @@
self.propagated-ps = [ property-set.create $(self.propagated) ] ;
}
return $(self.propagated-ps) ;
- }
-
+ }
+
rule link-incompatible ( )
{
if ! $(self.link-incompatible-ps)
{
- self.link-incompatible-ps =
+ self.link-incompatible-ps =
[ property-set.create $(self.link-incompatible) ] ;
}
return $(self.link-incompatible-ps) ;
}
-
-
+
rule run-actions ( )
{
if ! $(self.run)
@@ -222,37 +215,34 @@
}
return $(self.run) ;
}
-
+
rule add-defaults ( )
{
if ! $(self.defaults)
{
- self.defaults = [ property-set.create
+ self.defaults = [ property-set.create
[ feature.add-defaults $(self.raw) ] ] ;
}
return $(self.defaults) ;
}
-
-
+
rule as-path ( )
{
if ! $(self.as-path)
{
self.as-path = [ property.as-path $(self.base) ] ;
- }
+ }
return $(self.as-path) ;
- }
-
- # Computes the target path that should be used for
- # target with these properties.
+ }
+
+ # Computes the path to be used for a target with the given properties.
# Returns a list of
# - the computed path
- # - if the path is relative to build directory, a value of
- # 'true'.
+ # - if the path is relative to the build directory, a value of 'true'.
rule target-path ( )
{
if ! $(self.target-path)
- {
+ {
# The <location> feature can be used to explicitly
# change the location of generated targets
local l = [ get <location> ] ;
@@ -262,62 +252,61 @@
}
else
{
- local p = [ as-path ] ;
+ local p = [ as-path ] ;
# Really, an ugly hack. Boost regression test system requires
# specific target paths, and it seems that changing it to handle
- # other directory layout is really hard. For that reason,
- # we teach V2 to do the things regression system requires.
- # The value o '<location-prefix>' is predended to the path.
- local prefix = [ get <location-prefix> ] ;
+ # other directory layouts is really hard. For that reason, we teach
+ # V2 to do the things the regression system requires. The value of
+ # '<location-prefix>' is prepended to the path.
+ local prefix = [ get <location-prefix> ] ;
if $(prefix)
{
self.target-path = [ path.join $(prefix) $(p) ] ;
- }
+ }
else
{
self.target-path = $(p) ;
- }
+ }
if ! $(self.target-path)
{
self.target-path = . ;
- }
+ }
# The path is relative to build dir.
self.target-path += true ;
- }
- }
+ }
+ }
return $(self.target-path) ;
}
-
-
+
rule add ( ps )
{
- if ! $(self.added.$(ps))
+ if ! $(self.added.$(ps))
{
self.added.$(ps) = [ property-set.create $(self.raw) [ $(ps).raw ] ] ;
}
return $(self.added.$(ps)) ;
- }
-
+ }
+
rule add-raw ( properties * )
{
return [ add [ property-set.create $(properties) ] ] ;
- }
-
+ }
+
rule link-incompatible-with ( ps )
{
if ! $(.li.$(ps))
{
local li1 = [ $(__name__).link-incompatible ] ;
- local li2 = [ $(ps).link-incompatible ] ;
- if [ set.equal $(li1) : $(li2) ]
+ local li2 = [ $(ps).link-incompatible ] ;
+ if [ set.equal $(li1) : $(li2) ]
{
.li.$(ps) = false ;
}
else
{
.li.$(ps) = true ;
- }
- }
+ }
+ }
if $(.li.$(ps)) = true
{
return true ;
@@ -325,85 +314,79 @@
else
{
return ;
- }
+ }
}
-
-
# Returns all values of 'feature'.
rule get ( feature )
{
if ! $(self.map-built)
{
- # For each feature, create member var and assign all
- # values to it. Since all regular member vars start with
- # 'self', there will be no conflicts between names.
+ # For each feature, create a member var and assign all values to it.
+ # Since all regular member vars start with 'self', there will be no
+ # conflicts between names.
self.map-built = true ;
for local v in $(self.raw)
{
$(v:G) += $(v:G=) ;
- }
+ }
}
-
return $($(feature)) ;
}
-
}
-# Creates new 'property-set' instance for the given raw properties,
-# or returns an already existing ones.
+
+# Creates a new 'property-set' instance for the given raw properties or returns
+# an already existing one.
rule create ( raw-properties * )
{
- raw-properties = [ sequence.unique
+ raw-properties = [ sequence.unique
[ sequence.insertion-sort $(raw-properties) ] ] ;
-
+
local key = $(raw-properties:J=-:E=) ;
-
- if ! $(.ps.$(key))
+
+ if ! $(.ps.$(key))
{
.ps.$(key) = [ new property-set $(raw-properties) ] ;
}
- return $(.ps.$(key)) ;
+ return $(.ps.$(key)) ;
}
NATIVE_RULE property-set : create ;
-# Creates new 'property-set' instances after checking
-# that all properties are valid and converting incidental
-# properties into gristed form.
+
+# Creates a new 'property-set' instance after checking that all properties are
+# valid and converting implicit property values into gristed form.
rule create-with-validation ( raw-properties * )
{
property.validate $(raw-properties) ;
-
return [ create [ property.make $(raw-properties) ] ] ;
}
-# Creates a property-set from the input given by the user, in the
-# context of 'jamfile-module' at 'location'
+
+# Creates a property-set from the input given by the user, in the context of
+# 'jamfile-module' at 'location'.
rule create-from-user-input ( raw-properties * : jamfile-module location )
{
local specification = [ property.translate-paths $(raw-properties)
- : $(location) ] ;
+ : $(location) ] ;
specification = [ property.translate-indirect $(specification)
- : $(jamfile-module) ] ;
- specification =
- [ property.expand-subfeatures-in-conditions $(specification) ] ;
- specification = [ property.make $(specification) ] ;
- result = [ property-set.create $(specification) ] ;
- return $(result) ;
+ : $(jamfile-module) ] ;
+ specification =
+ [ property.expand-subfeatures-in-conditions $(specification) ] ;
+ specification = [ property.make $(specification) ] ;
+ return [ property-set.create $(specification) ] ;
}
-# Refines requirements with requirements provided by the user.
-# Specially handles "-<property>value" syntax in specification
-# to remove given requirements.
-# - parent-requirements -- property-set object with requirements
-# to refine
-# - specification -- string list of requirements provided by the use
-# - project-module -- the module to which context indirect features
-# will be bound.
-# - location -- the path to which path features are relative.
-#
+
+# Refines requirements with requirements provided by the user. Handles the
+# "-<property>value" syntax in the specification specially, using it to remove
+# the given requirements.
+# - parent-requirements -- property-set object with requirements to refine.
+# - specification -- string list of requirements provided by the user.
+# - project-module -- module to which context indirect features will be
+# bound.
+# - location -- path to which path features are relative.
#
-rule refine-from-user-input ( parent-requirements : specification *
+rule refine-from-user-input ( parent-requirements : specification *
: project-module : location )
{
if ! $(specification)
@@ -414,7 +397,7 @@
{
local add-requirements ;
local remove-requirements ;
-
+
for local r in $(specification)
{
local m = [ MATCH "^-(.*)" : $(r) ] ;
@@ -427,38 +410,35 @@
add-requirements += $(r) ;
}
}
-
+
if $(remove-requirements)
{
- # Need to create property set, so that path features
- # and indirect features are translated just like they
- # are in project requirements.
- local ps = [ property-set.create-from-user-input
+ # Need to create a property set, so that path features and indirect
+ # features are translated just like they are in project
+ # requirements.
+ local ps = [ property-set.create-from-user-input
$(remove-requirements) : $(project-module) $(location) ] ;
-
- parent-requirements = [ property-set.create
- [ set.difference [ $(parent-requirements).raw ]
- : [ $(ps).raw ] ] ] ;
+
+ parent-requirements = [ property-set.create
+ [ set.difference [ $(parent-requirements).raw ]
+ : [ $(ps).raw ] ] ] ;
specification = $(add-requirements) ;
}
- local requirements = [ property-set.create-from-user-input
+ local requirements = [ property-set.create-from-user-input
$(specification) : $(project-module) $(location) ] ;
-
- requirements = [ $(parent-requirements).refine $(requirements) ] ;
- return $(requirements) ;
+
+ return [ $(parent-requirements).refine $(requirements) ] ;
}
}
-
-# Returns property-set with empty set of properties.
+# Returns a property-set with an empty set of properties.
rule empty ( )
{
if ! $(.empty)
{
- .empty = [ create ] ;
+ .empty = [ create ] ;
}
-
return $(.empty) ;
}
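
The property-set interface documented above is easiest to see in use. The
following is an illustrative sketch, not part of this patch; it assumes it is
run from inside a Boost.Build module where 'property-set' can be imported, and
the concrete property values are arbitrary examples.

    import property-set ;

    # 'create' maintains a 1<->1 mapping between value and identity: the same
    # raw properties, in any order, yield the same immutable instance, so
    # instances can be compared directly.
    local a = [ property-set.create <optimization>off <rtti>on ] ;
    local b = [ property-set.create <rtti>on <optimization>off ] ;  # same object as $(a)

    ECHO [ $(a).raw ] ;   # the stored Jam list of properties, sorted and unique

    # 'refine' overrides non-free, non-conditional properties with the values
    # from the argument set, returning another cached instance.
    local r = [ $(a).refine [ property-set.create <optimization>speed ] ] ;

    # 'add-raw' merges additional raw properties into a new instance.
    local d = [ $(a).add-raw <define>FOO ] ;
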
Modified: branches/release/tools/build/v2/build/property.jam
==============================================================================
--- branches/release/tools/build/v2/build/property.jam (original)
+++ branches/release/tools/build/v2/build/property.jam 2008-03-15 14:55:28 EDT (Sat, 15 Mar 2008)
@@ -1,52 +1,50 @@
-# Copyright 2001, 2002, 2003 Dave Abrahams
-# Copyright 2006 Rene Rivera
-# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-import utility : ungrist ;
-import sequence : unique ;
-import errors : error ;
+# Copyright 2001, 2002, 2003 Dave Abrahams
+# Copyright 2006 Rene Rivera
+# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import errors ;
import feature ;
+import indirect ;
+import path ;
import regex ;
import string ;
import sequence ;
import set ;
-import path ;
-import assert ;
-import indirect ;
+import utility ;
-# Refines 'properties' by overriding any non-free properties
-# for which a different value is specified in 'requirements'.
-# Conditional requirements are just added without modification.
-# Returns the resulting list of properties.
+
+# Refines 'properties' by overriding any non-free and non-conditional properties
+# for which a different value is specified in 'requirements'. Returns the
+# resulting list of properties.
rule refine ( properties * : requirements * )
{
local result ;
local error ;
-
- # All the elements of requirements should be present in the result
- # Record them so that we can handle 'properties'.
+
+ # All the 'requirements' elements should be present in the result. Record
+ # them so that we can handle 'properties'.
for local r in $(requirements)
{
# Don't consider conditional requirements.
if ! [ MATCH (:) : $(r:G=) ]
- {
- # Note: cannot use local here, so take an ugly name
+ {
+ # Note: cannot use a local variable here, so use an ugly name.
__require__$(r:G) = $(r:G=) ;
- }
+ }
}
for local p in $(properties)
- {
- # No processing for free properties
+ {
if [ MATCH (:) : $(p:G=) ]
{
- # Skip conditional properties
+ # Do not modify conditional properties.
result += $(p) ;
- }
+ }
else if free in [ feature.attributes $(p:G) ]
{
+ # Do not modify free properties.
result += $(p) ;
}
else
@@ -54,8 +52,7 @@
local required-value = $(__require__$(p:G)) ;
if $(required-value)
{
- local value = $(p:G=) ;
- if $(value) != $(required-value)
+ if $(p:G=) != $(required-value)
{
result += $(p:G)$(required-value) ;
}
@@ -71,25 +68,26 @@
}
}
- # Unset our ugly map.
+ # Unset our ugly map.
for local r in $(requirements)
{
- __require__$(r:G) = ;
+ __require__$(r:G) = ;
}
-
+
if $(error)
{
return $(error) ;
}
else
{
- return [ unique $(result) $(requirements) ] ;
+ return [ sequence.unique $(result) $(requirements) ] ;
}
}
-# Removes all conditional properties which conditions are not met
-# For those with met conditions, removes the condition. Properies
-# in conditions are looked up in 'context'
+
+# Removes all conditional properties whose conditions are not met. For those
+# with met conditions, removes the condition. Properties in conditions are
+# looked up in 'context'.
rule evaluate-conditionals-in-context ( properties * : context * )
{
local base ;
@@ -103,31 +101,32 @@
else
{
base += $(p) ;
- }
+ }
}
local result = $(base) ;
for local p in $(conditionals)
{
- # Separate condition and property
+ # Separate condition and property.
local s = [ MATCH (.*):(<.*) : $(p) ] ;
- # Split condition into individual properties
+ # Split condition into individual properties.
local c = [ regex.split $(s[1]) "," ] ;
- # Evaluate condition
+ # Evaluate condition.
if $(c) in $(context)
{
result += $(s[2]) ;
- }
+ }
}
- return $(result) ;
+ return $(result) ;
}
+
rule expand-subfeatures-in-conditions ( properties * )
{
local result ;
for local p in $(properties)
{
- local s = [ MATCH (.*):(<.*) : $(p) ] ;
+ local s = [ MATCH (.*):(<.*) : $(p) ] ;
if ! $(s)
{
result += $(p) ;
@@ -135,20 +134,20 @@
else
{
local condition = $(s[1]) ;
- # Condition might include several elements
+ local value = $(s[2]) ;
+ # Condition might include several elements.
condition = [ regex.split $(condition) "," ] ;
- local value = $(s[2]) ;
local e ;
for local c in $(condition)
- {
- # It common that condition includes a toolset which
- # was never defined, or mentiones subfeatures which
- # were never defined. In that case, validation will
- # only produce an spirious error, so prevent
- # validation by passing 'true' as second parameter.
+ {
+ # It is common for a condition to include a toolset or
+ # subfeatures that have not been defined. In that case we want
+ # the condition to simply 'never be satisfied' and validation
+ # would only produce a spurious error so we prevent it by
+ # passing 'true' as the second parameter.
e += [ feature.expand-subfeatures $(c) : true ] ;
}
-
+
if $(e) = $(condition)
{
result += $(p) ;
@@ -157,17 +156,15 @@
{
local individual-subfeatures = [ set.difference $(e) : $(condition) ] ;
result += $(individual-subfeatures:J=,):$(value) ;
- }
- }
- }
+ }
+ }
+ }
return $(result) ;
}
-
-# Helper for as-path, below. Orders properties with the implicit ones
-# first, and within the two sections in alphabetical order of feature
-# name.
+# Helper for as-path, below. Orders properties with the implicit ones first, and
+# within the two sections in alphabetical order of feature name.
local rule path-order ( x y )
{
if $(y:G) && ! $(x:G)
@@ -185,7 +182,7 @@
x = [ feature.expand-subfeatures $(x) ] ;
y = [ feature.expand-subfeatures $(y) ] ;
}
-
+
if $(x[1]) < $(y[1])
{
return true ;
@@ -193,6 +190,7 @@
}
}
+
local rule abbreviate-dashed ( string )
{
local r ;
@@ -203,11 +201,13 @@
return $(r:J=-) ;
}
+
local rule identity ( string )
{
return $(string) ;
}
+
if --abbreviate-paths in [ modules.peek : ARGV ]
{
.abbrev = abbreviate-dashed ;
@@ -217,37 +217,38 @@
.abbrev = identity ;
}
-# Returns a path which represents the given expanded property set.
+
+# Returns a path representing the given expanded property set.
rule as-path ( properties * )
{
local entry = .result.$(properties:J=-) ;
-
+
if ! $($(entry))
{
# trim redundancy
properties = [ feature.minimize $(properties) ] ;
-
+
# sort according to path-order
properties = [ sequence.insertion-sort $(properties) : path-order ] ;
-
+
local components ;
for local p in $(properties)
{
if $(p:G)
{
- local f = [ ungrist $(p:G) ] ;
+ local f = [ utility.ungrist $(p:G) ] ;
p = $(f)-$(p:G=) ;
}
-
components += [ $(.abbrev) $(p) ] ;
}
-
+
$(entry) = $(components:J=/) ;
- }
-
+ }
+
return $($(entry)) ;
}
+
# Exit with error if property is not valid.
local rule validate1 ( property )
{
@@ -259,17 +260,19 @@
if ! [ feature.valid $(feature) ]
{
- feature = [ ungrist $(property:G) ] ; # Ungrist for better error messages
+ # Ungrist for better error messages.
+ feature = [ utility.ungrist $(property:G) ] ;
msg = "unknown feature '$(feature)'" ;
}
- else if $(value) && ! free in [ feature.attributes $(feature) ]
+ else if $(value) && ! free in [ feature.attributes $(feature) ]
{
feature.validate-value-string $(feature) $(value) ;
- }
+ }
else if ! ( $(value) || ( optional in [ feature.attributes $(feature) ] ) )
{
- feature = [ ungrist $(property:G) ] ; # Ungrist for better error messages
- msg = "No value specified for feature '$(feature)'" ;
+ # Ungrist for better error messages.
+ feature = [ utility.ungrist $(property:G) ] ;
+ msg = "No value specified for feature '$(feature)'" ;
}
}
else
@@ -277,12 +280,13 @@
local feature = [ feature.implied-feature $(property) ] ;
feature.validate-value-string $(feature) $(property) ;
}
- if $(msg)
+ if $(msg)
{
- error "Invalid property "'$(property:J=" ")'": "$(msg:J=" "). ;
+ errors.error "Invalid property "'$(property:J=" ")'": "$(msg:J=" "). ;
}
}
+
rule validate ( properties * )
{
for local p in $(properties)
@@ -291,6 +295,7 @@
}
}
+
rule validate-property-sets ( property-sets * )
{
for local s in $(property-sets)
@@ -299,12 +304,13 @@
}
}
-# Makes a property set from 'specification', converting implicit values into
-# full properties.
+
+# Expands any implicit property values in the given property 'specification' so
+# they explicitly state their feature.
rule make ( specification * )
{
local result ;
- for local e in $(specification)
+ for local e in $(specification)
{
if $(e:G)
{
@@ -313,18 +319,19 @@
else if [ feature.is-implicit-value $(e) ]
{
local feature = [ feature.implied-feature $(e) ] ;
- result += $(feature)$(e) ;
+ result += $(feature)$(e) ;
}
else
{
- error "'$(e)' is not a valid for property specification" ;
+ errors.error "'$(e)' is not a valid property specification" ;
}
}
return $(result) ;
}
-# Returns a property sets which include all the elements in 'properties' that
-# do not have attributes listed in 'attributes'.
+
+# Returns a property set containing all the elements in 'properties' that do not
+# have their attributes listed in 'attributes'.
rule remove ( attributes + : properties * )
{
local result ;
@@ -338,8 +345,9 @@
return $(result) ;
}
-# Returns a property set which include all properties in 'properties' that have
-# any of 'attributes'.
+
+# Returns a property set containing all the elements in 'properties' that have
+# their attributes listed in 'attributes'.
rule take ( attributes + : properties * )
{
local result ;
@@ -353,15 +361,16 @@
return $(result) ;
}
-# Selects properties which correspond to any of the given features.
+
+# Selects properties corresponding to any of the given features.
rule select ( features * : properties * )
{
local result ;
-
- # add any missing angle brackets
+
+ # Add any missing angle brackets.
local empty = "" ;
features = $(empty:G=$(features)) ;
-
+
for local p in $(properties)
{
if $(p:G) in $(features)
@@ -372,17 +381,17 @@
return $(result) ;
}
-# Returns a modified version of properties with all values of the
-# given feature replaced by the given value.
-# If 'value' is empty the feature will be removed
+
+# Returns a modified version of properties with all values of the given feature
+# replaced by the given value. If 'value' is empty the feature will be removed.
rule change ( properties * : feature value ? )
{
- local result ;
+ local result ;
for local p in $(properties)
{
if $(p:G) = $(feature)
{
- result += $(value:G=$(feature)) ;
+ result += $(value:G=$(feature)) ;
}
else
{
@@ -392,61 +401,61 @@
return $(result) ;
}
-# If 'property' is conditional property, returns
-# condition and the property, e.g
-# <variant>debug,<toolset>gcc:<inlining>full will become
-# <variant>debug,<toolset>gcc <inlining>full.
-# Otherwise, returns empty string.
+
+# If 'property' is a conditional property, returns the condition and the
+# property. E.g. <variant>debug,<toolset>gcc:<inlining>full will become
+# <variant>debug,<toolset>gcc <inlining>full. Otherwise, returns an empty
+# string.
rule split-conditional ( property )
{
local m = [ MATCH "(.+):<(.+)" : $(property) ] ;
if $(m)
{
return $(m[1]) <$(m[2]) ;
- }
+ }
}
-# Interpret all path properties in 'properties' as relative to 'path'
-# The property values are assumed to be in system-specific form, and
-# will be translated into normalized form.
+# Interpret all path properties in 'properties' as relative to 'path'. The
+# property values are assumed to be in system-specific form, and will be
+# translated into normalized form.
rule translate-paths ( properties * : path )
{
local result ;
for local p in $(properties)
{
local split = [ split-conditional $(p) ] ;
- local condition = "" ;
+ local condition = "" ;
if $(split)
{
condition = $(split[1]): ;
p = $(split[2]) ;
}
-
- if path in [ feature.attributes $(p:G) ]
+
+ if path in [ feature.attributes $(p:G) ]
{
local values = [ regex.split $(p:TG=) "&&" ] ;
local t ;
for local v in $(values)
{
- t += [ path.root [ path.make $(v) ] $(path) ] ;
+ t += [ path.root [ path.make $(v) ] $(path) ] ;
}
- t = $(t:J="&&") ;
+ t = $(t:J="&&") ;
result += $(condition)$(t:TG=$(p:G)) ;
}
else
{
result += $(condition)$(p) ;
- }
+ }
}
return $(result) ;
}
-# Assumes that all feature values that start with '@' are
-# names of rules, used in 'context-module'. Such rules
-# can be either local to the module or global. Converts such
-# values into 'indirect-rule' format (see indirect.jam), so
-# that they can be called from other modules.
+
+# Assumes that all feature values that start with '@' are names of rules, used
+# in 'context-module'. Such rules can be either local to the module or global.
+# Converts such values into 'indirect-rule' format (see indirect.jam), so they
+# can be called from other modules.
rule translate-indirect ( specification * : context-module )
{
local result ;
@@ -458,23 +467,21 @@
local v ;
if [ MATCH "^([^%]*)%([^%]+)$" : $(m) ]
{
- # Rule is already in indirect format
+ # Rule is already in indirect format.
v = $(m) ;
}
else
{
if ! [ MATCH ".*([.]).*" : $(m) ]
{
- # This is unqualified rule name. The user might want
- # to set flags on this rule name, and toolset.flag
- # auto-qualifies the rule name. Need to do the same
- # here so set flag setting work.
- # We can arrange for toolset.flag to *not* auto-qualify
- # the argument, but then two rules defined in two Jamfiles
- # will conflict.
+ # This is an unqualified rule name. The user might want to
+ # set flags on this rule name and toolset.flag
+ # auto-qualifies it. Need to do the same here so flag
+ # setting works. We can arrange for toolset.flag to *not*
+ # auto-qualify the argument but then two rules defined in
+ # two Jamfiles would conflict.
m = $(context-module).$(m) ;
}
-
v = [ indirect.make $(m) : $(context-module) ] ;
}
@@ -484,25 +491,24 @@
else
{
result += $(p) ;
- }
+ }
}
- return $(result) ;
+ return $(result) ;
}
-# Class which maintains a property set -> string
-# mapping
+# Class which maintains a property set -> string mapping.
class property-map
{
+ import errors ;
import numbers ;
import sequence ;
- import errors : error ;
-
+
rule __init__ ( )
- {
+ {
self.next-flag = 1 ;
}
-
+
# Associate 'value' with 'properties'
rule insert ( properties + : value )
{
@@ -513,18 +519,16 @@
self.next-flag = [ numbers.increment $(self.next-flag) ] ;
}
- # Return the value associated with 'properties'
- # or any subset of it. If more than one
- # subset has value assigned to it, return the
- # value for the longest subset, if it's unique.
+ # Returns the value associated with 'properties' or any subset of it. If
+ # more than one subset has a value assigned to it, returns the value for the
+ # longest subset, if it's unique.
rule find ( properties + )
{
return [ find-replace $(properties) ] ;
}
-
- # Find the value associated with 'properties'.
- # If 'value' parameter is given, replaces the found value
- # Returns the value that were stored originally.
+
+ # Returns the value associated with 'properties'. If 'value' parameter is
+ # given, replaces the found value.
rule find-replace ( properties + : value ? )
{
# First find all matches
@@ -535,106 +539,95 @@
if $(self.properties.$(i)) in $(properties)
{
matches += $(i) ;
- match-ranks += [ sequence.length
- $(self.properties.$(i)) ] ;
+ match-ranks += [ sequence.length $(self.properties.$(i)) ] ;
}
}
- local best = [ sequence.select-highest-ranked
- $(matches) : $(match-ranks) ] ;
+ local best = [ sequence.select-highest-ranked $(matches)
+ : $(match-ranks) ] ;
if $(best[2])
{
- error "Ambiguous key" ;
- }
+ errors.error "Ambiguous key" ;
+ }
local original = $(self.value.$(best)) ;
if $(value)
{
self.value.$(best) = $(value) ;
- }
+ }
return $(original) ;
- }
+ }
}
+
local rule __test__ ( )
{
+ import assert ;
+ import "class" : new ;
import errors : try catch ;
import feature ;
- import feature : feature subfeature compose ;
-
- # local rules must be explicitly re-imported
+
+ # Local rules must be explicitly re-imported.
import property : path-order abbreviate-dashed ;
-
+
feature.prepare-test property-test-temp ;
- feature toolset : gcc : implicit symmetric ;
- subfeature toolset gcc : version : 2.95.2 2.95.3 2.95.4
- 3.0 3.0.1 3.0.2 : optional ;
- feature define : : free ;
- feature runtime-link : dynamic static : symmetric link-incompatible ;
- feature optimization : on off ;
- feature variant : debug release : implicit composite symmetric ;
- feature rtti : on off : link-incompatible ;
+ feature.feature toolset : gcc : implicit symmetric ;
+ feature.subfeature toolset gcc : version : 2.95.2 2.95.3 2.95.4 3.0 3.0.1
+ 3.0.2 : optional ;
+ feature.feature define : : free ;
+ feature.feature runtime-link : dynamic static : symmetric link-incompatible ;
+ feature.feature optimization : on off ;
+ feature.feature variant : debug release : implicit composite symmetric ;
+ feature.feature rtti : on off : link-incompatible ;
- compose <variant>debug : <define>_DEBUG <optimization>off ;
- compose <variant>release : <define>NDEBUG <optimization>on ;
+ feature.compose <variant>debug : <define>_DEBUG <optimization>off ;
+ feature.compose <variant>release : <define>NDEBUG <optimization>on ;
- import assert ;
- import "class" : new ;
-
- validate <toolset>gcc <toolset>gcc-3.0.1 : $(test-space) ;
-
- assert.true path-order $(test-space) debug <define>foo ;
+ validate <toolset>gcc <toolset>gcc-3.0.1 : $(test-space) ;
+
+ assert.true path-order $(test-space) debug <define>foo ;
assert.false path-order $(test-space) <define>foo debug ;
- assert.true path-order $(test-space) gcc debug ;
+ assert.true path-order $(test-space) gcc debug ;
assert.false path-order $(test-space) debug gcc ;
- assert.true path-order $(test-space) <optimization>on <rtti>on ;
+ assert.true path-order $(test-space) <optimization>on <rtti>on ;
assert.false path-order $(test-space) <rtti>on <optimization>on ;
-
+
assert.result-equal <toolset>gcc <rtti>off <define>FOO
: refine <toolset>gcc <rtti>off
: <define>FOO
- : $(test-space)
- ;
+ : $(test-space) ;
assert.result-equal <toolset>gcc <optimization>on
: refine <toolset>gcc <optimization>off
: <optimization>on
- : $(test-space)
- ;
+ : $(test-space) ;
assert.result-equal <toolset>gcc <rtti>off
- : refine <toolset>gcc : <rtti>off : $(test-space)
- ;
+ : refine <toolset>gcc : <rtti>off : $(test-space) ;
assert.result-equal <toolset>gcc <rtti>off <rtti>off:<define>FOO
- : refine <toolset>gcc : <rtti>off <rtti>off:<define>FOO
- : $(test-space)
- ;
-
- assert.result-equal <toolset>gcc:<define>foo <toolset>gcc:<define>bar
- : refine <toolset>gcc:<define>foo : <toolset>gcc:<define>bar
- : $(test-space)
- ;
+ : refine <toolset>gcc : <rtti>off <rtti>off:<define>FOO
+ : $(test-space) ;
+
+ assert.result-equal <toolset>gcc:<define>foo <toolset>gcc:<define>bar
+ : refine <toolset>gcc:<define>foo : <toolset>gcc:<define>bar
+ : $(test-space) ;
assert.result <define>MY_RELEASE
- : evaluate-conditionals-in-context
+ : evaluate-conditionals-in-context
<variant>release,<rtti>off:<define>MY_RELEASE
- : <toolset>gcc <variant>release <rtti>off
-
- ;
+ : <toolset>gcc <variant>release <rtti>off ;
assert.result debug
- : as-path <optimization>off <variant>debug
- : $(test-space)
- ;
+ : as-path <optimization>off <variant>debug
+ : $(test-space) ;
assert.result gcc/debug/rtti-off
- : as-path <toolset>gcc <optimization>off <rtti>off <variant>debug
- : $(test-space)
- ;
+ : as-path <toolset>gcc <optimization>off <rtti>off <variant>debug
+ : $(test-space) ;
assert.result optmz-off : abbreviate-dashed optimization-off ;
assert.result rntm-lnk-sttc : abbreviate-dashed runtime-link-static ;
-
+
try ;
validate <feature>value : $(test-space) ;
catch "Invalid property '<feature>value': unknown feature 'feature'." ;
@@ -642,7 +635,7 @@
try ;
validate <rtti>default : $(test-space) ;
catch \"default\" is not a known value of feature <rtti> ;
-
+
validate <define>WHATEVER : $(test-space) ;
try ;
@@ -652,21 +645,20 @@
try ;
validate value : $(test-space) ;
catch "value" is not a value of an implicit feature ;
-
- assert.result-equal <rtti>on
+ assert.result-equal <rtti>on
: remove free implicit : <toolset>gcc <define>foo <rtti>on : $(test-space) ;
- assert.result-equal <include>a
+ assert.result-equal <include>a
: select include : <include>a <toolset>gcc ;
- assert.result-equal <include>a
+ assert.result-equal <include>a
: select include bar : <include>a <toolset>gcc ;
assert.result-equal <include>a <toolset>gcc
: select include <bar> <toolset> : <include>a <toolset>gcc ;
-
- assert.result-equal <toolset>kylix <include>a
+
+ assert.result-equal <toolset>kylix <include>a
: change <toolset>gcc <include>a : <toolset> kylix ;
pm = [ new property-map ] ;
@@ -674,31 +666,23 @@
$(pm).insert <toolset>gcc <os>NT : obj ;
$(pm).insert <toolset>gcc <os>CYGWIN : obj ;
- assert.equal o
- : [ $(pm).find <toolset>gcc ] ;
+ assert.equal o : [ $(pm).find <toolset>gcc ] ;
- assert.equal obj
- : [ $(pm).find <toolset>gcc <os>NT ] ;
+ assert.equal obj : [ $(pm).find <toolset>gcc <os>NT ] ;
try ;
$(pm).find <toolset>gcc <os>NT <os>CYGWIN ;
catch "Ambiguous key" ;
- # Test ordinary properties
- assert.result
- : split-conditional <toolset>gcc
- ;
-
- # Test properties with ":"
- assert.result
- : split-conditional <define>FOO=A::B
- ;
-
- # Test conditional feature
+ # Test ordinary properties.
+ assert.result : split-conditional <toolset>gcc ;
+
+ # Test properties with ":".
+ assert.result : split-conditional <define>FOO=A::B ;
+
+ # Test conditional feature.
assert.result-equal <toolset>gcc,<toolset-gcc:version>3.0 <define>FOO
- : split-conditional <toolset>gcc,<toolset-gcc:version>3.0:<define>FOO
- ;
-
+ : split-conditional <toolset>gcc,<toolset-gcc:version>3.0:<define>FOO ;
+
feature.finish-test property-test-temp ;
}
-
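A hypothetical Jamfile sketch (target and file names are illustrative) of the
conditional-property syntax exercised by the evaluate-conditionals-in-context
test above:

    # <define>MY_RELEASE is added only when both <variant>release and <rtti>off
    # are present in the build properties.
    exe app : app.cpp : <variant>release,<rtti>off:<define>MY_RELEASE ;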
Modified: branches/release/tools/build/v2/build/scanner.jam
==============================================================================
--- branches/release/tools/build/v2/build/scanner.jam (original)
+++ branches/release/tools/build/v2/build/scanner.jam 2008-03-15 14:55:28 EDT (Sat, 15 Mar 2008)
@@ -79,6 +79,7 @@
NOCARE $(matches) ;
INCLUDES $(target) : $(matches) ;
SEARCH on $(matches) = $(target_path) $(self.includes:G=) ;
+ ISFILE $(matches) ;
scanner.propagate $(__name__) : $(matches) : $(target) ;
}
Modified: branches/release/tools/build/v2/build/targets.jam
==============================================================================
--- branches/release/tools/build/v2/build/targets.jam (original)
+++ branches/release/tools/build/v2/build/targets.jam 2008-03-15 14:55:28 EDT (Sat, 15 Mar 2008)
@@ -5,152 +5,149 @@
# (See accompanying file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)
-
-# Supports 'abstract' targets, which are targets explicitly defined in Jamfile.
+# Supports 'abstract' targets, which are targets explicitly defined in a
+# Jamfile.
#
-# Abstract targets are represented by classes derived from 'abstract-target' class.
-# The first abstract target is 'project-target', which is created for each
-# Jamfile, and can be obtained by the 'target' rule in the Jamfile's module.
-# (see project.jam).
+# Abstract targets are represented by classes derived from 'abstract-target'
+# class. The first abstract target is 'project-target', which is created for
+# each Jamfile, and can be obtained by the 'target' rule in the Jamfile's
+# module (see project.jam).
#
-# Project targets keep a list of 'main-target' instances.
-# A main target is what the user explicitly defines in a Jamfile. It is
-# possible to have several definitions for a main target, for example to have
-# different lists of sources for different platforms. So, main targets
-# keep a list of alternatives.
+# Project targets keep a list of 'main-target' instances. A main target is
+# what the user explicitly defines in a Jamfile. It is possible to have
+# several definitions for a main target, for example to have different lists
+# of sources for different platforms. So, main targets keep a list of
+# alternatives.
#
# Each alternative is an instance of 'abstract-target'. When a main target
-# subvariant is defined by some rule, that rule will decide what class to
-# use, create an instance of that class and add it to the list of alternatives
-# for the main target.
+# subvariant is defined by some rule, that rule will decide what class to use,
+# create an instance of that class and add it to the list of alternatives for
+# the main target.
#
-# Rules supplied by the build system will use only targets derived
-# from 'basic-target' class, which will provide some default behaviour.
-# There will be two classes derived from it, 'make-target', created by the
-# 'make' rule, and 'typed-target', created by rules such as 'exe' and 'dll'.
+# Rules supplied by the build system will use only targets derived from
+# 'basic-target' class, which will provide some default behaviour. There will
+# be different classes derived from it such as 'make-target', created by the
+# 'make' rule, and 'typed-target', created by rules such as 'exe' and 'lib'.
#
# +------------------------+
# |abstract-target |
# +========================+
# |name |
-# |project |
-# | |
-# |generate(properties) = 0|
-# +-----------+------------+
-# |
-# ^
-# / \
-# +-+-+
-# |
-# |
-# +------------------------+------+------------------------------+
-# | | |
-# | | |
-# +----------+-----------+ +------+------+ +------+-------+
-# | project-target | | main-target | | basic-target |
-# +======================+ 1 * +=============+ alternatives +==============+
-# | generate(properties) |o-----------+ generate |<>------------->| generate |
+# |project |
+# | |
+# |generate(properties) = 0|
+# +-----------+------------+
+# |
+# ^
+# / \
+# +-+-+
+# |
+# |
+# +------------------------+------+------------------------------+
+# | | |
+# | | |
+# +----------+-----------+ +------+------+ +------+-------+
+# | project-target | | main-target | | basic-target |
+# +======================+ 1 * +=============+ alternatives +==============+
+# | generate(properties) |o-----------+ generate |<>------------->| generate |
# | main-target | +-------------+ | construct = 0|
-# +----------------------+ +--------------+
-# |
-# ^
-# / \
-# +-+-+
-# |
-# |
-# ...--+----------------+------------------+----------------+---+
-# | | | |
-# | | | |
+# +----------------------+ +--------------+
+# |
+# ^
+# / \
+# +-+-+
+# |
+# |
+# ...--+----------------+------------------+----------------+---+
+# | | | |
+# | | | |
# ... ---+-----+ +------+-------+ +------+------+ +--------+-----+
# | | typed-target | | make-target | | stage-target |
# . +==============+ +=============+ +==============+
# . | construct | | construct | | construct |
# +--------------+ +-------------+ +--------------+
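A hypothetical Jamfile sketch of how these classes are reached from user code
(target, file, and rule names are illustrative): 'exe' and 'lib' declare
typed-target alternatives, 'make' declares a make-target alternative, and each
alternative is registered with a main-target inside the project-target created
for the Jamfile.

    exe app : app.cpp ;     # declares a typed-target alternative
    lib util : util.cpp ;   # another typed-target alternative
    # declares a make-target alternative; assumes a user-defined
    # 'run-generator' action exists in this Jamfile.
    make gen.cpp : gen.py : @run-generator ;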
+import assert ;
import "class" : new ;
-import sequence ;
-import regex ;
-import property ;
import errors ;
-import common ;
-import property-set ;
-import project ;
import feature ;
-import virtual-target ;
+import indirect ;
import path ;
+import property ;
+import property-set ;
+import sequence ;
import set ;
-import assert ;
-import indirect ;
import toolset ;
# Base class for all abstract targets.
-class abstract-target
+class abstract-target
{
- import project assert "class" errors ;
-
+ import project ;
+ import assert ;
+ import "class" ;
+ import errors ;
+
rule __init__ ( name # name of the target in Jamfile
: project-target # the project target to which this one belongs
)
- {
+ {
# Note: it might seem that we don't need either name or project at all.
- # However, there are places where we really need it. One example is error
- # messages which should name problematic targets. Another is setting correct
- # paths for sources and generated files.
-
+ # However, there are places where we really need it. One example is
+ # error messages which should name problematic targets. Another is
+ # setting correct paths for sources and generated files.
+
self.name = $(name) ;
self.project = $(project-target) ;
self.location = [ errors.nearest-user-location ] ;
- }
-
+ }
+
# Returns the name of this target.
rule name ( )
{
return $(self.name) ;
}
-
+
# Returns the project for this target.
rule project ( )
{
return $(self.project) ;
}
-
+
# Return the location where the target was declared
rule location ( )
{
return $(self.location) ;
}
-
+
# Returns a user-readable name for this target.
rule full-name ( )
{
local location = [ $(self.project).get location ] ;
return $(location)/$(self.name) ;
}
-
- # Takes a property set. Generates virtual targets for this abstract
- # target, using the specified properties, unless a different value of some
- # feature is required by the target.
- # On
- # success, returns:
- # - a property-set with the usage requirements to be
- # applied to dependents
- # - a list of produced virtual targets, which may be
- # empty.
- # If 'property-set' are empty, performs default build of this
- # target, in a way specific to derived class.
+
+ # Generates virtual targets for this abstract target using the specified
+ # properties, unless a different value of some feature is required by the
+ # target.
+ # On success, returns:
+ # - a property-set with the usage requirements to be applied to dependants
+ # - a list of produced virtual targets, which may be empty.
+ # If 'property-set' is empty, performs the default build of this target, in
+ # a way specific to the derived class.
rule generate ( property-set )
{
errors.error "method should be defined in derived classes" ;
}
-
+
rule rename ( new-name )
{
self.name = $(new-name) ;
- }
+ }
}
-if --debug-building in [ modules.peek : ARGV ]
+
+if --debug-building in [ modules.peek : ARGV ]
{
modules.poke : .debug-building : true ;
}
@@ -161,63 +158,65 @@
return $(.indent:J="") ;
}
+
rule increase-indent ( )
{
.indent += " " ;
}
+
rule decrease-indent ( )
{
.indent = $(.indent[2-]) ;
}
-# Project target class (derived from 'abstract-target')
+
+# Project target class (derived from 'abstract-target').
#
-# This class these responsibilities:
-# - maintaining a list of main target in this project and
-# building it
+# This class has the following responsibilities:
+# - Maintaining a list of main targets in this project and building them.
#
# Main targets are constructed in two stages:
-# - When Jamfile is read, a number of calls to 'add-alternative' is made.
-# At that time, alternatives can also be renamed to account for inline
-# targets.
-# - The first time 'main-target' or 'has-main-target' rule is called,
-# all alternatives are enumerated an main targets are created.
-class project-target : abstract-target
+# - When Jamfile is read, a number of calls to 'add-alternative' is made. At
+# that time, alternatives can also be renamed to account for inline targets.
+# - The first time 'main-target' or 'has-main-target' rule is called, all
+# alternatives are enumerated and main targets are created.
+class project-target : abstract-target
{
- import project targets ;
+ import project ;
+ import targets ;
import path ;
import print ;
import property-set ;
- import set : difference : set.difference ;
+ import set ;
import sequence ;
import "class" : new ;
import errors ;
-
+
rule __init__ ( name : project-module parent-project ?
: requirements * : default-build * )
- {
+ {
abstract-target.__init__ $(name) : $(__name__) ;
-
+
self.project-module = $(project-module) ;
self.location = [ project.attribute $(project-module) location ] ;
self.requirements = $(requirements) ;
self.default-build = $(default-build) ;
-
+
if $(parent-project)
- {
+ {
inherit $(parent-project) ;
- }
+ }
}
- # This is needed only by the 'make' rule. Need to find the
- # way to make 'make' work without this method.
+ # This is needed only by the 'make' rule. Need to find the way to make
+ # 'make' work without this method.
rule project-module ( )
{
return $(self.project-module) ;
}
-
- rule get ( attribute )
+
+ rule get ( attribute )
{
return [ project.attribute $(self.project-module) $(attribute) ] ;
}
@@ -225,57 +224,57 @@
rule build-dir ( )
{
if ! $(self.build-dir)
- {
+ {
self.build-dir = [ get build-dir ] ;
if ! $(self.build-dir)
{
- self.build-dir = [ path.join
+ self.build-dir = [ path.join
[ $(self.project).get location ]
bin
] ;
}
}
return $(self.build-dir) ;
- }
-
+ }
+
# Generates all possible targets contained in this project.
rule generate ( property-set * )
{
- if [ modules.peek : .debug-building ]
+ if [ modules.peek : .debug-building ]
{
ECHO [ targets.indent ] "building project" [ name ] " ('$(__name__)') with" [ $(property-set).raw ] ;
targets.increase-indent ;
}
-
+
local usage-requirements = [ property-set.empty ] ;
local targets ;
-
- for local t in [ targets-to-build ]
+
+ for local t in [ targets-to-build ]
{
local g = [ $(t).generate $(property-set) ] ;
usage-requirements = [ $(usage-requirements).add $(g[1]) ] ;
targets += $(g[2-]) ;
}
targets.decrease-indent ;
- return $(usage-requirements) [ sequence.unique $(targets) ] ;
+ return $(usage-requirements) [ sequence.unique $(targets) ] ;
}
-
- # Computes and returns a list of abstract-target instances which
- # must be built when this project is built.
+
+ # Computes and returns a list of abstract-target instances which must be
+ # built when this project is built.
rule targets-to-build ( )
{
local result ;
-
+
if ! $(self.built-main-targets)
{
build-main-targets ;
}
-
+
# Collect all main targets here, except for "explicit" ones.
for local t in $(self.main-targets)
{
if ! [ $(t).name ] in $(self.explicit-targets)
- {
+ {
result += $(t) ;
}
}
@@ -286,10 +285,10 @@
{
result += [ find $(pn) ] ;
}
-
+
return $(result) ;
}
-
+
# Add 'target' to the list of targets in this project that should be build
# only by explicit request
rule mark-target-as-explicit ( target-name )
@@ -298,19 +297,18 @@
# rule is called before main target instaces are created.
self.explicit-targets += $(target-name) ;
}
-
+
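A hypothetical usage sketch (illustrative target names, and assuming the
user-level 'explicit' Jamfile rule forwards to mark-target-as-explicit):

    install dist : app : <location>dist ;
    # 'dist' is skipped when the project is built as a whole and is built only
    # when requested by name, e.g. "bjam dist".
    explicit dist ;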
# Add new target alternative
rule add-alternative ( target-instance )
{
if $(self.built-main-targets)
{
- errors.error "add-alternative called when main targets are already created."
- : "in project" [ full-name ] ;
- }
+ errors.error "add-alternative called when main targets are already created."
+ : "in project" [ full-name ] ;
+ }
self.alternatives += $(target-instance) ;
}
-
-
+
# Returns a 'main-target' class instance corresponding to the 'name'.
rule main-target ( name )
{
@@ -318,7 +316,7 @@
{
build-main-targets ;
}
-
+
return $(self.main-target.$(name)) ;
}
@@ -329,79 +327,79 @@
{
build-main-targets ;
}
-
- if $(self.main-target.$(name))
+
+ if $(self.main-target.$(name))
{
return true ;
- }
+ }
}
# Find and return the target with the specified id, treated
# relative to self.
rule find-really ( id )
{
- local result ;
+ local result ;
local project = $(self.project) ;
local current-location = [ get location ] ;
-
- local split = [ MATCH (.*)//(.*) : $(id) ] ;
+
+ local split = [ MATCH (.*)//(.*) : $(id) ] ;
local project-part = $(split[1]) ;
local target-part = $(split[2]) ;
local extra-error-message ;
if $(project-part)
{
- # There's explicit project part in id. Looks up the
- # project and pass the request to it.
+ # There's an explicit project part in the id. Looks up the project and
+ # passes the request to it.
local pm = [ project.find $(project-part) : $(current-location) ] ;
if $(pm)
{
- project-target = [ project.target $(pm) ] ;
+ project-target = [ project.target $(pm) ] ;
result = [ $(project-target).find $(target-part) : no-error ] ;
- }
+ }
else
{
extra-error-message = "error: could not find project '$(project-part)'" ;
- }
+ }
}
else
- {
- # Interpret target-name as name of main target
- # Need to do this before checking for file. Consider this:
+ {
+ # Interpret target-name as name of main target. Need to do this
+ # before checking for file. Consider this:
#
# exe test : test.cpp ;
# install s : test : <location>. ;
#
# After first build we'll have target 'test' in Jamfile and file
- # 'test' on the disk. We need target to override the file.
- result = [ main-target $(id) ] ;
-
+ # 'test' on the disk. We need target to override the file.
+ result = [ main-target $(id) ] ;
+
if ! $(result)
{
result = [ new file-reference [ path.make $(id) ] : $(project) ] ;
-
+
if ! [ $(result).exists ]
{
- # File actually does not exist.
- # Reset 'target' so that an error is issued.
+ # File actually does not exist. Reset 'target' so that an
+ # error is issued.
result = ;
- }
+ }
}
-
+
# Interpret id as project-id
if ! $(result)
- {
+ {
local project-module = [ project.find $(id) : $(current-location) ] ;
if $(project-module)
{
result = [ project.target $(project-module) ] ;
- }
- }
+ }
+ }
}
-
+
return $(result) ;
}
-
+
rule find ( id : no-error ? )
{
local v = $(.id.$(id)) ;
@@ -414,11 +412,11 @@
}
.id.$(id) = $(v) ;
}
-
+
if $(v) != none
{
return $(v) ;
- }
+ }
else
{
if ! $(no-error)
@@ -430,12 +428,10 @@
ECHO "error: '$(current-location)'" ;
ECHO $(extra-error-message) ;
EXIT ;
- }
- }
+ }
+ }
}
-
-
rule build-main-targets ( )
{
self.built-main-targets = true ;
@@ -450,16 +446,16 @@
self.main-targets += $(t) ;
target = $(self.main-target.$(name)) ;
}
-
- $(target).add-alternative $(a) ;
- }
- }
-
+
+ $(target).add-alternative $(a) ;
+ }
+ }
+
# Accessor, add a constant.
rule add-constant (
- name # Variable name of the constant.
- : value + # Value of the constant.
- : type ? # Optional type of value.
+ name # Variable name of the constant.
+ : value + # Value of the constant.
+ : type ? # Optional type of value.
)
{
switch $(type)
@@ -467,14 +463,14 @@
case path :
local r ;
for local v in $(value)
- {
+ {
v = [ path.root [ path.make $(v) ] $(self.location) ] ;
- # Now make the value absolute path
+ # Now make the value absolute path.
v = [ path.root $(v) [ path.pwd ] ] ;
- # Constants should be in platform-native form
+ # Constants should be in platform-native form.
v = [ path.native $(v) ] ;
r += $(v) ;
- }
+ }
value = $(r) ;
}
if ! $(name) in $(self.constants)
@@ -482,31 +478,31 @@
self.constants += $(name) ;
}
self.constant.$(name) = $(value) ;
- # Inject the constant in the scope of project-root module
+ # Inject the constant in the scope of project-root module.
modules.poke $(self.project-module) : $(name) : $(value) ;
}
-
+
rule inherit ( parent )
{
- for local c in [ modules.peek $(parent) : self.constants ]
+ for local c in [ modules.peek $(parent) : self.constants ]
{
# No need to pass the type. Path constants were converted to
# absolute paths already by parent.
- add-constant $(c)
+ add-constant $(c)
: [ modules.peek $(parent) : self.constant.$(c) ] ;
- }
+ }
- # Import rules from parent
+ # Import rules from parent.
local this-module = [ project-module ] ;
local parent-module = [ $(parent).project-module ] ;
- # Don't import rules which comes from 'project-rules', they
- # must be imported localized.
- local user-rules = [ set.difference
+ # Don't import rules coming from 'project-rules' as they must be
+ # imported localized.
+ local user-rules = [ set.difference
[ RULENAMES $(parent-module) ] :
[ RULENAMES project-rules ] ] ;
IMPORT $(parent-module) : $(user-rules) : $(this-module) : $(user-rules) ;
EXPORT $(this-module) : $(user-rules) ;
- }
+ }
}
@@ -520,37 +516,38 @@
{
names += [ $(t).full-name ] ;
}
-
- errors.error "Recursion in main target references"
+
+ errors.error "Recursion in main target references"
: "the following target are being built currently:"
: $(names) ;
}
- .targets-being-built += $(main-target-instance) ;
+ .targets-being-built += $(main-target-instance) ;
}
+
local rule end-building ( main-target-instance )
{
.targets-being-built = $(.targets-being-built[1--2]) ;
}
-# A named top-level target in Jamfile
+# A named top-level target in Jamfile.
class main-target : abstract-target
{
- import errors : error ;
import assert ;
- import sequence ;
+ import build-request ;
+ import errors ;
+ import feature ;
import print ;
- import build-request feature property-set ;
+ import property-set ;
+ import sequence ;
import targets : start-building end-building ;
- import "class" : is-a ;
-
+
rule __init__ ( name : project )
{
abstract-target.__init__ $(name) : $(project) ;
}
-
-
+
# Add a new alternative for this target
rule add-alternative ( target )
{
@@ -565,51 +562,49 @@
else
{
self.default-build = $(d) ;
- }
+ }
self.alternatives += $(target) ;
}
- # Returns the best viable alternative for this property-set
- # See the documentation for selection rules.
+ # Returns the best viable alternative for this property-set. See the
+ # documentation for selection rules.
local rule select-alternatives ( property-set debug ? )
{
- # When selecting alternatives we have to consider defaults,
- # for example:
+ # When selecting alternatives we have to consider defaults, for example:
# lib l : l.cpp : <variant>debug ;
# lib l : l_opt.cpp : <variant>release ;
- # won't work unless we add default value <variant>debug.
- property-set = [ $(p).add-defaults ] ;
-
- # The algorithm: we keep the current best viable alternative.
- # When we've got new best viable alternative, we compare it
- # with the current one.
-
+ # won't work unless we add default value <variant>debug.
+ property-set = [ $(p).add-defaults ] ;
+
+ # The algorithm: we keep the current best viable alternative. When we've
+ # got a new best viable alternative, we compare it with the current one.
+
local best ;
local best-properties ;
-
+
if $(self.alternatives[2-])
{
local bad ;
- local worklist = $(self.alternatives) ;
+ local worklist = $(self.alternatives) ;
while $(worklist) && ! $(bad)
{
- local v = $(worklist[1]) ;
- local properties = [ $(v).match $(property-set) $(debug) ] ;
-
+ local v = $(worklist[1]) ;
+ local properties = [ $(v).match $(property-set) $(debug) ] ;
+
if $(properties) != no-match
- {
+ {
if ! $(best)
{
best = $(v) ;
best-properties = $(properties) ;
}
else
- {
+ {
if $(properties) = $(best-properties)
{
bad = true ;
}
- else if $(properties) in $(best-properties)
+ else if $(properties) in $(best-properties)
{
# Do nothing, this alternative is worse
}
@@ -618,55 +613,53 @@
best = $(v) ;
best-properties = $(properties) ;
}
- else
+ else
{
bad = true ;
- }
+ }
}
}
- worklist = $(worklist[2-]) ;
+ worklist = $(worklist[2-]) ;
}
if ! $(bad)
{
return $(best) ;
- }
+ }
}
else
{
return $(self.alternatives) ;
- }
+ }
}
-
-
+
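The example from the comment above, written out as a hypothetical Jamfile
sketch: two alternatives of one main target, distinguished only by their
requirements, with the <variant>debug default letting default builds match the
first alternative.

    lib l : l.cpp : <variant>debug ;       # picked for debug builds (and by default)
    lib l : l_opt.cpp : <variant>release ; # picked when <variant>release is requested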
rule apply-default-build ( property-set )
- {
- # 1. First, see what properties from default-build
- # are already present in property-set.
-
+ {
+ # 1. First, see what properties from default-build are already present
+ # in property-set.
+
local raw = [ $(property-set).raw ] ;
local specified-features = $(raw:G) ;
-
+
local defaults-to-apply ;
- for local d in [ $(self.default-build).raw ]
+ for local d in [ $(self.default-build).raw ]
{
if ! $(d:G) in $(specified-features)
{
- defaults-to-apply += $(d) ;
- }
+ defaults-to-apply += $(d) ;
+ }
}
-
- # 2. If there's any defaults to be applied, form the new
- # build request. Pass it throw 'expand-no-defaults', since
- # default-build might contain "release debug", which will
- # result in two property-sets.
+
+ # 2. If there are any defaults to be applied, form a new build request.
+ # Pass it through to 'expand-no-defaults' since default-build might
+ # contain "release debug" resulting in two property-sets.
local result ;
if $(defaults-to-apply)
{
- properties = [
- build-request.expand-no-defaults
-
- # We have to compress subproperties here to prevent
- # property lists like:
+ properties = [
+ build-request.expand-no-defaults
+
+ # We have to compress subproperties here to prevent property
+ # lists like:
#
# <toolset>msvc <toolset-msvc:version>7.1 <threading>multi
#
@@ -675,27 +668,25 @@
# <toolset-msvc:version>7.1/<threading>multi
# <toolset>msvc/<toolset-msvc:version>7.1/<threading>multi
#
- # due to cross-product property combination. That may
- # be an indication that
- # build-request.expand-no-defaults is the wrong rule
- # to use here.
- [ feature.compress-subproperties $(raw) ]
+ # due to a cross-product property combination. That may be an
+ # indication that build-request.expand-no-defaults is the wrong
+ # rule to use here.
+ [ feature.compress-subproperties $(raw) ]
$(defaults-to-apply)
] ;
-
+
if $(properties)
- {
+ {
for local p in $(properties)
{
- result += [ property-set.create
+ result += [ property-set.create
[ feature.expand [ feature.split $(p) ] ] ] ;
}
}
else
{
result = [ property-set.empty ] ;
- }
-
+ }
}
else
{
@@ -703,19 +694,19 @@
}
return $(result) ;
}
-
+
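A hypothetical sketch (illustrative names) of a default-build that expands into
two property-sets:

    # With no <variant> in the build request, 'app' is built twice, once per
    # default-build variant; an explicit <variant> in the request suppresses
    # the default.
    exe app : app.cpp : : debug release ;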
# Select an alternative for this main target, by finding all alternatives
# which requirements are satisfied by 'properties' and picking the one with
- # longest requirements set.
- # Returns the result of calling 'generate' on that alternative.
+ # longest requirements set. Returns the result of calling 'generate' on that
+ # alternative.
rule generate ( property-set )
{
start-building $(__name__) ;
- # We want composite properties in build request act as if
- # all the properties it expands too are explicitly specified.
+ # We want composite properties in the build request to act as if all the
+ # properties they expand to were explicitly specified.
property-set = [ $(property-set).expand ] ;
-
+
local all-property-sets = [ apply-default-build $(property-set) ] ;
local usage-requirements = [ property-set.empty ] ;
local result ;
@@ -729,16 +720,15 @@
}
}
end-building $(__name__) ;
- return $(usage-requirements) [ sequence.unique $(result) ] ;
+ return $(usage-requirements) [ sequence.unique $(result) ] ;
}
-
- # Generates the main target with the given property set
- # and returns a list which first element is property-set object
- # containing usage-requirements of generated target and with
- # generated virtual target in other elements. It's possible
- # that no targets are generated.
+
+ # Generates the main target with the given property set and returns a list
+ # whose first element is a property-set object containing the usage
+ # requirements of the generated target and whose other elements are the
+ # generated virtual targets.
+ # It's possible that no targets are generated.
local rule generate-really ( property-set )
- {
+ {
local best-alternatives = [ select-alternatives $(property-set) ] ;
if ! $(best-alternatives)
{
@@ -748,13 +738,11 @@
}
else
{
- local result = [ $(best-alternatives).generate $(property-set) ] ;
-
- # Now return virtual targets for the only alternative
- return $(result) ;
- }
+ # Now return virtual targets for the only alternative.
+ return [ $(best-alternatives).generate $(property-set) ] ;
+ }
}
-
+
rule rename ( new-name )
{
abstract-target.rename $(new-name) ;
@@ -762,48 +750,46 @@
{
$(a).rename $(new-name) ;
}
-
}
-
}
-# Abstract target which refers to a source file.
-# This is artificial creature; it's usefull so that sources to
-# a target can be represented as list of abstract target instances.
-class file-reference : abstract-target
+
+# Abstract target which refers to a source file. This is an artificial entity
+# allowing sources to a target to be represented using a list of abstract target
+# instances.
+class file-reference : abstract-target
{
import virtual-target ;
import property-set ;
import path ;
-
+
rule __init__ ( file : project )
{
abstract-target.__init__ $(file) : $(project) ;
}
-
+
rule generate ( properties )
{
location ;
- return [ property-set.empty ]
+ return [ property-set.empty ]
[ virtual-target.from-file $(self.name)
: $(self.file-location)
- : $(self.project) ] ;
- }
+ : $(self.project) ] ;
+ }
- # Returns true if the referred file really exists;
+ # Returns true if the referred file really exists.
rule exists ( )
{
location ;
return $(self.file-path) ;
}
-
- # Returns the location of target. Needed by 'testing.jam'
+
+ # Returns the location of target. Needed by 'testing.jam'.
rule location ( )
{
if ! $(self.file-location)
{
local source-location = [ $(self.project).get source-location ] ;
-
for local src-dir in $(source-location)
{
if ! $(self.file-location)
@@ -821,9 +807,10 @@
}
}
-# Given a target-reference, made in context of 'project',
-# returns the abstract-target instance that is referred to, as well
-# as properties explicitly specified for this reference.
+
+# Given a target-reference, made in context of 'project', returns the
+# abstract-target instance that is referred to, as well as properties explicitly
+# specified for this reference.
rule resolve-reference ( target-reference : project )
{
# Separate target name from properties override
@@ -838,596 +825,574 @@
# Find the target
local target = [ $(project).find $(id) ] ;
-
+
return $(target) [ property-set.create $(sproperties) ] ;
}
-
-# Attempts to generate the target given by target reference, which
-# can refer both to a main target or to a file.
-# Returns a list consisting of
+# Attempts to generate the target given by a target reference, which can refer
+# to either a main target or a file. Returns a list consisting of
# - usage requirements
# - generated virtual targets, if any
-rule generate-from-reference
- ( target-reference # Target reference
- : project # Project where the reference is made
- : property-set # Properties of the main target that
- # makes the reference
- )
+rule generate-from-reference (
+ target-reference # Target reference.
+ : project # Project where the reference is made.
+ : property-set # Properties of the main target that makes the reference.
+)
{
local r = [ resolve-reference $(target-reference) : $(project) ] ;
local target = $(r[1]) ;
local sproperties = $(r[2]) ;
-
- # Take properties which should be propagated and refine them
- # with source-specific requirements.
+
+ # Take properties which should be propagated and refine them with
+ # source-specific requirements.
local propagated = [ $(property-set).propagated ] ;
local rproperties = [ $(propagated).refine $(sproperties) ] ;
if $(rproperties[1]) = "@error"
{
errors.error
- "When building" [ full-name ] " with properties " $(properties) :
+ "When building" [ full-name ] " with properties " $(properties) :
"Invalid properties specified for " $(source) ":"
- $(rproperties[2-]) ;
+ $(rproperties[2-]) ;
}
return [ $(target).generate $(rproperties) ] ;
}
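A hypothetical sketch (paths and names are illustrative) of the target
references this rule resolves: a reference may name a main target in another
project and carry extra properties that are merged into the propagated build
request.

    # 'helpers' is looked up in the ../util project; <link>static applies to
    # that reference only.
    exe app : app.cpp ../util//helpers/<link>static ;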
-# Given build request and requirements, return properties
-# common to dependency build request and target build
-# properties
+
+# Given a build request and requirements, return properties common to dependency
+# build request and target build properties.
rule common-properties ( build-request requirements )
{
- # For optimization, we add free requirements directly,
- # without using complex algorithsm.
- # This gives the complex algorithm better chance of caching results.
- local free = [ $(requirements).free ] ;
- local non-free = [ property-set.create
+ # For optimization, we add free requirements directly, without using a
+ # complex algorithm. This gives the complex algorithm a better chance of
+ # caching results.
+ local free = [ $(requirements).free ] ;
+ local non-free = [ property-set.create
[ $(requirements).base ] [ $(requirements).incidental ] ] ;
-
+
local key = .rp.$(build-request)-$(non-free) ;
if ! $($(key))
- {
- $(key) = [ common-properties2 $(build-request) $(non-free) ] ;
- }
+ {
+ $(key) = [ common-properties2 $(build-request) $(non-free) ] ;
+ }
result = [ $($(key)).add-raw $(free) ] ;
}
+
# Given 'context' -- a set of already present properties, and 'requirements',
-# decide which extra properties should be applied to 'context'.
-# For conditional requirements, this means evaluating condition. For
-# indirect conditional requirements, this means calling a rule. Ordinary
-# requirements are always applied.
-#
-# Handles situation where evaluating one conditional requirements affects
-# condition of another conditional requirements, for example:
+# decide which extra properties should be applied to 'context'. For conditional
+# requirements, this means evaluating condition. For indirect conditional
+# requirements, this means calling a rule. Ordinary requirements are always
+# applied.
#
+# Handles the situation where evaluating one conditional requirement affects
+# the condition of another conditional requirement, such as:
# <toolset>gcc:<variant>release <variant>release:<define>RELEASE
#
-# If 'what' is 'refined' returns context refined with new requirements.
-# If 'what' is 'added' returns just the requirements that must be applied.
+# If 'what' is 'refined', returns the context refined with the new
+# requirements. If 'what' is 'added', returns just the requirements to be
+# applied.
rule evaluate-requirements ( requirements : context : what )
{
- # Apply non-conditional requirements.
- # It's possible that that further conditional requirement change
- # a value set by non-conditional requirements. For example:
+ # Apply non-conditional requirements. It's possible that a further
+ # conditional requirement changes a value set by non-conditional
+ # requirements. For
+ # example:
#
# exe a : a.cpp : <threading>single <toolset>foo:<threading>multi ;
- #
+ #
# I'm not sure if this should be an error, or not, especially given that
#
- # <threading>single
+ # <threading>single
#
# might come from project's requirements.
-
+
local unconditional = [ feature.expand [ $(requirements).non-conditional ] ] ;
-
+
local raw = [ $(context).raw ] ;
raw = [ property.refine $(raw) : $(unconditional) ] ;
-
+
# We've collected properties that surely must be present in common
- # properties. We now try to figure out what other properties
- # should be added in order to satisfy rules (4)-(6) from the docs.
-
+ # properties. We now try to figure out what other properties should be added
+ # in order to satisfy rules (4)-(6) from the docs.
+
local conditionals = [ $(requirements).conditional ] ;
- # The 'count' variable has one element for each conditional feature
- # and for each occurence of '<indirect-conditional>' feature.
- # It's used as a loop counter: for each iteration of the loop
- # before we remove one element and the property set should
- # stabilize before we've done. It's supposed to #conditionals iterations
- # should be enough for properties to propagate along conditions in any
- # direction.
- local count = $(conditionals)
- [ $(requirements).get <conditional> ]
+ # The 'count' variable has one element for each conditional feature and for
+ # each occurrence of the '<indirect-conditional>' feature. It's used as a
+ # loop counter: each iteration of the loop removes one element, and the
+ # property set should stabilize before we run out. It's assumed that
+ # #conditionals iterations should be enough for properties to propagate
+ # along conditions in any direction.
+ local count = $(conditionals)
+ [ $(requirements).get <conditional> ]
and-once-more ;
-
+
local added-requirements ;
-
+
local current = $(raw) ;
-
+
# It's assumed that ordinary conditional requirements can't add
- # <indirect-conditional> properties, and that rules referred
- # by <indirect-conditional> properties can't add new
- # <indirect-conditional> properties. So the list of indirect conditionals
- # does not change.
+ # <indirect-conditional> properties, and that rules referred to by
+ # <indirect-conditional> properties can't add new <indirect-conditional>
+ # properties. So the list of indirect conditionals does not change.
local indirect = [ $(requirements).get <conditional> ] ;
- indirect = [ MATCH @(.*) : $(indirect) ] ;
-
+ indirect = [ MATCH @(.*) : $(indirect) ] ;
+
local ok ;
- while $(count)
+ while $(count)
{
- # Evaluate conditionals in context of current properties
- local e = [ property.evaluate-conditionals-in-context $(conditionals)
- : $(current) ] ;
-
+ # Evaluate conditionals in context of current properties.
+ local e = [ property.evaluate-conditionals-in-context $(conditionals)
+ : $(current) ] ;
+
# Evaluate indirect conditionals.
for local i in $(indirect)
{
e += [ indirect.call $(i) $(current) ] ;
}
-
+
if $(e) = $(added-requirements)
- {
+ {
# If we got the same result, we've found final properties.
- count = ;
+ count = ;
ok = true ;
- }
+ }
else
{
- # Oops, results of evaluation of conditionals has changed.
- # Also 'current' contains leftover from previous evaluation.
- # Recompute 'current' using initial properties and conditional
- # requirements.
+ # Oops, the results of evaluating the conditionals have changed. Also,
+ # 'current' contains leftovers from the previous evaluation. Recompute
+ # 'current' using initial properties and conditional requirements.
added-requirements = $(e) ;
current = [ property.refine $(raw) : [ feature.expand $(e) ] ] ;
- }
+ }
count = $(count[2-]) ;
}
if ! $(ok)
{
errors.error "Can't evaluate conditional properties " $(conditionals) ;
}
-
if $(what) = added
{
return [ property-set.create $(unconditional) $(added-requirements) ] ;
}
else if $(what) = refined
- {
- return [ property-set.create $(current) ] ;
+ {
+ return [ property-set.create $(current) ] ;
}
else
{
errors.error "Invalid value of the 'what' parameter" ;
- }
+ }
}
-
+
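The chained-conditional case mentioned above, as a hypothetical Jamfile sketch
(illustrative names): the first conditional adds <variant>release, which in
turn satisfies the second conditional on a later iteration of the loop.

    exe app : app.cpp : <toolset>gcc:<variant>release
                        <variant>release:<define>RELEASE ;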
rule common-properties2 ( build-request requirements )
-{
- # This guarantees that default properties are present
- # in result, unless they are overrided by some requirement.
- # FIXME: There is possibility that we've added <foo>bar, which is composite
- # and expands to <foo2>bar2, but default value of <foo2> is not bar2,
- # in which case it's not clear what to do.
- #
+{
+ # This guarantees that default properties are present in the result, unless
+ # they are overridden by some requirement. FIXME: There is a possibility that
+ # we've added <foo>bar, which is composite and expands to <foo2>bar2, but
+ # default value of <foo2> is not bar2, in which case it's not clear what to
+ # do.
+ #
build-request = [ $(build-request).add-defaults ] ;
- # Featured added by 'add-default' can be composite and expand
- # to features without default values -- so they are not added yet.
- # It could be clearer/faster to expand only newly added properties
- # but that's not critical.
+ # Features added by 'add-default' can be composite and expand to features
+ # without default values -- so they are not added yet. It could be clearer/
+ # /faster to expand only newly added properties but that's not critical.
build-request = [ $(build-request).expand ] ;
-
- return [ evaluate-requirements $(requirements)
+
+ return [ evaluate-requirements $(requirements)
: $(build-request) : refined ] ;
}
-# Implements the most standard way of constructing main target
-# alternative from sources. Allows sources to be either file or
-# other main target and handles generation of those dependency
-# targets.
+
+# Implements the most standard way of constructing main target alternative from
+# sources. Allows sources to be either file or other main target and handles
+# generation of those dependency targets.
class basic-target : abstract-target
{
import build-request ;
- import virtual-target targets ;
- import property-set ;
- import set sequence errors ;
- import "class" : new ;
- import property feature ;
import build-system ;
-
- rule __init__ ( name : project
- : sources * : requirements * :
- default-build * : usage-requirements * )
- {
+ import "class" : new ;
+ import errors ;
+ import feature ;
+ import property ;
+ import property-set ;
+ import sequence ;
+ import set ;
+ import targets ;
+ import virtual-target ;
+
+ rule __init__ ( name : project : sources * : requirements *
+ : default-build * : usage-requirements * )
+ {
abstract-target.__init__ $(name) : $(project) ;
-
+
self.sources = $(sources) ;
if ! $(requirements) {
requirements = [ property-set.empty ] ;
- }
+ }
self.requirements = $(requirements) ;
- if ! $(default-build)
+ if ! $(default-build)
{
default-build = [ property-set.empty ] ;
- }
+ }
self.default-build = $(default-build) ;
if ! $(usage-requirements)
{
usage-requirements = [ property-set.empty ] ;
- }
+ }
self.usage-requirements = $(usage-requirements) ;
-
+
if $(sources:G)
{
errors.user-error "properties found in the 'sources' parameter for" [ full-name ] ;
}
}
-
- # Returns the list of abstract-targets which are used as sources.
- # The extra properties specified for sources are not represented.
- # The only used of this rule at the moment is the "--dump-test"
- # feature of the test system.
+
+ # Returns the list of abstract-targets which are used as sources. The extra
+ # properties specified for sources are not represented. The only user of
+ # this rule at the moment is the "--dump-tests" feature of the test system.
rule sources ( )
{
if ! $(self.source-targets) {
for local s in $(self.sources)
{
- self.source-targets +=
+ self.source-targets +=
[ targets.resolve-reference $(s) : $(self.project) ] ;
- }
- }
+ }
+ }
return $(self.source-targets) ;
}
-
+
rule requirements ( )
{
return $(self.requirements) ;
}
-
+
rule default-build ( )
{
return $(self.default-build) ;
}
-
- # Returns the alternative condition for this alternative, if
- # the condition is satisfied by 'property-set'.
+
+ # Returns the alternative condition for this alternative, if the condition
+ # is satisfied by 'property-set'.
rule match ( property-set debug ? )
- {
- # The condition is composed of all base non-conditional properties.
- # It's not clear if we should expand 'self.requirements' or not.
- # For one thing, it would be nice to be able to put
- # <toolset>msvc-6.0
+ {
+ # The condition is composed of all base non-conditional properties. It's
+ # not clear if we should expand 'self.requirements' or not. For one
+ # thing, it would be nice to be able to put
+ # <toolset>msvc-6.0
# in requirements.
- # On the other hand, if we have <variant>release in condition it
- # does not make sense to require <optimization>full to be in
- # build request just to select this variant.
+ # On the other hand, if we have <variant>release in condition it does
+ # not make sense to require <optimization>full to be in build request
+ # just to select this variant.
local bcondition = [ $(self.requirements).base ] ;
local ccondition = [ $(self.requirements).conditional ] ;
local condition = [ set.difference $(bcondition) : $(ccondition) ] ;
if $(debug)
{
ECHO " next alternative: required properties:" $(condition:E=(empty)) ;
- }
-
- if $(condition) in [ $(property-set).raw ]
+ }
+
+ if $(condition) in [ $(property-set).raw ]
{
if $(debug)
{
ECHO " matched" ;
- }
- return $(condition) ;
+ }
+ return $(condition) ;
}
else
{
if $(debug)
{
ECHO " not matched" ;
- }
+ }
return no-match ;
- }
+ }
}
-
- # Takes a target reference, which might be either target id
- # or a dependency property, and generates that target using
- # 'property-set' as build request.
+
+ # Takes a target reference, which might be either a target id or a dependency
+ # property, and generates that target using 'property-set' as the build
+ # request.
#
- # The results are added to to variable called 'result-var'.
- # Usage requirements are added variable called 'usage-requirements-var'.
- rule generate-dependencies ( dependencies * : property-set
+ # The results are added to the variable called 'result-var'. Usage
+ # requirements are added to the variable called 'usage-requirements-var'.
+ rule generate-dependencies ( dependencies * : property-set
: result-var usage-requirements-var )
{
for local dependency in $(dependencies)
- {
+ {
local grist = $(dependency:G) ;
local id = $(dependency:G=) ;
-
- local result =
- [ targets.generate-from-reference $(id) : $(self.project)
+
+ local result =
+ [ targets.generate-from-reference $(id) : $(self.project)
: $(property-set) ] ;
-
+
$(result-var) += $(result[2-]:G=$(grist)) ;
$(usage-requirements-var) += [ $(result[1]).raw ] ;
- }
+ }
}
-
-
- # Determines final build properties, generates sources,
- # and calls 'construct'. This method should not be
- # overridden.
+
+ # Determines final build properties, generates sources, and calls
+ # 'construct'. This method should not be overridden.
rule generate ( property-set )
{
- if [ modules.peek : .debug-building ]
+ if [ modules.peek : .debug-building ]
{
ECHO ;
local fn = [ full-name ] ;
ECHO [ targets.indent ] "Building target '$(fn)'" ;
targets.increase-indent ;
- ECHO [ targets.indent ] "Build request: " [ $(property-set).raw ] ;
+ ECHO [ targets.indent ] "Build request: " [ $(property-set).raw ] ;
local cf = [ build-system.command-line-free-features ] ;
- ECHO [ targets.indent ] "Command line free features: "
- [ $(cf).raw ] ;
- ECHO [ targets.indent ] "Target requirements: " [ $(self.requirements).raw ] ;
+ ECHO [ targets.indent ] "Command line free features: " [ $(cf).raw ] ;
+ ECHO [ targets.indent ] "Target requirements: " [ $(self.requirements).raw ] ;
}
-
- if ! $(self.generated.$(property-set))
+
+ if ! $(self.generated.$(property-set))
{
- # Apply free features form the command line. If user
- # said
+ # Apply free features from the command line. If the user said
# define=FOO
- # he most likely want this define to be set for all compiles.
- property-set = [ $(property-set).refine
- [ build-system.command-line-free-features ] ] ;
- local rproperties = [ targets.common-properties $(property-set)
- $(self.requirements) ] ;
-
- if [ modules.peek : .debug-building ]
+ # he most likely wants this define to be set for all compiles.
+ property-set = [ $(property-set).refine
+ [ build-system.command-line-free-features ] ] ;
+ local rproperties = [ targets.common-properties $(property-set)
+ $(self.requirements) ] ;
+
+ if [ modules.peek : .debug-building ]
{
ECHO ;
ECHO [ targets.indent ] "Common properties:" [ $(rproperties).raw ] ;
}
-
+
if $(rproperties[1]) != "@error" && [ $(rproperties).get <build> ] != no
{
local source-targets ;
local properties = [ $(rproperties).non-dependency ] ;
local usage-requirements ;
-
+
generate-dependencies [ $(rproperties).dependency ]
- : $(rproperties)
- : properties usage-requirements ;
-
+ : $(rproperties)
+ : properties usage-requirements ;
+
generate-dependencies $(self.sources) : $(rproperties)
- : source-targets usage-requirements ;
-
- if [ modules.peek : .debug-building ]
+ : source-targets usage-requirements ;
+
+ if [ modules.peek : .debug-building ]
{
ECHO ;
- ECHO [ targets.indent ]
- "Usage requirements for $(self.name) are " $(usage-requirements) ;
+ ECHO [ targets.indent ]
+ "Usage requirements for $(self.name) are " $(usage-requirements) ;
}
- rproperties = [ property-set.create $(properties)
- $(usage-requirements) ] ;
+ rproperties = [ property-set.create $(properties)
+ $(usage-requirements) ] ;
usage-requirements = [ property-set.create $(usage-requirements) ] ;
-
- if [ modules.peek : .debug-building ]
+
+ if [ modules.peek : .debug-building ]
{
- ECHO [ targets.indent ]
- "Build properties: " [ $(rproperties).raw ] ;
+ ECHO [ targets.indent ]
+ "Build properties: " [ $(rproperties).raw ] ;
}
-
- local extra = [ $(rproperties).get <source> ] ;
+
+ local extra = [ $(rproperties).get <source> ] ;
source-targets += $(extra:G=) ;
- # We might get duplicate sources, for example if
- # we link to two library which have the same <library> in
- # usage requirements.
+ # We might get duplicate sources, for example if we link to two
+ # libraries having the same <library> usage requirement.
source-targets = [ sequence.unique $(source-targets) ] ;
-
- local result =
- [ construct $(self.name) :
+
+ local result =
+ [ construct $(self.name) :
$(source-targets) : $(rproperties) ] ;
-
+
if $(result)
{
local gur = $(result[1]) ;
result = $(result[2-]) ;
- local s = [ create-subvariant
- $(result) :
- [ virtual-target.recent-targets ]
- : $(property-set) : $(source-targets)
- : $(rproperties) : $(usage-requirements) ] ;
+ local s = [ create-subvariant $(result)
+ : [ virtual-target.recent-targets ]
+ : $(property-set) : $(source-targets)
+ : $(rproperties) : $(usage-requirements) ] ;
virtual-target.clear-recent-targets ;
local ur = [ compute-usage-requirements $(s) ] ;
ur = [ $(ur).add $(gur) ] ;
$(s).set-usage-requirements $(ur) ;
- if [ modules.peek : .debug-building ]
+ if [ modules.peek : .debug-building ]
{
ECHO [ targets.indent ]
"Usage requirements from $(self.name) are "
[ $(ur).raw ] ;
}
-
+
self.generated.$(property-set) = $(ur) $(result) ;
}
- }
+ }
else
{
- if $(rproperties[1]) = "@error"
+ if $(rproperties[1]) = "@error"
{
- ECHO [ targets.indent ]
- "Skipping build of: " [ full-name ] " cannot compute common properties" ;
+ ECHO [ targets.indent ]
+ "Skipping build of: " [ full-name ] " cannot compute common properties" ;
}
else if [ $(rproperties).get <build> ] = no
{
- ECHO [ targets.indent ]
- "Skipping build of: " [ full-name ] " <build>no in common properties" ;
+ ECHO [ targets.indent ]
+ "Skipping build of: " [ full-name ] " <build>no in common properties" ;
}
else
{
ECHO [ targets.indent ] "Skipping build of: " [ full-name ] " unknown reason" ;
}
-
- # We're here either because there's error computing
- # properties, or there's <build>no in properties.
- # In the latter case we don't want any diagnostic.
- # In the former case, we need diagnostics. FIXME.
+
+ # We're here either because there's been an error computing
+ # properties, or there's <build>no in properties. In the latter
+ # case we don't want any diagnostic. In the former case, we need
+ # diagnostics. FIXME
self.generated.$(property-set) = $(rproperties) ;
- }
- }
+ }
+ }
else
{
- if [ modules.peek : .debug-building ]
+ if [ modules.peek : .debug-building ]
{
ECHO [ targets.indent ] "Already built" ;
- }
+ }
}
targets.decrease-indent ;
return $(self.generated.$(property-set)) ;
}
- # Given the set of generated targets, and refined build
- # properties, determines and sets appripriate usage requirements
- # on those targets.
+ # Given the set of generated targets, and refined build properties,
+ # determines and sets appropriate usage requirements on those targets.
rule compute-usage-requirements ( subvariant )
{
local rproperties = [ $(subvariant).build-properties ] ;
- xusage-requirements = [ targets.evaluate-requirements
- $(self.usage-requirements)
- : $(rproperties)
- : added ] ;
-
- # We generate all dependency properties and add them,
- # as well as their usage requirements, to result.
+ xusage-requirements = [ targets.evaluate-requirements
+ $(self.usage-requirements) : $(rproperties) : added ] ;
+
+ # We generate all dependency properties and add them, as well as their
+ # usage requirements, to the result.
local extra ;
generate-dependencies [ $(xusage-requirements).dependency ] :
$(rproperties) : extra extra ;
-
+
local result = [ property-set.create
[ $(xusage-requirements).non-dependency ] $(extra) ] ;
-
- # Propagate usage requirements we've got from sources, except
- # for the <pch-header> and <pch-file> features.
+
+ # Propagate usage requirements we've got from sources, except for the
+ # <pch-header> and <pch-file> features.
#
- # That feature specifies which pch file to use, and should apply
- # only to direct dependents. Consider:
+ # That feature specifies which pch file to use, and should apply only to
+ # direct dependents. Consider:
#
# pch pch1 : ...
# lib lib1 : ..... pch1 ;
- # pch pch2 :
+ # pch pch2 :
# lib lib2 : pch2 lib1 ;
#
# Here, lib2 should not get <pch-header> property from pch1.
#
- # Essentially, when those two features are in usage requirements,
- # they are propagated only to direct dependents. We might need
- # a more general mechanism, but for now, only those two
- # features are special.
+ # Essentially, when those two features are in usage requirements, they
+ # are propagated only to direct dependents. We might need a more general
+ # mechanism, but for now, only those two features are special.
local raw = [ $(subvariant).sources-usage-requirements ] ;
raw = [ $(raw).raw ] ;
- raw = [ property.change $(raw) : <pch-header> ] ;
- raw = [ property.change $(raw) : <pch-file> ] ;
- result = [ $(result).add [ property-set.create $(raw) ] ] ;
-
- return $(result) ;
+ raw = [ property.change $(raw) : <pch-header> ] ;
+ raw = [ property.change $(raw) : <pch-file> ] ;
+ return [ $(result).add [ property-set.create $(raw) ] ] ;
}
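A hypothetical sketch (illustrative names) of usage requirements being
propagated to dependents: the <include> property declared as a usage
requirement of 'foo' is added to the build properties of anything that lists
'foo' as a source.

    lib foo : foo.cpp : : : <include>include ;
    # 'app' is compiled with -Iinclude (or the toolset's equivalent) because of
    # foo's usage requirements.
    exe app : app.cpp foo ;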
-
- # Creates a new subvariant-dg instances for 'targets'
- # - 'root-targets' the virtual targets will be returned to dependents
- # - 'all-targets' all virtual
- # targets created while building this main target
- # - 'build-request' is property-set instance with requested build properties
- local rule create-subvariant ( root-targets *
- : all-targets * : build-request : sources * :
- rproperties
+
+ # Creates a new subvariant instance for 'targets'.
+ # 'root-targets' - virtual targets to be returned to dependants
+ # 'all-targets' - virtual targets created while building this main target
+ # 'build-request' - property-set instance with requested build properties
+ local rule create-subvariant ( root-targets *
+ : all-targets * : build-request : sources * : rproperties
: usage-requirements )
{
- for local e in $(root-targets)
+ for local e in $(root-targets)
{
$(e).root true ;
- }
-
- # Process all vtargets that will be created if this main target
+ }
+
+ # Process all virtual targets that will be created if this main target
# is created.
local s = [ new subvariant $(__name__) : $(build-request) : $(sources)
- : $(rproperties) : $(usage-requirements) : $(all-targets) ] ;
- for local v in $(all-targets)
+ : $(rproperties) : $(usage-requirements) : $(all-targets) ] ;
+ for local v in $(all-targets)
{
if ! [ $(v).creating-subvariant ]
- {
- $(v).creating-subvariant $(s) ;
- }
- }
+ {
+ $(v).creating-subvariant $(s) ;
+ }
+ }
return $(s) ;
}
-
- # Constructs the virtual targets for this abstract targets and
- # the dependecy graph. Returns the list of virtual targets.
- # Should be overrided in derived classes.
+
+ # Constructs virtual targets for this abstract target and the dependency
+ # graph. Returns the list of virtual targets. Should be overridden in derived
+ # classes.
rule construct ( name : source-targets * : properties * )
{
errors.error "method should be defined in derived classes" ;
}
}
+
class typed-target : basic-target
{
- import generators ;
-
- rule __init__ ( name : project : type
- : sources * : requirements * : default-build * : usage-requirements * )
- {
- basic-target.__init__ $(name) : $(project)
- : $(sources) : $(requirements) : $(default-build) : $(usage-requirements) ;
-
+ import generators ;
+
+ rule __init__ ( name : project : type : sources * : requirements *
+ : default-build * : usage-requirements * )
+ {
+ basic-target.__init__ $(name) : $(project) : $(sources)
+ : $(requirements) : $(default-build) : $(usage-requirements) ;
+
self.type = $(type) ;
}
-
+
rule type ( )
{
return $(self.type) ;
}
-
+
rule construct ( name : source-targets * : property-set )
{
- local r = [ generators.construct $(self.project) $(name:S=) : $(self.type)
- : [ property-set.create [ $(property-set).raw ] # [ feature.expand
- <main-target-type>$(self.type) ]
- # ]
+ local r = [ generators.construct $(self.project) $(name:S=) : $(self.type)
+ : [ property-set.create [ $(property-set).raw ]
+ <main-target-type>$(self.type) ]
: $(source-targets) ] ;
if ! $(r)
- {
+ {
ECHO "warn: Unable to construct" [ full-name ] ;
-
+
# Are there any top-level generators for this type/property set.
- if ! [ generators.find-viable-generators
- $(self.type) : $(property-set) ]
+ if ! [ generators.find-viable-generators $(self.type)
+ : $(property-set) ]
{
ECHO "error: no generators were found for type '$(self.type)'" ;
ECHO "error: and the requested properties" ;
ECHO "error: make sure you've configured the needed tools" ;
ECHO "See http://boost.org/boost-build2/doc/html/bbv2/advanced/configuration.html" ;
-
ECHO "To debug this problem, try the --debug-generators option." ;
EXIT ;
}
}
-
return $(r) ;
- }
+ }
}
-# Return the list of sources to use, if main target rule is invoked
-# with 'sources'. If there are any objects in 'sources', they are treated
-# as main target instances, and the name of such targets are adjusted to
-# be '<name_of_this_target>__<name_of_source_target>'. Such renaming
-# is disabled is non-empty value is passed for 'no-renaming' parameter.
-#
+
+# Return the list of sources to use, if the main target rule is invoked with
+# 'sources'. If there are any objects in 'sources', they are treated as main
+# target instances, and the names of such targets are adjusted to be
+# '<name_of_this_target>__<name_of_source_target>'. Such renaming is disabled if
+# a non-empty value is passed as the 'no-renaming' parameter.
+#
rule main-target-sources ( sources * : main-target-name : no-renaming ? )
{
local result ;
@@ -1435,78 +1400,76 @@
{
if [ class.is-instance $(t) ]
{
- local name = [ $(t).name ] ;
+ local name = [ $(t).name ] ;
if ! $(no-renaming)
- {
+ {
name = $(main-target-name)__$(name) ;
$(t).rename $(name) ;
- }
+ }
# Inline targets are not built by default.
local p = [ $(t).project ] ;
- $(p).mark-target-as-explicit $(name) ;
+ $(p).mark-target-as-explicit $(name) ;
result += $(name) ;
}
else
{
result += $(t) ;
- }
- }
+ }
+ }
return $(result) ;
}
-# Returns the requirement to use when declaring a main target,
-# which are obtained by
-# - translating all specified property paths, and
-# - refining project requirements with the one specified for the target
-rule main-target-requirements (
- specification * # Properties explicitly specified for a main target
- : project # Project where the main target is to be declared
- )
+# Returns the requirements to use when declaring a main target, obtained by
+# translating all specified property paths and refining project requirements
+# with the ones specified for the target.
+rule main-target-requirements (
+ specification * # Properties explicitly specified for the main target.
+ : project # Project where the main target is to be declared.
+)
{
specification += [ toolset.requirements ] ;
-
- local requirements = [ property-set.refine-from-user-input
+
+ local requirements = [ property-set.refine-from-user-input
[ $(project).get requirements ] : $(specification) :
[ $(project).project-module ] : [ $(project).get location ] ] ;
-
- if $(requirements[1]) = "@error"
+ if $(requirements[1]) = "@error"
{
errors.error "Conflicting requirements for target:" $(requirements) ;
}
return $(requirements) ;
}
-# Returns the use requirement to use when declaraing a main target,
-# which are obtained by
-# - translating all specified property paths, and
-# - adding project's usage requirements
+
+# Returns the usage requirements to use when declaring a main target, which are
+# obtained by translating all specified property paths and adding the project's
+# usage requirements.
rule main-target-usage-requirements (
- specification * # Use-properties explicitly specified for a main target
- : project # Project where the main target is to be declared
- )
+ specification * # Use-properties explicitly specified for a main target.
+ : project # Project where the main target is to be declared.
+)
{
- local loc = [ $(project).get location ] ;
local project-usage-requirements = [ $(project).get usage-requirements ] ;
-
- # We don't use 'refine-from-user-input' because I'm not sure if:
- # - removing of parent's usage requirements makes sense
+
+ # We don't use 'refine-from-user-input' because:
+    # - I'm not sure if removing the parent's usage requirements makes sense
# - refining of usage requirements is not needed, since usage requirements
# are always free.
- local usage-requirements = [ property-set.create-from-user-input
- $(specification)
- : [ $(project).project-module ] [ $(project).get location ] ] ;
-
+ local usage-requirements = [ property-set.create-from-user-input
+ $(specification)
+ : [ $(project).project-module ] [ $(project).get location ] ] ;
+
return [ $(project-usage-requirements).add $(usage-requirements) ] ;
}
-# Return the default build value to use when declaring a main target,
-# which is obtained by using specified value if not empty and parent's
-# default build attribute otherwise.
+
+# Return the default build value to use when declaring a main target, which is
+# obtained by using the specified value if not empty and the parent's default
+# build attribute otherwise.
rule main-target-default-build (
- specification * # Default build explicitly specified for a main target
- : project # Project where the main target is to be declared
- )
+ specification * # Default build explicitly specified for a main target.
+ : project # Project where the main target is to be declared.
+)
{
local result ;
if $(specification)
@@ -1516,35 +1479,32 @@
else
{
result = [ $(project).get default-build ] ;
- }
+ }
return [ property-set.create-with-validation $(result) ] ;
-}
+}
+
-# Registers the specified target as a main target alternatives.
-# Returns 'target'.
-rule main-target-alternative ( target )
-{
+# Registers the specified target as a main target alternative and returns it.
+rule main-target-alternative ( target )
+{
local ptarget = [ $(target).project ] ;
-
$(ptarget).add-alternative $(target) ;
return $(target) ;
}
-# Creates a typed-target with the specified properties.
-# The 'name', 'sources', 'requirements', 'default-build' and
-# 'usage-requirements' are assumed to be in the form specified
-# by the user in Jamfile corresponding to 'project'.
-rule create-typed-target ( type : project :
- name : sources * : requirements * : default-build *
- : usage-requirements * )
+
+# Creates a typed-target with the specified properties. The 'name', 'sources',
+# 'requirements', 'default-build' and 'usage-requirements' are assumed to be in
+# the form specified by the user in the Jamfile corresponding to 'project'.
+rule create-typed-target ( type : project : name : sources * : requirements *
+ : default-build * : usage-requirements * )
{
return [
- targets.main-target-alternative
- [ new typed-target $(name) : $(project) : $(type)
- : [ targets.main-target-sources $(sources) : $(name) ]
- : [ targets.main-target-requirements $(requirements) : $(project) ]
- : [ targets.main-target-default-build $(default-build) : $(project) ]
- : [ targets.main-target-usage-requirements $(usage-requirements) : $(project) ]
- ] ] ;
-
+ targets.main-target-alternative
+ [ new typed-target $(name) : $(project) : $(type)
+ : [ targets.main-target-sources $(sources) : $(name) ]
+ : [ targets.main-target-requirements $(requirements) : $(project) ]
+ : [ targets.main-target-default-build $(default-build) : $(project) ]
+ : [ targets.main-target-usage-requirements $(usage-requirements) : $(project) ]
+ ] ] ;
}
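
For orientation, a hedged sketch of how an ordinary Jamfile declaration exercises the helper rules documented above; the stock 'exe' rule is a typed main-target rule, while the target name and properties here are illustrative only:

    # Hypothetical Jamfile. The 'exe' rule ends up in targets.create-typed-target,
    # which runs each argument group through main-target-sources,
    # main-target-requirements, main-target-default-build and
    # main-target-usage-requirements before registering the alternative.
    exe hello
        : hello.cpp [ obj greeting : greeting.cpp ]  # inline 'obj' target is
                                                     # renamed to hello__greeting
                                                     # and marked explicit
        : <define>HELLO_NO_LOGGING   # requirements
        : <variant>release           # default build
        : <include>include           # usage requirements
        ;
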
Modified: branches/release/tools/build/v2/build/toolset.jam
==============================================================================
--- branches/release/tools/build/v2/build/toolset.jam (original)
+++ branches/release/tools/build/v2/build/toolset.jam 2008-03-15 14:55:28 EDT (Sat, 15 Mar 2008)
@@ -1,46 +1,47 @@
-# Copyright 2003 Dave Abrahams
-# Copyright 2005 Rene Rivera
-# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+# Copyright 2003 Dave Abrahams
+# Copyright 2005 Rene Rivera
+# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
# Support for toolset definition.
+import errors ;
import feature ;
+import generators ;
import numbers ;
-import errors : error ;
-import property ;
import path ;
-import generators ;
-import set : difference ;
+import property ;
import regex ;
import sequence ;
+import set ;
+
.flag-no = 1 ;
.ignore-requirements = ;
-# This is used only for testing, to make sure
-# we don't get random extra elements in paths.
+# This is used only for testing, to make sure we don't get random extra elements
+# in paths.
if --ignore-toolset-requirements in [ modules.peek : ARGV ]
{
.ignore-requirements = 1 ;
}
-# Initializes an additional toolset-like module.
-# First load 'toolset-module' and then calls its 'init'
-# rule with trailing arguments
+
+# Initializes an additional toolset-like module. First loads the
+# 'toolset-module' and then calls its 'init' rule with trailing arguments.
+#
rule using ( toolset-module : * )
{
import $(toolset-module) ;
$(toolset-module).init $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
}
-# Expands subfeatures in each property sets.
-# e.g
-# <toolset>gcc-3.2
-# will be converted to
-# <toolset>gcc/<toolset-version>3.2
+
+# Expands subfeatures in each property set, e.g. '<toolset>gcc-3.2' will be
+# converted to '<toolset>gcc/<toolset-version>3.2'.
+#
local rule normalize-condition ( property-sets * )
{
local result ;
@@ -54,13 +55,11 @@
}
-# Specifies if the 'flags' rule should do checking that
-# the invoking module is the same as module we're setting
-# flag for.
-# 'v' can be either 'checked' or 'unchecked'.
-# Subsequent call to 'pop-checking-for-flags-module'
-# will restore the behaviour that was in effect before
-# calling this rule.
+# Specifies if the 'flags' rule should check that the invoking module is the
+# same as the module we're setting the flag for. 'v' can be either 'checked' or
+# 'unchecked'. A subsequent call to 'pop-checking-for-flags-module' will restore
+# the setting that was in effect before calling this rule.
+#
rule push-checking-for-flags-module ( v )
{
.flags-module-checking = $(v) $(.flags-module-checking) ;
@@ -71,123 +70,120 @@
.flags-module-checking = $(.flags-module-checking[2-]) ;
}
+
# Specifies the flags (variables) that must be set on targets under certain
# conditions, described by arguments.
-rule flags ( rule-or-module # If contains dot, should be a rule name.
- # The flags will be applied when that rule is
- # used to set up build actions.
- #
- # If does not contain dot, should be a module name.
- # The flags will be applied for all rules in that
- # module.
- # If module for rule is different from the calling
- # module, an error is issued.
-
- variable-name # Variable that should be set on target
- condition * : # A condition when this flag should be applied.
- # Should be set of property sets. If one of
- # those property sets is contained in build
- # properties, the flag will be used.
- # Implied values are not allowed:
- # "<toolset>gcc" should be used, not just
- # "gcc". Subfeatures, like in "<toolset>gcc-3.2"
- # are allowed. If left empty, the flag will
- # always used.
- #
- # Propery sets may use value-less properties
- # ('<a>' vs. '<a>value') to match absent
- # properties. This allows to separately match
- #
- # <architecture>/<address-model>64
- # <architecture>ia64/<address-model>
- #
- # Where both features are optional. Without this
- # syntax we'd be forced to define "default" value.
-
-
- values * : # The value to add to variable. If <feature>
- # is specified, then the value of 'feature'
- # will be added.
- unchecked ? # If value 'unchecked' is passed, will not test
- # that flags are set for the calling module.
- : hack-hack ? # For
- # flags rule OPTIONS <cxx-abi> : -model ansi
- # Treak <cxx-abi> as condition
- # FIXME: ugly hack.
- )
+#
+rule flags (
+ rule-or-module # If contains a dot, should be a rule name. The flags will
+ # be applied when that rule is used to set up build
+ # actions.
+ #
+ # If does not contain dot, should be a module name. The
+                  # flags will be applied for all rules in that module. If the
+                  # module for the rule is different from the calling module,
+                  # an error is issued.
+
+ variable-name # Variable that should be set on target.
+ condition * : # A condition when this flag should be applied. Should be a
+ # set of property sets. If one of those property sets is
+ # contained in the build properties, the flag will be used.
+ # Implied values are not allowed: "<toolset>gcc" should be
+ # used, not just "gcc". Subfeatures, like in
+ # "<toolset>gcc-3.2" are allowed. If left empty, the flag
+ # will be used unconditionally.
+ #
+                  # Property sets may use value-less properties ('<a>' vs.
+                  # '<a>value') to match absent properties. This allows
+                  # separately matching:
+ #
+ # <architecture>/<address-model>64
+ # <architecture>ia64/<address-model>
+ #
+ # Where both features are optional. Without this syntax
+ # we'd be forced to define "default" values.
+
+ values * : # The value to add to variable. If <feature> is specified,
+ # then the value of 'feature' will be added.
+ unchecked ? # If value 'unchecked' is passed, will not test that flags
+ # are set for the calling module.
+ : hack-hack ? # For
+ # flags rule OPTIONS <cxx-abi> : -model ansi
+ # Treat <cxx-abi> as condition
+ # FIXME: ugly hack.
+)
{
local caller = [ CALLER_MODULE ] ;
if ! [ MATCH ".*([.]).*" : $(rule-or-module) ]
&& [ MATCH "(Jamfile<.*)" : $(caller) ]
{
- # Unqualified rule name, used inside Jamfile.
- # (most likely used with 'make' or 'notfile' rules.
- # This prevents setting flags on entire Jamfile module
- # (this will be considered as rule), but who cares?
+ # Unqualified rule name, used inside Jamfile. Most likely used with
+ # 'make' or 'notfile' rules. This prevents setting flags on the entire
+        # Jamfile module (which would be considered a rule), but who cares?
# Probably, 'flags' rule should be split into 'flags' and
# 'flags-on-module'.
rule-or-module = $(caller).$(rule-or-module) ;
}
else
- {
+ {
local module_ = [ MATCH "([^.]*).*" : $(rule-or-module) ] ;
- if $(unchecked) != unchecked
+ if $(unchecked) != unchecked
&& $(.flags-module-checking[1]) != unchecked
&& $(module_) != $(caller)
{
errors.error "Module $(caller) attempted to set flags for module $(module_)" ;
}
}
-
-
+
+
if $(condition) && ! $(condition:G=) && ! $(hack-hack)
{
- # We have condition in the form '<feature>', that is, without
- # value. That's a previous syntax:
- #
+        # We have a condition in the form '<feature>', that is, without a
+        # value. That is the old syntax:
# flags gcc.link RPATH <dll-path> ;
# for compatibility, convert it to
- # flags gcc.link RPATH : <dll-path> ;
+ # flags gcc.link RPATH : <dll-path> ;
values = $(condition) ;
condition = ;
}
-
+
if $(condition)
{
property.validate-property-sets $(condition) ;
condition = [ normalize-condition $(condition) ] ;
}
-
+
add-flag $(rule-or-module) : $(variable-name)
: $(condition) : $(values) ;
}
-# Adds new flag setting with the specified values
-# Does no checking
-local rule add-flag ( rule-or-module :
- variable-name : condition * : values * )
+
+# Adds a new flag setting with the specified values. Does no checking.
+#
+local rule add-flag ( rule-or-module : variable-name : condition * : values * )
{
.$(rule-or-module).flags += $(.flag-no) ;
# Store all flags for a module
- local module_ = [ MATCH "([^.]*).*" : $(rule-or-module) ] ;
+ local module_ = [ MATCH "([^.]*).*" : $(rule-or-module) ] ;
.module-flags.$(module_) += $(.flag-no) ;
# Store flag-no -> rule-or-module mapping
.rule-or-module.$(.flag-no) = $(rule-or-module) ;
.$(rule-or-module).variable.$(.flag-no) += $(variable-name) ;
.$(rule-or-module).values.$(.flag-no) += $(values) ;
- .$(rule-or-module).condition.$(.flag-no) += $(condition) ;
-
+ .$(rule-or-module).condition.$(.flag-no) += $(condition) ;
+
.flag-no = [ numbers.increment $(.flag-no) ] ;
}
-
+
# Returns the first element of 'property-sets' which is a subset of
# 'properties', or an empty list if no such element exists.
+#
rule find-property-subset ( property-sets * : properties * )
{
- # cut property values off
+ # Cut property values off.
local prop-keys = $(properties:G) ;
local result ;
@@ -195,20 +191,20 @@
{
if ! $(result)
{
- # Handle value-less properties like '<architecture>' (compare with
+ # Handle value-less properties like '<architecture>' (compare with
# '<architecture>x86').
local set = [ feature.split $(s) ] ;
# Find the set of features that
- # - have no property specified in required property set
- # - are omitted in build property set
+            #   - have no property specified in the required property set
+ # - are omitted in the build property set.
local default-props ;
for local i in $(set)
{
- # If $(i) is a value-less property it should match default
- # value of an optional property. See the first line in the
- # example below:
+                # If $(i) is a value-less property it should match the default
+                # value of an optional property. See the first line in the
+                # example below:
#
# property set properties result
# <a> <b>foo <b>foo match
@@ -230,6 +226,7 @@
return $(result) ;
}
+
rule handle-flag-value ( value * : properties * )
{
local result ;
@@ -241,98 +238,96 @@
local att = [ feature.attributes $(p:G) ] ;
if dependency in $(att)
{
- # the value of a dependency feature is a target
- # and must be actualized
+ # The value of a dependency feature is a target and needs to be
+ # actualized.
result += [ $(p:G=).actualize ] ;
- }
+ }
else if path in $(att) || free in $(att)
{
local values ;
- # Treat features with && in the value
- # specially -- each &&-separated element is considered
- # separate value. This is needed to handle searched
- # libraries, which must be in specific order.
+ # Treat features with && in the value specially -- each
+ # &&-separated element is considered a separate value. This is
+ # needed to handle searched libraries, which must be in a
+ # specific order.
if ! [ MATCH (&&) : $(p:G=) ]
{
values = $(p:G=) ;
}
- else
+ else
{
values = [ regex.split $(p:G=) "&&" ] ;
}
if path in $(att)
- {
- result += [ sequence.transform path.native : $(values) ] ;
+ {
+ result += [ sequence.transform path.native : $(values) ] ;
}
else
{
result += $(values) ;
- }
- }
- else
+ }
+ }
+ else
{
result += $(p:G=) ;
}
- }
+ }
}
else
{
result += $(value) ;
- }
+ }
return $(result) ;
}
-# Given a rule name and a property set, returns a list of interleaved
-# variables names and values which must be set on targets for that
-# rule/property-set combination.
+
+# Given a rule name and a property set, returns a list of interleaved variable
+# names and values which must be set on targets for that rule/property-set
+# combination.
+#
rule set-target-variables-aux ( rule-or-module : property-set )
{
local result ;
properties = [ $(property-set).raw ] ;
- for local f in $(.$(rule-or-module).flags)
+ for local f in $(.$(rule-or-module).flags)
{
local variable = $(.$(rule-or-module).variable.$(f)) ;
local condition = $(.$(rule-or-module).condition.$(f)) ;
local values = $(.$(rule-or-module).values.$(f)) ;
-
-
+
if ! $(condition) ||
- [ find-property-subset $(condition) : $(properties) ]
+ [ find-property-subset $(condition) : $(properties) ]
{
local processed ;
for local v in $(values)
- {
- # The value might be <feature-name> so needs special
- # treatment.
- processed += [
- handle-flag-value $(v) : $(properties) ] ;
+ {
+ # The value might be <feature-name> so needs special treatment.
+ processed += [ handle-flag-value $(v) : $(properties) ] ;
}
for local r in $(processed)
- {
+ {
result += $(variable) $(r) ;
- }
+ }
}
}
-
- # strip away last dot separated part and recurse.
+
+ # Strip away last dot separated part and recurse.
local next = [ MATCH ^(.+)\\.([^\\.])* : $(rule-or-module) ] ;
if $(next)
{
- result += [
- set-target-variables-aux $(next[1]) : $(property-set) ] ;
- }
- return $(result) ;
+ result += [ set-target-variables-aux $(next[1]) : $(property-set) ] ;
+ }
+ return $(result) ;
}
rule set-target-variables ( rule-or-module targets + : property-set )
-{
+{
properties = [ $(property-set).raw ] ;
local key = $(rule-or-module).$(property-set) ;
local settings = $(.stv.$(key)) ;
if ! $(settings)
{
- settings = [
+ settings = [
set-target-variables-aux $(rule-or-module) : $(property-set) ] ;
if ! $(settings)
@@ -341,7 +336,7 @@
}
.stv.$(key) = $(settings) ;
}
-
+
if $(settings) != none
{
local var-name = ;
@@ -360,23 +355,25 @@
}
}
-# Make toolset 'toolset', defined in a module of the same name,
-# inherit from 'base'
-# 1. The 'init' rule from 'base' is imported into 'toolset' with full
-# name. Another 'init' is called, which forwards to the base one.
-# 2. All generators from 'base' are cloned. The ids are adjusted and
-# <toolset> property in requires is adjusted too
-# 3. All flags are inherited
+
+# Make toolset 'toolset', defined in a module of the same name, inherit from
+# 'base'.
+# 1. The 'init' rule from 'base' is imported into 'toolset' with full name.
+# Another 'init' is called, which forwards to the base one.
+# 2. All generators from 'base' are cloned. The ids are adjusted and <toolset>
+# property in requires is adjusted too.
+# 3. All flags are inherited.
# 4. All rules are imported.
+#
rule inherit ( toolset : base )
{
import $(base) ;
-
inherit-generators $(toolset) : $(base) ;
- inherit-flags $(toolset) : $(base) ;
- inherit-rules $(toolset) : $(base) ;
+ inherit-flags $(toolset) : $(base) ;
+ inherit-rules $(toolset) : $(base) ;
}
+
rule inherit-generators ( toolset properties * : base : generators-to-ignore * )
{
properties ?= <toolset>$(toolset) ;
@@ -384,15 +381,15 @@
for local g in $(base-generators)
{
local id = [ $(g).id ] ;
-
+
if ! $(id) in $(generators-to-ignore)
- {
+ {
# Some generator names have multiple periods in their name, so
- # $(id:B=$(toolset)) doesn't generate the right new-id name.
- # e.g. if id = gcc.compile.c++, $(id:B=darwin) = darwin.c++,
- # which is not what we want. Manually parse the base and suffix
- # (if there's a better way to do this, I'd love to see it.)
- # See also register in module generators.
+ # $(id:B=$(toolset)) doesn't generate the right new-id name. E.g. if
+ # id = gcc.compile.c++, $(id:B=darwin) = darwin.c++, which is not
+ # what we want. Manually parse the base and suffix (if there's a
+ # better way to do this, I'd love to see it). See also register in
+ # module generators.
local base = $(id) ;
local suffix = "" ;
while $(base:S)
@@ -403,27 +400,27 @@
local new-id = $(toolset)$(suffix) ;
generators.register [ $(g).clone $(new-id) : $(properties) ] ;
- }
- }
+ }
+ }
}
-# Brings all flag definitions from 'base' toolset into
-# other toolset 'toolset'. Flag definitions which
-# condition make use of properties in 'prohibited-properties'
-# are ignored. Don't confuse property and feature, for
-# example <debug-symbols>on and <debug-symbols>off, so blocking
-# one of them does not block the other one.
-#
-# The flag conditions are not altered at all, so if condition
-# includes name, or version of base toolset, it won't ever match
-# the inheriting toolset. When such flag settings must be
-# inherited, define a rule in base toolset module and call it
-# as needed.
+
+# Brings all flag definitions from the 'base' toolset into the 'toolset'
+# toolset. Flag definitions whose conditions make use of properties in
+# 'prohibited-properties' are ignored. Note the difference between a property
+# and a feature here: for example, <debug-symbols>on and <debug-symbols>off are
+# different properties, so blocking one of them does not block the other.
+#
+# The flag conditions are not altered at all, so if a condition includes the
+# name or version of the base toolset, it will never match the inheriting
+# toolset. When such flag settings must be inherited, define a rule in the base
+# toolset module and call it as needed.
+#
rule inherit-flags ( toolset : base : prohibited-properties * )
{
for local f in $(.module-flags.$(base))
- {
- local rule-or-module = $(.rule-or-module.$(f)) ;
+ {
+ local rule-or-module = $(.rule-or-module.$(f)) ;
if [ set.difference
$(.$(rule-or-module).condition.$(f)) :
$(prohibited-properties)
@@ -439,45 +436,47 @@
{
new-rule-or-module = $(toolset) ;
}
-
+
add-flag
- $(new-rule-or-module)
- : $(.$(rule-or-module).variable.$(f))
- : $(.$(rule-or-module).condition.$(f))
- : $(.$(rule-or-module).values.$(f))
- ;
+ $(new-rule-or-module)
+ : $(.$(rule-or-module).variable.$(f))
+ : $(.$(rule-or-module).condition.$(f))
+ : $(.$(rule-or-module).values.$(f)) ;
}
- }
+ }
}
+
rule inherit-rules ( toolset : base )
{
- # It appears that "action" creates local rule...
+ # It appears that "action" creates a local rule...
local base-generators = [ generators.generators-for-toolset $(base) ] ;
local rules ;
for local g in $(base-generators)
{
local id = [ MATCH "[^.]*\.(.*)" : [ $(g).id ] ] ;
rules += $(id) ;
- }
+ }
IMPORT $(base) : $(rules) : $(toolset) : $(rules) ;
# Import the rules to the global scope
IMPORT $(toolset) : $(rules) : : $(toolset).$(rules) ;
}
-# Return the list of global 'toolset requirements'.
-# Those requirements will be automatically added to
-# the requirements of any main target.
+
+# Return the list of global 'toolset requirements'. Those requirements will be
+# automatically added to the requirements of any main target.
+#
rule requirements ( )
{
return $(.requirements) ;
}
-# Adds elements to the list of global 'toolset requirements'.
-# The requirements will be automatically added to the requirements
-# for all main targets, as if they were specified literally.
-# For best results, all requirements added should be conditional or
-# indirect conditional.
+
+# Adds elements to the list of global 'toolset requirements'. The requirements
+# will be automatically added to the requirements for all main targets, as if
+# they were specified literally. For best results, all requirements added should
+# be conditional or indirect conditional.
+#
rule add-requirements ( requirements * )
{
if ! $(.ignore-requirements)
@@ -486,6 +485,7 @@
}
}
+
local rule __test__ ( )
{
import assert ;
@@ -494,8 +494,8 @@
assert.result : find-property-subset <a>0/<b>0/<c>9/<d>9/<e>5 <a>9 : $(p) ;
local p-set = <a>/<b> <a>0/<b> <a>/<b>1 <a>0/<b>1 ;
- assert.result <a>/<b> : find-property-subset $(p-set) : ;
+ assert.result <a>/<b> : find-property-subset $(p-set) : ;
assert.result <a>0/<b> : find-property-subset $(p-set) : <a>0 <c>2 ;
assert.result <a>/<b>1 : find-property-subset $(p-set) : <b>1 <c>2 ;
- assert.result <a>0/<b>1 : find-property-subset $(p-set) : <a>0 <b>1 ;
+ assert.result <a>0/<b>1 : find-property-subset $(p-set) : <a>0 <b>1 ;
}
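
As a usage illustration of the 'flags' interface documented above, a hedged sketch of how a toolset module typically calls it; the gcc rule names follow the stock toolsets and the option values are examples only:

    # Conditional flag: add -O3 to the OPTIONS variable on targets built through
    # gcc.compile whenever <optimization>speed is in the build properties.
    flags gcc.compile OPTIONS <optimization>speed : -O3 ;

    # Feature-valued flag in the old syntax handled by the compatibility branch
    # of 'flags' above: the value of the free <dll-path> feature ends up in RPATH.
    flags gcc.link RPATH <dll-path> ;
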
Modified: branches/release/tools/build/v2/build/type.jam
==============================================================================
--- branches/release/tools/build/v2/build/type.jam (original)
+++ branches/release/tools/build/v2/build/type.jam 2008-03-15 14:55:28 EDT (Sat, 15 Mar 2008)
@@ -1,47 +1,45 @@
-# Copyright 2002, 2003 Dave Abrahams
-# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+# Copyright 2002, 2003 Dave Abrahams
+# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
# Deals with target type declaration and defines target class which supports
# typed targets.
-import feature ;
-import generators : * ;
import "class" : new ;
import errors ;
+import feature ;
+import generators : * ;
+import project ;
import property ;
import scanner ;
-import project ;
-# This creates a circular dependency
-# project-test1 -> project -> project-root -> builtin -> type -> targets -> project
-# import targets ;
-
-# The feature is optional so that it never implicitly added.
-# It's used only for internal purposes, and in all cases we
-# want to explicitly use it.
+# The following import would create a circular dependency:
+# project -> project-root -> builtin -> type -> targets -> project
+# import targets ;
+
+# The feature is optional so it would never get added implicitly. It's used only
+# for internal purposes and in all cases we want to use it explicitly.
feature.feature target-type : : composite optional ;
-# feature.feature base-target-type : : composite optional ;
feature.feature main-target-type : : optional incidental ;
feature.feature base-target-type : : composite optional free ;
-# feature.feature main-target-type : : composite optional incidental ;
-# Registers a target type, possible derived from a 'base-type'.
-# If 'suffixes' are provided, they given all the suffixes that mean a file is of 'type'.
-# Also, the first element gives the suffix to be used when constructing and object of
-# 'type'.
+
+# Registers a target type, possibly derived from a 'base-type'. Providing a list
+# of 'suffixes' here is a shortcut for separately calling the register-suffixes
+# rule with the given suffixes and the set-generated-target-suffix rule with the
+# first given suffix.
rule register ( type : suffixes * : base-type ? )
{
- # Type names cannot contain hyphens, because when used as
- # feature-values they will be interpreted as composite features
- # which need to be decomposed.
+ # Type names cannot contain hyphens, because when used as feature-values
+ # they would be interpreted as composite features which need to be
+ # decomposed.
switch $(type)
{
case *-* : errors.error "type name \"$(type)\" contains a hyphen" ;
}
-
+
if $(type) in $(.types)
{
errors.error "Type $(type) is already registered." ;
@@ -50,73 +48,72 @@
{
.types += $(type) ;
.bases.$(type) = $(base-type) ;
- .derived.$(base-type) += $(type) ;
+ .derived.$(base-type) += $(type) ;
- if $(suffixes)-not-empty
- {
- # Generated targets of 'type' will use the first of 'suffixes'
- # (this may be overriden)
- $(.suffixes).insert <target-type>$(type) : $(suffixes[1]) ;
- # Specify mapping from suffixes to type
+ if $(suffixes)-is-not-empty
+ {
+ # Specify mapping from suffixes to type.
register-suffixes $(suffixes) : $(type) ;
+ # Generated targets of 'type' will use the first of 'suffixes'. This
+            # may be overridden.
+ set-generated-target-suffix $(type) : : $(suffixes[1]) ;
}
-
- feature.extend target-type : $(type) ;
+
+ feature.extend target-type : $(type) ;
feature.extend main-target-type : $(type) ;
-
- feature.compose <target-type>$(type) : $(base-type:G=<base-target-type>) ;
feature.extend base-target-type : $(type) ;
-# feature.compose <target-type>$(type) : <base-target-type>$(type) ;
+
+ feature.compose <target-type>$(type) : $(base-type:G=<base-target-type>) ;
feature.compose <base-target-type>$(type) : <base-target-type>$(base-type) ;
- # We used to declare main target rule only when 'main' parameter
- # is specified. However, it's hard to decide that a type *never*
- # will need a main target rule and so from time to time we needed
- # to make yet another type 'main'. So, now main target rule is defined
- # for each type.
- main-rule-name = [ type-to-rule-name $(type) ] ;
- .main-target-type.$(main-rule-name) = $(type) ;
-
+ # We used to declare the main target rule only when a 'main' parameter
+ # was specified. However, it's hard to decide that a type will *never*
+ # need a main target rule and so from time to time we needed to make yet
+ # another type 'main'. So now a main target rule is defined for each
+ # type.
+ main-rule-name = [ type-to-rule-name $(type) ] ;
+ .main-target-type.$(main-rule-name) = $(type) ;
+
IMPORT $(__name__) : main-target-rule : : $(main-rule-name) ;
}
}
-# Given type, returns name of main target rule which creates
-# targets of that type.
+
+# Given a type, returns the name of the main target rule which creates targets
+# of that type.
rule type-to-rule-name ( type )
{
- # Lowercase everything. Convert underscores to dashes.ame.
+ # Lowercase everything. Convert underscores to dashes.
import regex ;
local n = [ regex.split $(type:L) "_" ] ;
- n = $(n:J=-) ;
- return $(n) ;
+ return $(n:J=-) ;
}
-# Returns a type, given the name of a main rule.
-rule type-from-rule-name ( main-target-name )
+
+# Given a main target rule name, returns the type for which it creates targets.
+rule type-from-rule-name ( rule-name )
{
- return $(.main-target-type.$(main-target-name)) ;
+ return $(.main-target-type.$(rule-name)) ;
}
-
-# Specifies that targets with suffix from 'suffixes' has the type 'type'.
-# If different type is already specified for any of syffixes,
-# issues an error.
+# Specifies that files with a suffix from 'suffixes' are recognized as targets
+# of type 'type'. Issues an error if a different type is already specified for
+# any of the suffixes.
rule register-suffixes ( suffixes + : type )
{
for local s in $(suffixes)
- {
- if ! $(.type.$(s))
+ {
+ if ! $(.type.$(s))
{
- .type.$(s) = $(type) ;
+ .type.$(s) = $(type) ;
}
- else if $(.type.$(s)) != type
+ else if $(.type.$(s)) != type
{
- errors.error Attempting to specify type for suffix \"$(s)\"
- : "Old type $(.type.$(s)), New type $(type)" ;
+ errors.error Attempting to specify multiple types for suffix \"$(s)\"
+ : "Old type $(.type.$(s)), New type $(type)" ;
}
- }
+ }
}
@@ -129,13 +126,14 @@
}
}
+
# Issues an error if 'type' is unknown.
rule validate ( type )
{
if ! $(type) in $(.types)
{
errors.error "Unknown target type $(type)" ;
- }
+ }
}
@@ -145,19 +143,23 @@
if ! $(type) in $(.types)
{
error "Type" $(type) "is not declared" ;
- }
+ }
.scanner.$(type) = $(scanner) ;
}
+
# Returns a scanner instance appropriate to 'type' and 'properties'.
rule get-scanner ( type : property-set )
{
- if $(.scanner.$(type)) {
+ if $(.scanner.$(type))
+ {
return [ scanner.get $(.scanner.$(type)) : $(property-set) ] ;
- }
+ }
}
-# returns type and all of its bases in order of their distance from type.
+
+# Returns the given type and all of its base types in order of their distance
+# from type.
rule all-bases ( type )
{
local result = $(type) ;
@@ -169,6 +171,7 @@
return $(result) ;
}
+
rule all-derived ( type )
{
local result = $(type) ;
@@ -176,12 +179,11 @@
{
result += [ all-derived $(d) ] ;
}
- return $(result) ;
+ return $(result) ;
}
-# Returns true if 'type' has 'base' as its direct or
-# indirect base.
+# Returns true if 'type' has 'base' as its direct or indirect base.
rule is-derived ( type base )
{
if $(base) in [ all-bases $(type) ]
@@ -190,8 +192,8 @@
}
}
-# Returns true if 'type' is either derived from 'base',
-# or 'type' is equal to 'base'.
+
+# Returns true if 'type' is either derived from or is equal to 'base'.
rule is-subtype ( type base )
{
if $(type) = $(base)
@@ -201,110 +203,119 @@
else
{
return [ is-derived $(type) $(base) ] ;
- }
+ }
}
-# Store suffixes for generated targets
+# Store suffixes for generated targets.
.suffixes = [ new property-map ] ;
-# Store prefixes for generated targets (e.g. "lib" for library)
+# Store prefixes for generated targets (e.g. "lib" for library).
.prefixes = [ new property-map ] ;
-# Sets a target suffix that should be used when generating target
-# of 'type' with the specified properties. Can be called with
-# empty properties if no suffix for 'type' was specified yet.
-# This does not automatically specify that files 'suffix' have
-# 'type' --- two different types can use the same suffix for
-# generating, but only one type should be auto-detected for
-# a file with that suffix. User should explicitly specify which
-# one.
+# Sets a file suffix to be used when generating a target of 'type' with the
+# specified properties. Can be called with no properties if no suffix has
+# already been specified for the 'type'. The 'suffix' parameter can be an empty
+# string ("") to indicate that no suffix should be used.
+#
+# Note that this does not cause files with 'suffix' to be automatically
+# recognized as being of 'type'. Two different types can use the same suffix for
+# their generated files but only one type can be auto-detected for a file with
+# that suffix. The user should explicitly specify which one using the
+# register-suffixes rule.
#
-# The 'suffix' parameter can be empty string ("") to indicate that
-# no suffix should be used.
rule set-generated-target-suffix ( type : properties * : suffix )
{
- set-generated-target-ps suffix : $(type) : $(properties) : $(suffix) ;
-}
+ set-generated-target-ps suffix : $(type) : $(properties) : $(suffix) ;
+}
+
-# Change the suffix previously registered for this type/properties
-# combination. If suffix is not yet specified, sets it.
+# Change the suffix previously registered for this type/properties combination.
+# If suffix is not yet specified, sets it.
rule change-generated-target-suffix ( type : properties * : suffix )
-{
- change-generated-target-ps suffix : $(type) : $(properties) : $(suffix) ;
+{
+ change-generated-target-ps suffix : $(type) : $(properties) : $(suffix) ;
}
+
+# Returns the suffix used when generating a file of 'type' with the given
+# properties.
rule generated-target-suffix ( type : property-set )
{
- return [ generated-target-ps suffix : $(type) : $(property-set) ] ;
+ return [ generated-target-ps suffix : $(type) : $(property-set) ] ;
}
-# Sets a target prefix that should be used when generating target
-# of 'type' with the specified properties. Can be called with
-# empty properties if no prefix for 'type' was specified yet.
+
+# Sets a target prefix that should be used when generating targets of 'type'
+# with the specified properties. Can be called with empty properties if no
+# prefix for 'type' has been specified yet.
#
-# The 'prefix' parameter can be empty string ("") to indicate that
-# no prefix should be used.
+# The 'prefix' parameter can be empty string ("") to indicate that no prefix
+# should be used.
#
-# Example usage is for library names that have to have a "lib"
-# prefix as in unix.
+# Usage example: library names use the "lib" prefix on Unix.
rule set-generated-target-prefix ( type : properties * : prefix )
{
- set-generated-target-ps prefix : $(type) : $(properties) : $(prefix) ;
-}
+ set-generated-target-ps prefix : $(type) : $(properties) : $(prefix) ;
+}
-# Change the prefix previously registered for this type/properties
-# combination. If prefix is not yet specified, sets it.
+
+# Change the prefix previously registered for this type/properties combination.
+# If prefix is not yet specified, sets it.
rule change-generated-target-prefix ( type : properties * : prefix )
-{
- change-generated-target-ps prefix : $(type) : $(properties) : $(prefix) ;
+{
+ change-generated-target-ps prefix : $(type) : $(properties) : $(prefix) ;
}
+
rule generated-target-prefix ( type : property-set )
{
- return [ generated-target-ps prefix : $(type) : $(property-set) ] ;
+ return [ generated-target-ps prefix : $(type) : $(property-set) ] ;
}
-# Common rules for prefix/suffix provisioning follow
-rule set-generated-target-ps ( ps : type : properties * : psval )
+# Common rules for prefix/suffix provisioning follow.
+
+local rule set-generated-target-ps ( ps : type : properties * : psval )
{
properties = <target-type>$(type) $(properties) ;
$(.$(ps)es).insert $(properties) : $(psval) ;
-}
+}
-rule change-generated-target-ps ( ps : type : properties * : psval )
-{
- properties = <target-type>$(type) $(properties) ;
+
+local rule change-generated-target-ps ( ps : type : properties * : psval )
+{
+ properties = <target-type>$(type) $(properties) ;
local prev = [ $(.$(ps)es).find-replace $(properties) : $(psval) ] ;
if ! $(prev)
{
set-generated-target-ps $(ps) : $(type) : $(properties) : $(psval) ;
- }
+ }
}
-# Returns either prefix or suffix (as indicated by 'ps') that
-# should be used when generating target of 'type' with the specified properties.
-# Parameter 'ps' can be either "prefix" or "suffix". If no prefix/suffix is
-# specified for 'type', returns prefix/suffix for base type, if any.
-rule generated-target-ps-real ( ps : type : properties * )
+
+# Returns either prefix or suffix (as indicated by 'ps') that should be used
+# when generating a target of 'type' with the specified properties. Parameter
+# 'ps' can be either "prefix" or "suffix". If no prefix/suffix is specified for
+# 'type', returns prefix/suffix for base type, if any.
+local rule generated-target-ps-real ( ps : type : properties * )
{
local result ;
local found ;
while $(type) && ! $(found)
{
result = [ $(.$(ps)es).find <target-type>$(type) $(properties) ] ;
- # If the prefix/suffix is explicitly set to empty string,
- # we consider prefix/suffix to be found. If we did not compare with "",
- # there would be no way for user to set empty prefix/suffix.
+ # If the prefix/suffix is explicitly set to an empty string, we consider
+ # prefix/suffix to be found. If we were not to compare with "", there
+ # would be no way to specify an empty prefix/suffix.
if $(result)-is-not-empty
{
found = true ;
}
type = $(.bases.$(type)) ;
}
- if $(result) = ""
+ if $(result) = ""
{
result = ;
}
@@ -312,35 +323,35 @@
}
-rule generated-target-ps ( ps : type : property-set )
+local rule generated-target-ps ( ps : type : property-set )
{
local key = .$(ps).$(type).$(property-set) ;
local v = $($(key)) ;
if ! $(v)
{
v = [ generated-target-ps-real $(ps) : $(type)
- : [ $(property-set).raw ] ] ;
+ : [ $(property-set).raw ] ] ;
if ! $(v)
{
v = none ;
}
$(key) = $(v) ;
}
-
+
if $(v) != none
{
return $(v) ;
- }
+ }
}
# Returns file type given its name. If there are several dots in filename,
-# tries each suffix. E.g. for name of "file.so.1.2" suffixes "2", "1", and
-# "so" will be tried.
-rule type ( filename )
+# tries each suffix. E.g. for name of "file.so.1.2" suffixes "2", "1", and "so"
+# will be tried.
+rule type ( filename )
{
local type ;
- while ! $(type) && $(filename:S)
+ while ! $(type) && $(filename:S)
{
local suffix = $(filename:S) ;
type = $(.type$(suffix)) ;
@@ -350,21 +361,23 @@
}
-
-rule main-target-rule ( name : sources * : requirements * : default-build *
- : usage-requirements * )
+# Rule used to construct all main targets. Note that this rule gets imported
+# into the global namespace under different alias names, and the type of target
+# it is supposed to construct is determined by the name of the alias rule
+# actually used to invoke it.
+rule main-target-rule ( name : sources * : requirements * : default-build *
+ : usage-requirements * )
{
- # First find required target type, which is equal to the name used
- # to invoke us.
+ # First discover the required target type, which is equal to the rule name
+ # used to invoke us.
local bt = [ BACKTRACE 1 ] ;
local rulename = $(bt[4]) ;
-
+
local project = [ project.current ] ;
-
- # This is a circular module dependency, so it must be imported here
+
+ # This is a circular module dependency so it must be imported here.
import targets ;
- return [ targets.create-typed-target $(.main-target-type.$(rulename)) : $(project)
- : $(name) : $(sources) : $(requirements)
- : $(default-build) : $(usage-requirements) ] ;
+ return [ targets.create-typed-target $(.main-target-type.$(rulename))
+ : $(project) : $(name) : $(sources) : $(requirements)
+ : $(default-build) : $(usage-requirements) ] ;
}
-
Modified: branches/release/tools/build/v2/build/version.jam
==============================================================================
--- branches/release/tools/build/v2/build/version.jam (original)
+++ branches/release/tools/build/v2/build/version.jam 2008-03-15 14:55:28 EDT (Sat, 15 Mar 2008)
@@ -1,11 +1,13 @@
-# Copyright 2002, 2003, 2004, 2006 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+# Copyright 2002, 2003, 2004, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
rule boost-build ( )
{
return "V2 (Milestone 12)" ;
}
+
+
rule jam ( )
{
local v = [ modules.peek : JAM_VERSION ] ;
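
Ahead of the virtual-target.jam changes below, a hedged sketch of a <tag> rule matching the calling convention used there by _adjust-name; the rule name and the applied decoration are illustrative only:

    import virtual-target ;

    # A <tag> rule receives the specified name, the target type and the build
    # property set. Returning nothing falls back to the default
    # virtual-target.add-prefix-and-suffix handling described below.
    rule my-tag ( name : type ? : property-set )
    {
        if $(type) = SHARED_LIB
        {
            return [ virtual-target.add-prefix-and-suffix $(name)-mine : $(type)
                : $(property-set) ] ;
        }
    }
    # Referenced from a target's requirements as: <tag>@my-tag
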
Modified: branches/release/tools/build/v2/build/virtual-target.jam
==============================================================================
--- branches/release/tools/build/v2/build/virtual-target.jam (original)
+++ branches/release/tools/build/v2/build/virtual-target.jam 2008-03-15 14:55:28 EDT (Sat, 15 Mar 2008)
@@ -1,16 +1,21 @@
-# Copyright 2003 Dave Abrahams
-# Copyright 2005, 2006 Rene Rivera
-# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+# Copyright 2003 Dave Abrahams
+# Copyright 2005, 2006 Rene Rivera
+# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
# Implements virtual targets, which correspond to actual files created during
-# build, but are not yet targets in Jam sense. They are needed, for example,
-# when searching for possible transormation sequences, when it's not known
-# if particular target should be created at all.
+# a build, but are not yet targets in the Jam sense. They are needed, for
+# example, when searching for possible transformation sequences, when it's not
+# yet known whether a particular target should be created at all.
import "class" : new ;
-import path property-set utility sequence errors set type os ;
+import errors ;
+import path ;
+import sequence ;
+import set ;
+import type ;
+import utility ;
# +--------------------------+
# | virtual-target |
@@ -56,39 +61,41 @@
# +---------------------+ | actualize-sources() |
# +---------------------+
#
-# The 'compile-action' and 'link-action' classes are defined not here,
-# but in builtin.jam modules. They are shown in the diagram to give
-# the big picture.
-
-# Potential target. It can be converted into jam target and used in
-# building, if needed. However, it can be also dropped, which allows
-# to search for different transformation and select only one.
+# The 'compile-action' and 'link-action' classes are not defined here but in
+# builtin.jam modules. They are shown in the diagram to give the big picture.
+
+# Models a potential target. It can be converted into a Jam target and used in
+# building, if needed. However, it can also be dropped, which allows us to
+# search for different transformations and select only one.
#
-class virtual-target
+class virtual-target
{
- import virtual-target utility scanner ;
-
- rule __init__ ( name # Name of this target -- specifies the name of
- : project # Project to which this target belongs
+ import scanner ;
+ import sequence ;
+ import utility ;
+ import virtual-target ;
+
+ rule __init__ (
+ name # Target/project name.
+ : project # Project to which this target belongs.
)
- {
+ {
self.name = $(name) ;
self.project = $(project) ;
self.dependencies = ;
}
-
+
# Name of this target.
rule name ( ) { return $(self.name) ; }
# Project of this target.
rule project ( ) { return $(self.project) ; }
- # Adds additional instances of 'virtual-target' that this
- # one depends on.
+ # Adds additional 'virtual-target' instances this one depends on.
rule depends ( d + )
{
self.dependencies = [ sequence.merge $(self.dependencies)
- : [ sequence.insertion-sort $(d) ] ] ;
+ : [ sequence.insertion-sort $(d) ] ] ;
}
rule dependencies ( )
@@ -96,16 +103,16 @@
return $(self.dependencies) ;
}
- # Generates all the actual targets and sets up build actions for
- # this target.
+ # Generates all the actual targets and sets up build actions for this
+ # target.
#
- # If 'scanner' is specified, creates an additional target
- # with the same location as actual target, which will depend on the
- # actual target and be associated with 'scanner'. That additional
- # target is returned. See the docs (#dependency_scanning) for rationale.
- # Target must correspond to a file if 'scanner' is specified.
+ # If 'scanner' is specified, creates an additional target with the same
+ # location as the actual target, which will depend on the actual target and
+    # be associated with 'scanner'. That additional target is returned. See
+    # the docs (#dependency_scanning) for rationale. The target must correspond
+    # to a file if 'scanner' is specified.
#
- # If scanner is not specified, then actual target is returned.
+ # If scanner is not specified then the actual target is returned.
rule actualize ( scanner ? )
{
local actual-name = [ actualize-no-scanner ] ;
@@ -121,7 +128,8 @@
[ utility.ungrist $(actual-name:G) ] $(scanner) : - ] ;
local name = $(actual-name:G=$(g)) ;
- if ! $(self.made.$(name)) {
+ if ! $(self.made.$(name))
+ {
self.made.$(name) = true ;
DEPENDS $(name) : $(actual-name) ;
@@ -132,13 +140,12 @@
}
return $(name) ;
}
-
}
# private: (overridables)
- # Sets up build actions for 'target'. Should call appropriate rules
- # and set target variables.
+ # Sets up build actions for 'target'. Should call appropriate rules and set
+ # target variables.
rule actualize-action ( target )
{
errors.error "method should be defined in derived classes" ;
@@ -149,16 +156,16 @@
{
errors.error "method should be defined in derived classes" ;
}
-
- # If the target is generated one, returns the path where it will be
- # generated. Otherwise, returns empty list.
+
+ # If the target is a generated one, returns the path where it will be
+ # generated. Otherwise, returns an empty list.
rule path ( )
{
- errors.error "method should be defined in derived classes" ;
+ errors.error "method should be defined in derived classes" ;
}
-
- # Return that actual target name that should be used
- # (for the case where no scanner is involved)
+
+    # Returns the actual target name to be used in the case when no scanner is
+    # involved.
rule actual-name ( )
{
errors.error "method should be defined in derived classes" ;
@@ -167,56 +174,58 @@
# implementation
rule actualize-no-scanner ( )
{
- # In fact, we just need to merge virtual-target with
- # abstract-virtual-target and the latter is the only class
- # derived from the former. But that's for later.
- errors.error "method should be defined in derived classes" ;
- }
+ # In fact, we just need to merge virtual-target with
+ # abstract-virtual-target as the latter is the only class derived from
+ # the former. But that's for later.
+ errors.error "method should be defined in derived classes" ;
+ }
}
-# Target which correspond to a file. The exact mapping for file
-# is not yet specified in this class. (TODO: Actually, the class name
-# could be better...)
+# Target corresponding to a file. The exact mapping to a file is not yet
+# specified in this class. (TODO: Actually, the class name could be better...)
#
-# May be a source file (when no action is specified), or
-# derived file (otherwise).
+# May be a source file (when no action is specified) or a derived file
+# (otherwise).
#
-# The target's grist is concatenation of project's location,
-# properties of action (for derived files), and, optionally,
-# value identifying the main target.
+# The target's grist is a concatenation of its project's location, the
+# properties of its action (for derived files) and, optionally, a value
+# identifying the main target.
class abstract-file-target : virtual-target
{
- import project regex sequence path type ;
+ import project ;
+ import regex ;
+ import sequence ;
+ import path ;
+ import type ;
import property-set ;
import indirect ;
-
- rule __init__ (
- name # Name for this target
- exact ? # If non-empty, the name is exactly the name
- # created file should have. Otherwise, the '__init__'
- # method will add suffix obtained from 'type' by
- # calling 'type.generated-target-suffix'.
- : type ? # The type of this target.
+
+ rule __init__ (
+ name # Target's name.
+        exact ?  # If non-empty, the name is exactly the name the created
+                 # file should have. Otherwise, the '__init__' method will add
+                 # a suffix obtained from 'type' by calling
+                 # 'type.generated-target-suffix'.
+ : type ? # Target's type.
: project
: action ?
)
{
virtual-target.__init__ $(name) : $(project) ;
-
+
self.type = $(type) ;
self.action = $(action) ;
if $(action)
- {
+ {
$(action).add-targets $(__name__) ;
- if $(self.type) && ! $(exact)
- {
+ if $(self.type) && ! $(exact)
+ {
_adjust-name $(name) ;
- }
- }
+ }
+ }
}
-
+
rule type ( ) { return $(self.type) ; }
# Sets the path. When generating target name, it will override any path
@@ -226,15 +235,14 @@
self.path = [ path.native $(path) ] ;
}
- # If 'a' is supplied, sets action to 'a'.
- # Returns the action currently set.
+ # Returns the currently set action.
rule action ( )
{
return $(self.action) ;
}
- # Sets/gets the 'root' flag. Target is root if it directly correspods to some
- # variant of a main target.
+ # Sets/gets the 'root' flag. Target is root if it directly corresponds to
+ # some variant of a main target.
rule root ( set ? )
{
if $(set)
@@ -243,26 +251,19 @@
}
return $(self.root) ;
}
-
- # Gets or sets the subvariant which created this target. Subvariant
- # is set when target is brought into existance, and is never changed
- # after that. In particual, if target is shared by subvariant, only
- # the first is stored.
- rule creating-subvariant ( s ? # If specified, specified the value to set,
- # which should be instance of 'subvariant'
- # class.
+
+    # Gets or sets the subvariant which created this target. The subvariant is
+    # set when the target is brought into existence and is never changed after
+    # that. In particular, if a target is shared between subvariants, only the
+    # first one is stored.
+ rule creating-subvariant ( s ? # If specified, specifies the value to set,
+ # which should be a 'subvariant' class
+ # instance.
)
{
- if $(s) && ( ! $(self.creating-subvariant) && ! $(overwrite) )
+ if $(s) && ! $(self.creating-subvariant) && ! $(overwrite)
{
- if $(self.creating-subvariant)
- {
- errors.error "Attempt to change 'dg'" ;
- }
- else
- {
- self.creating-subvariant = $(s) ;
- }
+ self.creating-subvariant = $(s) ;
}
return $(self.creating-subvariant) ;
}
@@ -274,10 +275,9 @@
$(self.action).actualize ;
}
}
-
- # Return a human-readable representation of this target
- #
- # If this target has an action, that's:
+
+ # Return a human-readable representation of this target. If this target has
+ # an action, that's:
#
# { <action-name>-<self.name>.<self.type> <action-sources>... }
#
@@ -288,21 +288,19 @@
rule str ( )
{
local action = [ action ] ;
-
- local name-dot-type = [ sequence.join $(self.name) "." $(self.type) ] ;
-
+ local name-dot-type = [ sequence.join $(self.name) "." $(self.type) ] ;
+
if $(action)
{
- local sources = [ $(action).sources ] ;
-
- local action-name = [ $(action).action-name ] ;
+ local sources = [ $(action).sources ] ;
+ local action-name = [ $(action).action-name ] ;
- local ss ;
+ local ss ;
for local s in $(sources)
{
ss += [ $(s).str ] ;
}
-
+
return "{" $(action-name)-$(name-dot-type) $(ss) "}" ;
}
else
@@ -331,70 +329,65 @@
rule actual-name ( )
{
if ! $(self.actual-name)
- {
+ {
local grist = [ grist ] ;
-
local basename = [ path.native $(self.name) ] ;
self.actual-name = <$(grist)>$(basename) ;
-
+
}
return $(self.actual-name) ;
}
-
- # Helper to 'actual-name', above. Compute unique prefix used to distinguish
- # this target from other targets with the same name which create different
- # file.
+
+ # Helper to 'actual-name', above. Computes a unique prefix used to
+ # distinguish this target from other targets with the same name creating
+ # different files.
rule grist ( )
{
# Depending on target, there may be different approaches to generating
- # unique prefixes. We'll generate prefixes in the form
+ # unique prefixes. We'll generate prefixes in the form
# <one letter approach code> <the actual prefix>
local path = [ path ] ;
if $(path)
{
# The target will be generated to a known path. Just use the path
# for identification, since path is as unique as it can get.
- return p$(path) ;
+ return p$(path) ;
}
else
{
- # File is either source, which will be searched for, or is not a file at
- # all. Use the location of project for distinguishing.
+            # The file is either a source, which will be searched for, or is
+            # not a file at all. Use the project location for distinguishing.
local project-location = [ $(self.project).get location ] ;
- local location-grist =
- [ sequence.join [ regex.split $(project-location) "/" ] : "!" ] ;
-
+ local location-grist = [ sequence.join [ regex.split
+ $(project-location) "/" ] : "!" ] ;
+
if $(self.action)
{
- local ps = [ $(self.action).properties ] ;
+ local ps = [ $(self.action).properties ] ;
local property-grist = [ $(ps).as-path ] ;
- # 'property-grist' can be empty when 'ps' is an empty
- # property set.
+ # 'property-grist' can be empty when 'ps' is an empty property
+ # set.
if $(property-grist)
- {
+ {
location-grist = $(location-grist)/$(property-grist) ;
- }
- }
-
+ }
+ }
+
return l$(location-grist) ;
- }
- }
+ }
+ }
- # Given the target name specified in constructor, returns the
- # name which should be really used, by looking at the <tag> properties.
- # The tag properties come in two flavour:
- # - <tag>value,
- # - <tag>@rule-name
- # In the first case, value is just added to name
- # In the second case, the specified rule is called with specified name,
- # target type and properties and should return the new name.
- # If not <tag> property is specified, or the rule specified by
- # <tag> returns nothing, returns the result of calling
- # virtual-target.add-suffix
+ # Given the target name specified in the constructor, returns the name that
+ # should really be used, by looking at the <tag> properties. Tag properties
+ # need to be specified as <tag>@rule-name. This makes Boost.Build call the
+ # specified rule with the target name, type and properties to get the new
+ # name. If no <tag> property is specified or the rule specified by <tag>
+ # returns nothing, returns the result of calling
+ # virtual-target.add-prefix-and-suffix.
rule _adjust-name ( specified-name )
- {
+ {
local ps ;
- if $(self.action)
+ if $(self.action)
{
ps = [ $(self.action).properties ] ;
}
@@ -402,15 +395,14 @@
{
ps = [ property-set.empty ] ;
}
-
- #~ We add ourselves to the properties so that any tag rule can get
- #~ more direct information about the target than just that available
- #~ through the properties. This is useful in implementing
- #~ name changes based on the sources of the target. For example to
- #~ make unique names of object files based on the source file.
- #~ --grafik
+
+ # We add ourselves to the properties so that any tag rule can get more
+ # direct information about the target than just that available through
+ # the properties. This is useful in implementing name changes based on
+ # the sources of the target. For example to make unique names of object
+ # files based on the source file. --grafik
ps = [ property-set.create [ $(ps).raw ] <target>$(__name__) ] ;
-
+
local tag = [ $(ps).get <tag> ] ;
if $(tag)
@@ -420,46 +412,49 @@
{
if $(tag[2])
{
- errors.error "<tag>@rulename is present but is not the only <tag> feature" ;
+ errors.error "<tag>@rulename is present but is not the only"
+ "<tag> feature" ;
}
-
- self.name = [ indirect.call $(rule-name) $(specified-name) :
- $(self.type) : $(ps) ] ;
+
+ self.name = [ indirect.call $(rule-name) $(specified-name)
+ : $(self.type) : $(ps) ] ;
}
else
{
- errors.error
- "The value of the <tag> feature must be '@rule-nane'" ;
+ errors.error
+ "The value of the <tag> feature must be '@rule-name'" ;
}
}
-
+
# If there's no tag or the tag rule returned nothing.
if ! $(tag) || ! $(self.name)
- {
- self.name = [ virtual-target.add-prefix-and-suffix
- $(specified-name) : $(self.type) : $(ps) ] ;
- }
+ {
+ self.name = [ virtual-target.add-prefix-and-suffix $(specified-name)
+ : $(self.type) : $(ps) ] ;
+ }
}
rule actualize-no-scanner ( )
{
local name = [ actual-name ] ;
- # Do anything only on the first invocation
- if ! $(self.made.$(name)) {
+ # Do anything only on the first invocation.
+ if ! $(self.made.$(name))
+ {
self.made.$(name) = true ;
-
+
if $(self.action)
- {
- # For non-derived target, we don't care if there
- # are several virtual targets that refer to the same name.
- # One case when this is unavoidable is when file name is
- # main.cpp and two targets have types CPP (for compiling)
- # and MOCCABLE_CPP (for convertion to H via Qt tools).
+ {
+ # For a non-derived target, we don't care if there are several
+ # virtual targets that refer to the same name. One case when
+ # this is unavoidable is when the file name is main.cpp and two
+ # targets have types CPP (for compiling) and MOCCABLE_CPP (for
+ # conversion to H via Qt tools).
virtual-target.register-actual-name $(name) : $(__name__) ;
}
-
- for local i in $(self.dependencies) {
+
+ for local i in $(self.dependencies)
+ {
DEPENDS $(name) : [ $(i).actualize ] ;
}
@@ -468,18 +463,18 @@
}
return $(name) ;
}
-
}
-# Appends the suffix appropriate to 'type/property-set' combination
-# to the specified name and returns the result.
+
+# Prepends the prefix and appends the suffix appropriate to the
+# 'type'/'property-set' combination to the specified name and returns the
+# result.
rule add-prefix-and-suffix ( specified-name : type ? : property-set )
{
local suffix = [ type.generated-target-suffix $(type) : $(property-set) ] ;
-
- # Handle suffixes for which no leading dot is desired. Those are
- # specified by enclosing them in <...>. Needed by python so it
- # can create "_d.so" extensions, for example.
+
+ # Handle suffixes for which no leading dot is desired. Those are specified
+ # by enclosing them in <...>. Needed by python so it can create "_d.so"
+ # extensions, for example.
if $(suffix:G)
{
suffix = [ utility.ungrist $(suffix) ] ;
@@ -488,9 +483,9 @@
{
suffix = .$(suffix) ;
}
-
+
local prefix = [ type.generated-target-prefix $(type) : $(property-set) ] ;
-
+
if [ MATCH ^($(prefix)) : $(specified-name) ]
{
prefix = ;
@@ -499,26 +494,26 @@
}
-# File target with explicitly known location.
+# File targets with explicitly known location.
#
# The file path is determined as
-# - value passed to the 'set-path' method, if any
-# - for derived files, project's build dir, joined with components
-# that describe action's properties. If the free properties
-# are not equal to the project's reference properties
-# an element with name of main target is added.
-# - for source files, project's source dir
+# * Value passed to the 'set-path' method, if any.
+# * For derived files, project's build dir, joined with components that
+# describe action properties. If free properties are not equal to the
+# project's reference properties an element with the name of the main
+# target is added.
+# * For source files, project's source dir.
#
-# The file suffix is
-# - the value passed to the 'suffix' method, if any, or
-# - the suffix which correspond to the target's type.
+# The file suffix is determined as:
+# * The value passed to the 'suffix' method, if any.
+# * The suffix corresponding to the target's type.
#
-class file-target : abstract-file-target
+class file-target : abstract-file-target
{
- import common ;
- import errors ;
import "class" : new ;
-
+ import common ;
+ import errors ;
+
rule __init__ (
name exact ?
: type ? # Optional type for this target
@@ -527,149 +522,150 @@
: path ?
)
{
- abstract-file-target.__init__ $(name) $(exact) : $(type) : $(project)
- : $(action) ;
+ abstract-file-target.__init__ $(name) $(exact) : $(type) : $(project)
+ : $(action) ;
self.path = $(path) ;
}
-
+
rule clone-with-different-type ( new-type )
{
return [ new file-target $(self.name) exact : $(new-type)
- : $(self.project) : $(self.action) : $(self.path) ] ;
+ : $(self.project) : $(self.action) : $(self.path) ] ;
}
-
+
rule actualize-location ( target )
{
if $(self.action)
{
# This is a derived file.
local path = [ path ] ;
- LOCATE on $(target) = $(path) ;
+ LOCATE on $(target) = $(path) ;
# Make sure the path exists.
DEPENDS $(target) : $(path) ;
common.MkDir $(path) ;
- # It's possible that the target name includes a directory
- # too, for example when installing headers. Create that
- # directory.
+ # It's possible that the target name includes a directory too, for
+ # example when installing headers. Create that directory.
if $(target:D)
{
local d = $(target:D) ;
d = $(d:R=$(path)) ;
DEPENDS $(target) : $(d) ;
-
common.MkDir $(d) ;
- }
-
- # For real file target, we create a fake target that
- # depends on the real target. This allows to run
+ }
+
+ # For a real file target, we create a fake target depending on the
+ # real target. This allows us to run
#
# bjam hello.o
#
- # without trying to guess the name of the real target.
- # Note the that target has no directory name, and a special
- # grist <e>.
+ # without trying to guess the name of the real target. Note that the
+ # target has no directory name and uses a special <e> grist.
#
- # First, that means that "bjam hello.o" will build all
- # known hello.o targets.
- # Second, the <e> grist makes sure this target won't be confused
- # with other targets, for example, if we have subdir 'test'
- # with target 'test' in it that includes 'test.o' file,
- # then the target for directory will be just 'test' the target
- # for test.o will be <ptest/bin/gcc/debug>test.o and the target
- # we create below will be <e>test.o
+ # First, that means that "bjam hello.o" will build all known hello.o
+ # targets. Second, the <e> grist makes sure this target won't be
+ # confused with other targets. For example, if we have a subdir 'test'
+ # with a target 'test' in it that includes a 'test.o' file, then the
+ # target for the directory will be just 'test', the target for test.o
+ # will be <ptest/bin/gcc/debug>test.o, and the target we create below
+ # will be <e>test.o.
DEPENDS $(target:G=e) : $(target) ;
}
else
{
SEARCH on $(target) = [ path.native $(self.path) ] ;
- }
+ }
}
-
- # Returns the directory for this target
+
+ # Returns the directory for this target.
rule path ( )
{
if ! $(self.path)
{
if $(self.action)
- {
- local p = [ $(self.action).properties ] ;
+ {
+ local p = [ $(self.action).properties ] ;
local path = [ $(p).target-path ] ;
-
+
if $(path[2]) = true
- {
- # Indicates that the path is relative to
- # build dir.
+ {
+ # Indicates that the path is relative to the build dir.
path = [ path.join [ $(self.project).build-dir ]
- $(path[1]) ] ;
+ $(path[1]) ] ;
}
-
- # Store the computed path, so that it's not recomputed
- # any more
+
self.path = [ path.native $(path) ] ;
- }
+ }
}
return $(self.path) ;
}
-
}
+
class notfile-target : abstract-file-target
{
rule __init__ ( name : project : action ? )
{
abstract-file-target.__init__ $(name) : : $(project) : $(action) ;
}
-
- # Returns nothing, to indicate that target path is not known.
+
+ # Returns nothing to indicate that the target's path is not known.
rule path ( )
{
return ;
}
-
+
rule actualize-location ( target )
{
NOTFILE $(target) ;
ALWAYS $(target) ;
- }
-}
+ }
+}
-# Class which represents an action.
-# Both 'targets' and 'sources' should list instances of 'virtual-target'.
-# Action name should name a rule with this prototype
+
+# Class representing an action. Both 'targets' and 'sources' should list
+# instances of 'virtual-target'. Action name should name a rule with this
+# prototype:
# rule action-name ( targets + : sources * : properties * )
-# Targets and sources are passed as actual jam targets. The rule may
-# not establish dependency relationship, but should do everything else.
-class action
+# Targets and sources are passed as actual Jam targets. The rule may not
+# establish additional dependency relationships.
+
+class action
{
- import type toolset property-set indirect class path assert errors ;
-
+ import "class" ;
+ import errors ;
+ import type ;
+ import toolset ;
+ import property-set ;
+ import indirect ;
+ import path ;
+
rule __init__ ( sources * : action-name + : property-set ? )
- {
+ {
self.sources = $(sources) ;
-
+
self.action-name = [ indirect.make-qualified $(action-name) ] ;
-
- if ! $(property-set)
+
+ if ! $(property-set)
{
property-set = [ property-set.empty ] ;
}
-
+
if ! [ class.is-instance $(property-set) ]
- {
+ {
errors.error "Property set instance required" ;
}
-
+
self.properties = $(property-set) ;
- }
-
+ }
+
rule add-targets ( targets * )
{
self.targets += $(targets) ;
}
-
+
rule targets ( )
{
return $(self.targets) ;
@@ -710,30 +706,29 @@
DEPENDS $(actual-targets) : $(self.actual-sources) $(self.dependency-only-sources) ;
- # Action name can include additional argument to rule, which should not
- # be passed to 'set-target-variables'
+ # The action name can include an additional argument to the rule, which
+ # should not be passed to 'set-target-variables'.
toolset.set-target-variables
- [ indirect.get-rule $(self.action-name[1]) ] $(actual-targets)
+ [ indirect.get-rule $(self.action-name[1]) ] $(actual-targets)
: $(properties) ;
-
+
# Reflect ourselves in a variable for the target. This allows
# looking up additional info for the action given the raw target.
- # For example to debug or output action information from action rules.
+ # For example to debug or output action information from action
+ # rules.
.action on $(actual-targets) = $(__name__) ;
-
- indirect.call $(self.action-name)
- $(actual-targets) : $(self.actual-sources) : [ $(properties).raw ]
- ;
-
- # Since we set up creating action here, we also set up
- # action for cleaning up
+
+ indirect.call $(self.action-name) $(actual-targets)
+ : $(self.actual-sources) : [ $(properties).raw ] ;
+
+ # Since we set up the creating action here, we set up the action for
+ # cleaning up as well.
common.Clean clean-all : $(actual-targets) ;
}
}
- # Helper for 'actualize-sources'.
- # For each passed source, actualizes it with the appropriate scanner.
- # Returns the actualized virtual targets.
+ # Helper for 'actualize-sources'. For each passed source, actualizes it with
+ # the appropriate scanner. Returns the actualized virtual targets.
rule actualize-source-type ( sources * : property-set )
{
local result = ;
@@ -742,118 +737,117 @@
local scanner ;
if [ $(i).type ]
{
- scanner =
- [ type.get-scanner [ $(i).type ] : $(property-set) ] ;
+ scanner = [ type.get-scanner [ $(i).type ] : $(property-set) ] ;
}
result += [ $(i).actualize $(scanner) ] ;
}
-
return $(result) ;
}
-
- # Creates actual jam targets for sources. Initialized two member
- # variables:.
- # 'self.actual-sources' -- sources which are passed to updating action
- # 'self.dependency-only-sources' -- sources which are made dependencies, but
- # are not used otherwise.
+
+ # Creates actual Jam targets for sources. Initializes the following member
+ # variables:
+ # 'self.actual-sources' -- sources passed to the updating action.
+ # 'self.dependency-only-sources' -- sources marked as dependencies, but
+ # not otherwise used.
#
- # New values will be *appended* to the variables. They may be non-empty,
- # if caller wants it.
+ # New values will be *appended* to the variables. They may be non-empty if
+ # the caller wants it.
rule actualize-sources ( sources * : property-set )
{
local dependencies = [ $(self.properties).get <dependency> ] ;
-
- self.dependency-only-sources += [
- actualize-source-type $(dependencies) : $(property-set) ] ;
- self.actual-sources += [
- actualize-source-type $(sources) : $(property-set) ] ;
-
- # This is used to help bjam find dependencies in generated headers
- # in other main targets.
- # Say:
+
+ self.dependency-only-sources +=
+ [ actualize-source-type $(dependencies) : $(property-set) ] ;
+ self.actual-sources +=
+ [ actualize-source-type $(sources) : $(property-set) ] ;
+
+ # This is used to help bjam find dependencies on generated headers from
+ # other main targets, e.g. in:
#
# make a.h : ....... ;
# exe hello : hello.cpp : <implicit-dependency>a.h ;
#
- # However, for bjam to find the dependency the generated target must
- # be actualized (i.e. have the jam target). In the above case,
- # if we're building just hello ("bjam hello"), 'a.h' won't be
- # actualized unless we do it here.
+ # For bjam to find the dependency the generated target must be
+ # actualized (i.e. have its Jam target constructed). In the above case,
+ # if we're building just hello ("bjam hello"), 'a.h' won't be actualized
+ # unless we do it here.
local implicit = [ $(self.properties).get <implicit-dependency> ] ;
for local i in $(implicit)
{
$(i:G=).actualize ;
- }
+ }
}
- # Determined real properties when trying building with 'properties'.
- # This is last chance to fix properties, for example to adjust includes
- # to get generated headers correctly. Default implementation returns
- # its argument.
+ # Determines real properties when trying to build with 'properties'. This is
+ # the last chance to fix properties, for example to adjust includes to get
+ # generated headers correctly. Default implementation simply returns its
+ # argument.
rule adjust-properties ( property-set )
{
return $(property-set) ;
}
}
-# Action class which does nothing --- it produces the targets with
-# specific properties out of nowhere. It's needed to distinguish virtual
-# targets with different properties that are known to exist, and have no
-# actions which create them.
-class null-action : action
+
+# Action class which does nothing --- it produces the targets with specific
+# properties out of nowhere. It's needed to distinguish virtual targets with
+# different properties that are known to exist and have no actions which create
+# them.
+class null-action : action
{
rule __init__ ( property-set ? )
{
- action.__init__ : .no-action : $(property-set) ;
+ action.__init__ : .no-action : $(property-set) ;
}
-
+
rule actualize ( )
{
if ! $(self.actualized)
{
self.actualized = true ;
-
for local i in [ targets ]
{
$(i).actualize ;
}
- }
+ }
}
}
-# Class which acts exactly like 'action', except that the sources
-# are not scanned for dependencies.
-class non-scanning-action : action
+
+# Class which acts exactly like 'action', except that its sources are not
+# scanned for dependencies.
+class non-scanning-action : action
{
rule __init__ ( sources * : action-name + : property-set ? )
- {
+ {
action.__init__ $(sources) : $(action-name) : $(property-set) ;
}
+
rule actualize-source-type ( sources * : property-set )
{
local result ;
for local i in $(sources)
{
result += [ $(i).actualize ] ;
- }
+ }
return $(result) ;
}
}
-# Creates a virtual target with approariate name and type from 'file'.
-# If a target with that name in that project was already created, returns that already
+# Creates a virtual target with an appropriate name and type from 'file'. If a
+# target with that name in that project already exists, returns that already
# created target.
-# FIXME: more correct way would be to compute path to the file, based on name and source location
-# for the project, and use that path to determine if the target was already created.
-# TODO: passing project with all virtual targets starts to be annoying.
+# FIXME: a more correct way would be to compute the path to the file, based on
+# name and source location for the project, and use that path to determine if
+# the target was already created.
+# TODO: passing a project with all virtual targets is starting to be annoying.
rule from-file ( file : file-loc : project )
{
- import type ; # had to do this here to break a circular dependency
+ import type ; # Had to do this here to break a circular dependency.
# Check if we've created a target corresponding to this file.
- local path = [ path.root [ path.root $(file) $(file-loc) ]
- [ path.pwd ] ] ;
+ local path = [ path.root [ path.root $(file) $(file-loc) ] [ path.pwd ] ] ;
if $(.files.$(path))
{
@@ -866,62 +860,64 @@
local result ;
result = [ new file-target $(file)
- : $(type)
- : $(project)
- : #action
- : $(file-loc) ] ;
+ : $(type)
+ : $(project)
+ : #action
+ : $(file-loc) ] ;
.files.$(path) = $(result) ;
return $(result) ;
}
}
-# Registers a new virtual target. Checks if there's already registered target, with the same
-# name, type, project and subvariant properties, and also with the same sources
-# and equal action. If such target is found it is retured and 'target' is not registers.
-# Otherwise, 'target' is registered and returned.
+
+# Registers a new virtual target. Checks if there's already a registered target
+# with the same name, type, project and subvariant properties as well as the
+# same sources and equal action. If such a target is found, it is returned and a
+# new 'target' is not registered. Otherwise, 'target' is registered and
+# returned.
rule register ( target )
{
local signature = [ sequence.join
- [ $(target).path ] [ $(target).name ] : - ] ;
+ [ $(target).path ] [ $(target).name ] : - ] ;
-
local result ;
for local t in $(.cache.$(signature))
{
local a1 = [ $(t).action ] ;
local a2 = [ $(target).action ] ;
-
+
if ! $(result)
{
if ! $(a1) && ! $(a2)
{
result = $(t) ;
}
- else
+ else
{
- if $(a1) && $(a2) && [ $(a1).action-name ] = [ $(a2).action-name ] &&
- [ $(a1).sources ] = [ $(a2).sources ]
+ if $(a1) && $(a2) &&
+ [ $(a1).action-name ] = [ $(a2).action-name ] &&
+ [ $(a1).sources ] = [ $(a2).sources ]
{
local ps1 = [ $(a1).properties ] ;
local ps2 = [ $(a2).properties ] ;
local p1 = [ $(ps1).base ] [ $(ps1).free ] [ $(ps1).dependency ] ;
local p2 = [ $(ps2).base ] [ $(ps2).free ] [ $(ps2).dependency ] ;
- if $(p1) = $(p2)
- {
+ if $(p1) = $(p2)
+ {
result = $(t) ;
- }
+ }
}
- }
+ }
}
}
-
+
if ! $(result)
{
- .cache.$(signature) += $(target) ;
+ .cache.$(signature) += $(target) ;
result = $(target) ;
}
-
+
.recent-targets += $(result) ;
.all-targets += $(result) ;
@@ -929,28 +925,30 @@
}
-# Each target returned by 'register' is added to a list of
-# 'recent-target', returned by this function. So, this allows
-# us to find all targets created when building a given main
-# target, even if the target
+# Each target returned by 'register' is added to a recent-targets list, returned
+# by this function. This allows us to find all targets created when building a
+# given main target, even if the target... !!!MISSING TEXT HERE!!!
rule recent-targets ( )
{
return $(.recent-targets) ;
}
+
rule clear-recent-targets ( )
{
.recent-targets = ;
}
-# Returns all virtual targets ever created
+
+# Returns all virtual targets ever created.
rule all-targets ( )
{
return $(.all-targets) ;
}
-# Returns all targets from 'targets' with types
-# equal to 'type' or derived from it.
+
+# Returns all targets from 'targets' with types equal to 'type' or derived from
+# it.
rule select-by-type ( type : targets * )
{
local result ;
@@ -959,14 +957,12 @@
if [ type.is-subtype [ $(t).type ] $(type) ]
{
result += $(t) ;
- }
+ }
}
-
- return $(result) ;
+ return $(result) ;
}
-
rule register-actual-name ( actual-name : virtual-target )
{
if $(.actual.$(actual-name))
@@ -975,8 +971,7 @@
local cs2 = [ $(virtual-target).creating-subvariant ] ;
local cmt1 = [ $(cs1).main-target ] ;
local cmt2 = [ $(cs2).main-target ] ;
-
-
+
local action1 = [ $(.actual.$(actual-name)).action ] ;
local action2 = [ $(virtual-target).action ] ;
local properties-added ;
@@ -991,33 +986,34 @@
properties-removed ?= "none" ;
properties-added = [ set.difference $(p2) : $(p1) ] ;
properties-added ?= "none" ;
- }
- errors.error "Duplicate name of actual target:" $(actual-name)
- : "previous virtual target" [ $(.actual.$(actual-name)).str ]
- : "created from" [ $(cmt1).full-name ]
- : "another virtual target" [ $(virtual-target).str ]
- : "created from" [ $(cmt2).full-name ]
- : "added properties: " $(properties-added)
- : "removed properties: " $(properties-removed) ;
+ }
+ errors.error "Duplicate name of actual target:" $(actual-name)
+ : "previous virtual target" [ $(.actual.$(actual-name)).str ]
+ : "created from" [ $(cmt1).full-name ]
+ : "another virtual target" [ $(virtual-target).str ]
+ : "created from" [ $(cmt2).full-name ]
+ : "added properties:" $(properties-added)
+ : "removed properties:" $(properties-removed) ;
}
else
{
.actual.$(actual-name) = $(virtual-target) ;
- }
+ }
}
-# Traverses the dependency graph of 'target' and return all targets that will
-# be created before this one is created. If root of some dependency graph is
-# found during traversal, it's either included or not, dependencing of the
-# value of 'include-roots'. In either case, sources of root are not traversed.
+# Traverses the dependency graph of 'target' and returns all targets that will be
+# created before this one is created. If the root of some dependency graph is
+# found during traversal, it's either included or not, depending on the value of
+# 'include-roots'. In either case traversal stops at root targets, i.e. sources
+# of root targets are not traversed.
rule traverse ( target : include-roots ? : include-sources ? )
{
local result ;
if [ $(target).action ]
{
local action = [ $(target).action ] ;
- # This includes 'target' as well
+ # This includes the 'target' as well.
result += [ $(action).targets ] ;
for local t in [ $(action).sources ]
@@ -1029,20 +1025,21 @@
else if $(include-roots)
{
result += $(t) ;
- }
+ }
}
}
else if $(include-sources)
{
result = $(target) ;
- }
+ }
return $(result) ;
}
-# Takes an 'action' instances and creates new instance of it
-# and all produced target. The rule-name and properties are set
-# to 'new-rule-name' and 'new-properties', if those are specified.
-# Returns the cloned action.
+
+# Takes an 'action' instance and creates a new instance of it and all targets
+# produced by the action. The rule-name and properties are set to
+# 'new-rule-name' and 'new-properties', if those are specified. Returns the
+# cloned action.
rule clone-action ( action : new-project : new-action-name ? : new-properties ? )
{
if ! $(new-action-name)
@@ -1055,142 +1052,139 @@
}
local action-class = [ modules.peek $(action) : __class__ ] ;
- local cloned-action = [ class.new $(action-class)
+ local cloned-action = [ class.new $(action-class)
[ $(action).sources ] : $(new-action-name) : $(new-properties) ] ;
-
+
local cloned-targets ;
for local target in [ $(action).targets ]
{
local n = [ $(target).name ] ;
- # Don't modify the name of the produced targets.Strip the directory f
- local cloned-target = [ class.new file-target $(n) exact : [ $(target).type ]
- : $(new-project) : $(cloned-action) ] ;
+ # Don't modify the names of produced targets.
+ local cloned-target = [ class.new file-target $(n) exact
+ : [ $(target).type ] : $(new-project) : $(cloned-action) ] ;
local d = [ $(target).dependencies ] ;
if $(d)
- {
+ {
$(cloned-target).depends $(d) ;
- }
+ }
$(cloned-target).root [ $(target).root ] ;
$(cloned-target).creating-subvariant [ $(target).creating-subvariant ] ;
-
+
cloned-targets += $(cloned-target) ;
- }
-
- return $(cloned-action) ;
+ }
+
+ return $(cloned-action) ;
}
+
class subvariant
{
- import sequence ;
+ import sequence ;
import type ;
-
- rule __init__ ( main-target # The instance of main-target class
- : property-set # Properties requested for this target
+
+ rule __init__ ( main-target # The instance of main-target class.
+ : property-set # Properties requested for this target.
: sources *
- : build-properties # Actually used properties
- : sources-usage-requirements # Properties propagated from sources
- : created-targets * ) # Top-level created targets
- {
- self.main-target = $(main-target) ;
+ : build-properties # Actually used properties.
+ : sources-usage-requirements # Properties propagated from sources.
+ : created-targets * ) # Top-level created targets.
+ {
+ self.main-target = $(main-target) ;
self.properties = $(property-set) ;
self.sources = $(sources) ;
self.build-properties = $(build-properties) ;
self.sources-usage-requirements = $(sources-usage-requirements) ;
self.created-targets = $(created-targets) ;
- # Pre-compose the list of other dependency graphs, on which this one
- # depends
+ # Pre-compose a list of other dependency graphs this one depends on.
local deps = [ $(build-properties).get <implicit-dependency> ] ;
for local d in $(deps)
{
self.other-dg += [ $(d:G=).creating-subvariant ] ;
}
-
+
self.other-dg = [ sequence.unique $(self.other-dg) ] ;
}
-
-
+
rule main-target ( )
{
return $(self.main-target) ;
}
-
- rule created-targets ( )
+
+ rule created-targets ( )
{
return $(self.created-targets) ;
}
-
+
rule requested-properties ( )
{
return $(self.properties) ;
}
-
+
rule build-properties ( )
{
return $(self.build-properties) ;
}
-
+
rule sources-usage-requirements ( )
{
return $(self.sources-usage-requirements) ;
}
-
+
rule set-usage-requirements ( usage-requirements )
{
self.usage-requirements = $(usage-requirements) ;
}
-
+
rule usage-requirements ( )
{
return $(self.usage-requirements) ;
}
-
- # Returns all targets referenced by this subvariant,
- # either directly or indirectly, and
- # either as sources, or as dependency properties.
- # Targets referred with dependency property are returned a properties,
- # not targets.
+
+ # Returns all targets referenced by this subvariant, either directly or
+ # indirectly, and either as sources, or as dependency properties. Targets
+ # referred to using the dependency property are returned as properties, not
+ # targets.
rule all-referenced-targets ( )
{
# Find directly referenced targets.
local deps = [ $(self.build-properties).dependency ] ;
local all-targets = $(self.sources) $(deps) ;
-
+
# Find other subvariants.
local r ;
for local t in $(all-targets)
- {
+ {
r += [ $(t:G=).creating-subvariant ] ;
}
r = [ sequence.unique $(r) ] ;
- for local s in $(r)
+ for local s in $(r)
{
if $(s) != $(__name__)
{
all-targets += [ $(s).all-referenced-targets ] ;
- }
+ }
}
- return $(all-targets) ;
+ return $(all-targets) ;
}
-
- # Returns the properties which specify implicit include paths to
- # generated headers. This traverses all targets in this subvariant,
- # and subvariants referred by <implcit-dependecy>properties.
- # For all targets which are of type 'target-type' (or for all targets,
- # if 'target-type' is not specified), the result will contain
- # <$(feature)>path-to-that-target.
+
+ # Returns the properties specifying implicit include paths to generated
+ # headers. This traverses all targets in this subvariant and subvariants
+ # referred to by <implicit-dependency> properties. For all targets of type
+ # 'target-type' (or for all targets, if 'target-type' is not specified), the
+ # result will contain <$(feature)>path-to-that-target.
rule implicit-includes ( feature : target-type ? )
{
local key = ii$(feature)-$(target-type:E="") ;
if ! $($(key))-is-nonempty
{
- local target-paths = [ all-target-directories $(target-type) ] ;
+ local target-paths = [ all-target-directories $(target-type) ] ;
target-paths = [ sequence.unique $(target-paths) ] ;
local result = $(target-paths:G=$(feature)) ;
if ! $(result)
{
result = "" ;
- }
+ }
$(key) = $(result) ;
}
if $($(key)) = ""
@@ -1200,37 +1194,34 @@
else
{
return $($(key)) ;
- }
+ }
}
-
+
rule all-target-directories ( target-type ? )
{
if ! $(self.target-directories)
{
compute-target-directories $(target-type) ;
- }
+ }
return $(self.target-directories) ;
}
-
+
rule compute-target-directories ( target-type ? )
- {
+ {
local result ;
for local t in $(self.created-targets)
{
- if $(target-type) && ! [ type.is-derived [ $(t).type ] $(target-type) ]
+ # Skip targets of the wrong type.
+ if ! $(target-type) ||
+ [ type.is-derived [ $(t).type ] $(target-type) ]
{
- # Skip target which is of wrong type.
- }
- else
- {
result = [ sequence.merge $(result) : [ $(t).path ] ] ;
- }
+ }
}
for local d in $(self.other-dg)
{
result += [ $(d).all-target-directories $(target-type) ] ;
}
self.target-directories = $(result) ;
- }
+ }
}
-
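
For readers following the _adjust-name changes above: a <tag> rule referenced as
<tag>@rule-name receives the proposed name, the target type and the property set,
and returns the adjusted name (returning nothing falls back to
virtual-target.add-prefix-and-suffix). The following Jamfile sketch is
illustrative only and not part of this patch; the rule name my-tagger and the
target names are assumptions:

    # Hypothetical tag rule: rename debug builds.
    rule my-tagger ( name : type ? : property-set )
    {
        local variant = [ $(property-set).get <variant> ] ;
        if $(variant) = debug
        {
            # The returned name is used as-is, e.g. "hello" becomes "hello-d".
            return $(name)-d ;
        }
        # Returning nothing keeps the default name computed by
        # virtual-target.add-prefix-and-suffix.
    }

    lib hello : hello.cpp : <tag>@my-tagger ;
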
Modified: branches/release/tools/build/v2/doc/src/advanced.xml
==============================================================================
--- branches/release/tools/build/v2/doc/src/advanced.xml (original)
+++ branches/release/tools/build/v2/doc/src/advanced.xml 2008-03-15 14:55:28 EDT (Sat, 15 Mar 2008)
@@ -777,10 +777,10 @@
such as defines. It can also be useful for ordinary properties. Consider
this example:
<programlisting>
-project test : requirements <threading;>multi ;
+project test : requirements <threading>multi ;
exe test1 : test1.cpp ;
-exe test2 : test2.cpp : <threading;>single ;
-exe test3 : test3.cpp : -<threading;>multi ;
+exe test2 : test2.cpp : <threading>single ;
+exe test3 : test3.cpp : -<threading>multi ;
</programlisting>
Here, <code>test1</code> inherits project requirements and will always
be built in multi-threaded mode. The <code>test2</code> target
Modified: branches/release/tools/build/v2/doc/src/architecture.xml
==============================================================================
--- branches/release/tools/build/v2/doc/src/architecture.xml (original)
+++ branches/release/tools/build/v2/doc/src/architecture.xml 2008-03-15 14:55:28 EDT (Sat, 15 Mar 2008)
@@ -334,7 +334,7 @@
"bin" directory, it should be supported as well. I.e. in the
scenario above, Jamfile in "dir" might create a main target,
which generates "a.h". The file will be generated to "dir/bin"
- directory, but we still have to recornize the dependency.
+ directory, but we still have to recognize the dependency.
</simpara>
</listitem>
</orderedlist>
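
The scenario discussed in this architecture note (a header generated into another
main target's build directory) is the same one the <implicit-dependency> comments
in virtual-target.jam above refer to. A sketch of what such Jamfile declarations
might look like; the generating rule, file names and Unix-only copy command are
assumptions, not part of the patch:

    # Hypothetical generating rule producing a.h in the build directory.
    make a.h : a.h.in : @copy-header ;
    actions copy-header
    {
        cp "$(>)" "$(<)"
    }

    # hello.cpp includes a.h; the implicit dependency lets bjam's scanner
    # find the generated header even when only "hello" is built.
    exe hello : hello.cpp : <implicit-dependency>a.h ;
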
Modified: branches/release/tools/build/v2/doc/src/extending.xml
==============================================================================
--- branches/release/tools/build/v2/doc/src/extending.xml (original)
+++ branches/release/tools/build/v2/doc/src/extending.xml 2008-03-15 14:55:28 EDT (Sat, 15 Mar 2008)
@@ -37,30 +37,28 @@
</listitem>
</orderedlist>
- <para>It's quite easy to achieve. You write special verbatim files
- that are just C++, except that the very first line of the file
- contains the name of a variable that should be generated. A simple tool
- is created that takes a verbatim file and creates a cpp file with
- a single <code>char*</code> variable whose name is taken from the first line
- of the verbatim file and whose value is the file's properly quoted content.</para>
+ <para>It's quite easy to achieve. You write special verbatim files that are
+ just C++, except that the very first line of the file contains the name of a
+ variable that should be generated. A simple tool is created that takes a
+ verbatim file and creates a cpp file with a single <code>char*</code> variable
+ whose name is taken from the first line of the verbatim file and whose value
+ is the file's properly quoted content.</para>
<para>Let's see what Boost.Build can do.</para>
- <para>First off, Boost.Build has no idea about "verbatim files". So,
- you must register a new target type. The following code does
- it:</para>
+ <para>First off, Boost.Build has no idea about "verbatim files". So, you must
+ register a new target type. The following code does it:</para>
<programlisting>
import type ;
-type.register VERBATIM : vrb ;
+type.register VERBATIM : verbatim ;
</programlisting>
- <para>The first parameter to
- <functionname>type.register</functionname> gives the name of the
- declared type. By convention, it's uppercase. The second parameter
- is the suffix for files of this type. So, if Boost.Build sees
- <filename>code.vrb</filename> in a list of sources, it knows that it's of type
- <code>VERBATIM</code>.</para>
+ <para>The first parameter to <functionname>type.register</functionname> gives
+ the name of the declared type. By convention, it's uppercase. The second
+ parameter is the suffix for files of this type. So, if Boost.Build sees
+ <filename>code.verbatim</filename> in a list of sources, it knows that it's of
+ type <code>VERBATIM</code>.</para>
<para>Next, you tell Boost.Build that the verbatim files can be
transformed into C++ files in one build step. A
@@ -96,26 +94,29 @@
-->
</para>
- <para>Now, we're ready to tie it all together. Put all the code
- above in file <filename>verbatim.jam</filename>, add <code>import verbatim ;</code>
- to <filename>project-root.jam</filename>, and it's possible to write
- the following in Jamfile:</para>
+ <para>
+ Now, we're ready to tie it all together. Put all the code above in file
+ <filename>verbatim.jam</filename>, add <code>import verbatim ;</code> to
+ <filename>Jamroot.jam</filename>, and it's possible to write the following
+ in your Jamfile:
+ </para>
<programlisting>
exe codegen : codegen.cpp class_template.verbatim usage.verbatim ;
</programlisting>
<para>
-The verbatim files will be automatically converted into C++
-and linked it.
+ The listed verbatim files will be automatically converted into C++ source
+ files, compiled and then linked to the codegen executable.
</para>
- <para>In the subsequent sections, we will extend this example, and review
- all the mechanisms in detail. The complete code is available in <filename>example/customization</filename>
- directory.
- </para>
-
+ <para>
+ In subsequent sections, we will extend this example, and review all the
+ mechanisms in detail. The complete code is available in the
+ <filename>example/customization</filename> directory.
+ </para>
</section>
+
<section id="bbv2.extending.targets">
<title>Target types</title>
<para>The first thing we did in the <link
@@ -281,13 +282,13 @@
generators.register-standard verbatim.inline-file : VERBATIM : CPP ;
generators.register-composing mex.mex : CPP LIB : MEX ;
</programlisting>
- Standard generators take a <emphasis>single</emphasis> source of type
- <code>VERBATIM</code> and produces a result. The second generator
- takes any number of sources, which can have either the
- <code>CPP</code> or the <code>LIB</code> type. Composing generators
+ The first (standard) generator takes a <emphasis>single</emphasis>
+ source of type <code>VERBATIM</code> and produces a result. The second
+ (composing) generator takes any number of sources, which can have either
+ the <code>CPP</code> or the <code>LIB</code> type. Composing generators
are typically used for generating top-level target type. For example,
- the first generator invoked when building an <code>exe</code> target
- is a composing generator corresponding to the proper linker.
+ the first generator invoked when building an <code>exe</code> target is
+ a composing generator corresponding to the proper linker.
</para>
<para>You should also know about two specific functions for registering
@@ -723,51 +724,46 @@
in Jamfiles, which will convert source to the OBFUSCATED_CPP type.
</para>
- <para>The second way is to write a wrapper rule that calls
- any of the existing rules. For example, suppose you have only one library per
- directory and want all cpp files in the directory to be compiled into that library. You
- can achieve this effect with:
+ <para>
+ The second way is to write a wrapper rule that calls any of the existing
+ rules. For example, suppose you have only one library per directory and
+ want all cpp files in the directory to be compiled into that library. You
+ can achieve this effect using:
<programlisting>
lib codegen : [ glob *.cpp ] ;
</programlisting>
- but if you want to make it even simpler, you could add the following
- definition to the <filename>project-root.jam</filename> file:
+ If you want to make it even simpler, you could add the following
+ definition to the <filename>Jamroot.jam</filename> file:
<programlisting>
rule glib ( name : extra-sources * : requirements * )
{
lib $(name) : [ glob *.cpp ] $(extra-sources) : $(requirements) ;
}
</programlisting>
-which would allow you to reduce the Jamfile to
+ allowing you to reduce the Jamfile to just
<programlisting>
glib codegen ;
</programlisting>
</para>
<para>
- Note that because you can associate a custom generator with a target
- type, the logic of building can be rather compiler.
- <!-- "compiler" is not an adjective. The logic can't be "rather
- compiler." What you might mean here is completely
- mysterious to me -->
- For example, the
+ Note that because you can associate a custom generator with a target type,
+ the logic of building can be rather complicated. For example, the
<code>boostbook</code> module declares a target type
- <code>BOOSTBOOK_MAIN</code> and a custom generator for that
- type. You can use that as example if your main target rule is
- non-trivial.
+ <code>BOOSTBOOK_MAIN</code> and a custom generator for that type. You can
+ use that as example if your main target rule is non-trivial.
</para>
-
</section>
<section id="bbv2.extending.toolset_modules">
<title>Toolset modules</title>
- <para>If your extensions will be used only on one project, they can be
- placed in a separate <filename>.jam</filename> file that will be
- imported by your <filename>project-root.jam</filename>. If the
- extensions will be used on many projects, users will thank you for
- a finishing touch.
+ <para>
+ If your extensions will be used only on one project, they can be placed in
+ a separate <filename>.jam</filename> file and imported by your
+ <filename>Jamroot.jam</filename>. If the extensions will be used on many
+ projects, users will thank you for a finishing touch.
</para>
<para>The <code>using</code> rule provides a standard mechanism
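
To keep the extending.xml walkthrough above in one place, here is a consolidated
sketch of the verbatim.jam module it describes. This is illustrative only; the
inline_file.py helper and the exact action body are assumptions here (the full
version lives in the example/customization directory mentioned above):

    # verbatim.jam -- minimal sketch of the extension described above.
    import type ;
    import generators ;

    # Files with the .verbatim suffix are of type VERBATIM.
    type.register VERBATIM : verbatim ;

    # A single VERBATIM source can be converted to a C++ source in one step.
    generators.register-standard verbatim.inline-file : VERBATIM : CPP ;

    # The conversion action; inline_file.py is assumed to read $(>) and
    # write the quoted char* definition to $(<).
    actions inline-file
    {
        "./inline_file.py" $(<) $(>)
    }
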
Modified: branches/release/tools/build/v2/doc/src/faq.xml
==============================================================================
--- branches/release/tools/build/v2/doc/src/faq.xml (original)
+++ branches/release/tools/build/v2/doc/src/faq.xml 2008-03-15 14:55:28 EDT (Sat, 15 Mar 2008)
@@ -216,12 +216,12 @@
</section>
<section>
- <title>How to get the project-root location?
+ <title>How to get the project root (a.k.a. Jamroot.jam) location?
</title>
- <para>You might want to use the location of the project-root in your
- Jamfiles. To do it, you'd need to declare path constant in your
- project-root.jam:
+ <para>
+ You might want to use your project's root location in your Jamfiles. To
+ access it just declare a path constant in your Jamroot.jam file using:
<programlisting>
path-constant TOP : . ;
</programlisting>
@@ -264,7 +264,7 @@
<para>(This entry is specific to Unix systems.) Before answering the
questions, let's recall a few points about shared libraries. Shared
libraries can be used by several applications, or other libraries,
- without phisycally including the library in the application. This can
+ without physically including the library in the application. This can
greatly decrease the total size of applications. It's also possible to
upgrade a shared library when the application is already
installed. Finally, shared linking can be faster.
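
As a small illustration of the path-constant answer above (the include directory
and target names are assumptions, not part of the patch): once TOP is declared in
Jamroot.jam, it can be used anywhere in the project, for example:

    # In Jamroot.jam:
    path-constant TOP : . ;

    # In any Jamfile of the project; the include directory is hypothetical.
    exe app : app.cpp : <include>$(TOP)/include ;
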
Modified: branches/release/tools/build/v2/doc/src/reference.xml
==============================================================================
--- branches/release/tools/build/v2/doc/src/reference.xml (original)
+++ branches/release/tools/build/v2/doc/src/reference.xml 2008-03-15 14:55:28 EDT (Sat, 15 Mar 2008)
@@ -329,7 +329,7 @@
will print the names of all C++ files in your project. The
<literal>.svn</literal> exclude pattern prevents the
<code>glob-tree</code> rule from entering administrative
- directories of the Subverion version control system.
+ directories of the Subversion version control system.
</para></listitem>
</varlistentry>
@@ -462,13 +462,35 @@
<term><literal>link</literal></term>
<listitem>
+
+ <para><emphasis role="bold">Allowed values:</emphasis> <literal>shared</literal>,
+ <literal>static</literal></para>
+
<simpara>
A feature that controls how libraries are built.
</simpara>
+ </listitem></varlistentry>
+
+ <varlistentry id="bbv2.advanced.builtins.features.runtime-link">
+ <indexterm><primary>runtime linking</primary></indexterm>
+ <term><literal>runtime-link</literal></term>
+
+ <listitem>
<para><emphasis role="bold">Allowed values:</emphasis> <literal>shared</literal>,
<literal>static</literal></para>
- </listitem></varlistentry>
+
+ <simpara>
+ Controls whether a static or shared C/C++ runtime should be used.
+ There are some restrictions on how this feature can be used, for
+ example on some compilers an application using a static runtime
+ should not use shared libraries at all, and on some compilers,
+ mixing static and shared runtimes requires extreme care. Check
+ your compiler documentation for more details.
+ </simpara>
+
+ </listitem>
+ </varlistentry>
<varlistentry><term><literal>source</literal></term>
@@ -678,9 +700,50 @@
release variant with debugging information.</para>
</listitem>
</varlistentry>
-
+ <varlistentry><term><literal>architecture</literal></term>
+ <listitem>
+ <para>The <literal>architecture</literal> feature specifies
+ the general processor family to generate code for.</para>
+
+ </listitem>
+ </varlistentry>
+
+ <varlistentry><term><literal>instruction-set</literal></term>
+ <indexterm><primary>instruction-set</primary></indexterm>
+ <listitem>
+ <para>Allowed values for this feature depend on the toolset used.</para>
+
+ <para>The <literal>instruction-set</literal> feature specifies for which
+ specific instruction set the code should be generated. The
+ code in general might not run on processors with older/different
+ instruction sets.</para>
+
+ <para>While Boost.Build allows a large set of possible values
+ for this feature, whether a given value works depends on which
+ compiler you use. Please see
+ <xref linkend="bbv2.reference.tools.compilers"/> for details.
+ </para>
+
+ </listitem>
+ </varlistentry>
+
+ <varlistentry><term><literal>address-model</literal></term>
+ <indexterm><primary>64-bit compilation</primary></indexterm>
+ <listitem>
+ <para><emphasis role="bold">Allowed values:</emphasis> <literal>32</literal>, <literal>64</literal>.</para>
+
+ <para>The <literal>address-model</literal> feature specifies whether
+ 32-bit or 64-bit code should be generated by the compiler. Whether this
+ feature works depends on the compiler used, its version, how the
+ compiler is configured, and the values of the
+ <literal>architecture</literal> and <literal>instruction-set</literal>
+ features. Please see <xref linkend="bbv2.reference.tools.compilers"/>
+ for details.</para>
+ </listitem>
+ </varlistentry>
+
</variablelist>
</section>
@@ -770,6 +833,15 @@
</variablelist>
+ <indexterm><primary>64-bit compilation</primary>
+ <secondary>gcc</secondary></indexterm>
+
+ In order to compile 64-bit applications, you have to specify
+ <code>address-model=64</code>, and the <code>instruction-set</code>
+ feature should refer to a 64-bit processor. Currently, those
+ include <literal>nocona</literal>, <literal>opteron</literal>,
+ <literal>athlon64</literal> and <literal>athlon-fx</literal>.
+
</section>
@@ -887,6 +959,70 @@
</variablelist>
+
+ <section>
+ <title>64-bit support</title>
+
+ <indexterm><primary>64-bit compilation</primary>
+ <secondary>Microsoft Visual Studio</secondary></indexterm>
+
+ <para>Starting with version 8.0, Microsoft Visual Studio
+ can generate binaries for 64-bit processors, both the 64-bit
+ flavour of x86 (codenamed AMD64/EM64T) and
+ Itanium (codenamed IA64). In addition, compilers that themselves
+ run in 64-bit mode, for better performance, are provided.
+ The complete list of compiler configurations is as follows
+ (we abbreviate AMD64/EM64T to just AMD64):</para>
+ <itemizedlist>
+ <listitem><para>32-bit x86 host, 32-bit x86 target</para>
+ </listitem>
+ <listitem><para>32-bit x86 host, 64-bit AMD64 target</para>
+ </listitem>
+ <listitem><para>32-bit x86 host, 64-bit IA64 target</para>
+ </listitem>
+ <listitem><para>64-bit AMD64 host, 64-bit AMD64 target</para>
+ </listitem>
+ <listitem><para>64-bit IA64 host, 64-bit IA64 target</para>
+ </listitem>
+ </itemizedlist>
+ <para>
+ The 32-bit host compilers can always be used, even on 64-bit Windows.
+ In contrast, the 64-bit host compilers require both a 64-bit
+ host processor and 64-bit Windows, but can be faster. By default,
+ only the 32-bit host, 32-bit target compiler is installed, and additional
+ compilers have to be installed explicitly.
+ </para>
+
+ <para>To use 64-bit compilation you should:</para>
+ <orderedlist>
+ <listitem><para>Configure your compiler as usual. If you provide
+ a path to the compiler explicitly, provide the path to the
+ 32-bit compiler. If you try to specify the path to any of the 64-bit
+ compilers, configuration won't work.</para>
+ </listitem>
+
+ <listitem><para>When compiling, use <code>address-model=64</code>
+ to generate AMD64 code.</para></listitem>
+
+ <listitem><para>To generate IA64 code, use
+ <code>architecture=ia64</code>.</para></listitem>
+ </orderedlist>
+
+ <para>The (AMD64 host, AMD64 target) compiler will be used
+ automatically when you're generating AMD64 code and are
+ running 64-bit Windows on AMD64. The (IA64 host, IA64 target)
+ compiler won't ever be used, since nobody has an IA64 machine
+ to test on.</para>
+
+ <para>It is believed that AMD64 and EM64T targets are essentially
+ compatible. The compiler options <code>/favor:AMD64</code>
+ and <code>/favor:EM64T</code>, which are accepted only by
+ AMD64 targeting compilers, cause the generated code to be
+ tuned to a specific flavor of 64-bit x86. Boost.Build will
+ make use of those options depending on the value
+ of the <code>instruction-set</code> feature.</para>
+
+ </section>
</section>
@@ -1183,7 +1319,12 @@
<xi:include href="fragments.xml#xpointer(id('common_options')/*)"
parse="xml"/>
</variablelist>
-
+
+ <indexterm><primary>64-bit compilation</primary>
+ <secondary>Sun Studio</secondary></indexterm>
+ Starting with Sun Studio 12, you can create 64-bit applications
+ by using the <code>address-model=64</code> property.
+
</section>
<section id="bbv2.reference.tools.compiler.vacpp">
@@ -1208,6 +1349,47 @@
</section>
+ </section>
+
+ <section>
+ <title>Third-party libraries</title>
+
+ <para>Boost.Build provides special support for some
+ third-party C++ libraries, documented below.</para>
+
+ <section id="bbv2.reference.tools.libraries.stlport">
+ <title>STLport library</title>
+ <indexterm><primary>STLport</primary></indexterm>
+
+ <para>The <ulink url="http://stlport.org">STLport</ulink> library
+ is an alternative implementation of the C++ runtime library. Boost.Build
+ supports using that library on the Windows platform. Linux is
+ hampered by different naming of libraries in each STLport
+ version and is not officially supported.</para>
+
+ <para>Before using STLport, you need to configure it in
+ <filename>user-config.jam</filename> using the following syntax:
+ </para>
+ <programlisting>
+using stlport : <optional><replaceable>version</replaceable></optional> : <replaceable>header-path</replaceable> : <optional><replaceable>library-path</replaceable></optional> ;
+</programlisting>
+ <para>
+ Where <replaceable>version</replaceable> is the version of
+ STLport, for example <literal>5.1.4</literal>,
+ <replaceable>header-path</replaceable> is the location where
+ STLport headers can be found, and <replaceable>library-path</replaceable>
+ is the location where STLport libraries can be found.
+ The version should always be provided, and the library path should
+ be provided if you're using STLport's implementation of
+ iostreams. Note that STLport 5.* always uses its own iostream
+ implementation, so the library path is required.
+ </para>
+
+ <para>When STLport is configured, you can build with STLport by
+ requesting <literal>stdlib=stlport</literal> on the command line.
+ </para>
+
+ </section>
</section>
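
A concrete, purely illustrative user-config.jam line matching the syntax
documented in the STLport section above; the version is the one the text itself
uses as an example, and the paths are assumptions for a local installation:

    # user-config.jam -- hypothetical STLport installation paths.
    using stlport : 5.1.4 : /usr/local/include/stlport : /usr/local/lib ;

After that, a build can request the library with stdlib=stlport on the command
line, as the section notes.
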
@@ -1254,10 +1436,10 @@
<orderedlist>
<listitem>
<simpara>
- For each alternative <emphasis>condition</emphasis> is defined
- as the set of base properies in requirements. [Note: it might be
- better to specify the condition explicitly, as in
- conditional requirements].
+ For each alternative, a <emphasis>condition</emphasis> is defined as
+ the set of base properties in requirements. [Note: it might be
+ better to specify the condition explicitly, as in conditional
+ requirements].
</simpara>
</listitem>
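
The features documented in this reference.xml hunk (link, runtime-link,
architecture, instruction-set, address-model) can be combined in target
requirements. The following Jamfile line is an illustrative sketch only (the
target and source names are assumptions); the same properties can equally be
given on the bjam command line, e.g. address-model=64 link=static:

    # Illustrative requirements combining the features described above.
    exe app64 : app.cpp
        : <address-model>64 <architecture>x86 <instruction-set>nocona
          <link>static <runtime-link>static
        ;
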
Modified: branches/release/tools/build/v2/doc/src/tasks.xml
==============================================================================
--- branches/release/tools/build/v2/doc/src/tasks.xml (original)
+++ branches/release/tools/build/v2/doc/src/tasks.xml 2008-03-15 14:55:28 EDT (Sat, 15 Mar 2008)
@@ -192,17 +192,16 @@
<title>Alias</title>
<para>
- The <functionname>alias</functionname> rule gives alternative name to
+ The <functionname>alias</functionname> rule gives an alternative name to
a group of targets. For example, you can give the name
<filename>core</filename> to a group of three other targets with the
following code:
<programlisting>
alias core : im reader writer ;</programlisting>
- Using <filename>core</filename> on the command line, or in the source list
- of any other target is the same as explicitly using
+ Using <filename>core</filename> on the command line, or in the source
+ list of any other target is the same as explicitly using
<filename>im</filename>, <filename>reader</filename>, and
<filename>writer</filename>, but it is just more convenient.
-
</para>
@@ -214,7 +213,6 @@
alias threads : /boost/thread//boost_thread : <link>static ;
</programlisting>
and use only the <code>threads</code> alias in your Jamfiles.
- <!-- changed name for clarity -->
</para>
<para>
@@ -224,14 +222,16 @@
alias header_only_library : : : : <include>/usr/include/header_only_library ;
</programlisting>
then using <code>header_only_library</code> in sources will only add an
- include path. Also note that when there are some sources, their usage
- requirements are propagated, too. For example:
+ include path. Also note that when an alias has sources, their usage
+ requirements are propagated as well. For example:
<programlisting>
-lib lib : lib.cpp : : : <include>. ;
-alias lib_alias ; <!-- This line can't possibly be correct!?? -->
-exe main : main.cpp lib_alias ;
+lib library1 : library1.cpp : : : <include>/library/include1 ;
+lib library2 : library2.cpp : : : <include>/library/include2 ;
+alias static_libraries : library1 library2 : <link>static ;
+exe main : main.cpp static_libraries ;
</programlisting>
- will compile <filename>main.cpp</filename> with the additional include.
+ will compile <filename>main.cpp</filename> with additional includes
+ required for using the specified static libraries.
</para>
</section>
@@ -575,7 +575,7 @@
and add that precompiled header to the sources of the target whose compilation
you want to speed up:
<programlisting>
-cpp-pch pch : header.hpp ;
+cpp-pch pch : pch.hpp ;
exe main : main.cpp pch ;</programlisting>
You can use the <code>c-pch</code> if you want to use the precompiled
header in C programs.
@@ -602,6 +602,15 @@
it in the source file, even if the same header is included from
the precompiled header. This makes sure that your project will build
even if precompiled headers are not supported.</para></listitem>
+
+ <listitem><para>On the gcc compiler, the name of the header being
+ precompiled must be equal to the name of the <code>cpp-pch</code>
+ target. This is a gcc requirement.</para></listitem>
+
+ <listitem><para>Prior to version 4.2, the gcc compiler did not
+ handle anonymous namespaces in precompiled headers, which
+ limits their utility. See the <ulink url="http://gcc.gnu.org/bugzilla/show_bug.cgi?id=29085">bug
+ report</ulink> for details.</para></listitem>
</itemizedlist>
Modified: branches/release/tools/build/v2/doc/src/tutorial.xml
==============================================================================
--- branches/release/tools/build/v2/doc/src/tutorial.xml (original)
+++ branches/release/tools/build/v2/doc/src/tutorial.xml 2008-03-15 14:55:28 EDT (Sat, 15 Mar 2008)
@@ -605,13 +605,12 @@
lib demangler : demangler_msvc.cpp : <toolset>msvc ; # alternative 3
</programlisting>
When building <filename>demangler</filename>, Boost.Build will compare
- requirements for each alternative with build properties to find the best match.
- For example, when building with with <code><toolset>gcc</code>
- alternative 2, will be selected, and when building with
- <code><toolset>msvc</code> alternative 3 will be selected. In all other
- cases, the most generic alternative 1 will be built.
+ requirements for each alternative with build properties to find the best
+ match. For example, when building with <code><toolset>gcc</code>,
+ alternative 2 will be selected, and when building with
+ <code><toolset>msvc</code>, alternative 3 will be selected. In all
+ other cases, the most generic alternative 1 will be built.
</para>
-
</section>
<section id="bbv2.tutorial.prebuilt">
Modified: branches/release/tools/build/v2/hacking.txt
==============================================================================
--- branches/release/tools/build/v2/hacking.txt (original)
+++ branches/release/tools/build/v2/hacking.txt 2008-03-15 14:55:28 EDT (Sat, 15 Mar 2008)
@@ -1,20 +1,20 @@
-Copyright 2003, 2006 Vladimir Prus
-Distributed under the Boost Software License, Version 1.0.
-(See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+Copyright 2003, 2006 Vladimir Prus
+Distributed under the Boost Software License, Version 1.0.
+(See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
----------------------------------
Boost.Build contributor guidelines
----------------------------------
-Boost.Build is an open-source project. This means that we welcome and
-appreciate all contributions --- be it ideas, bug reports, or patches.
-This document contains guidelines which helps to assure that development
-goes on smoothly, and changes are made quickly.
-
-The guidelines are not mandatory, and you can decide for yourself which one
-to follow. But note, that 10 mins that you spare writing a comment, for
-example, might lead to significally longer delay for everyone.
+Boost.Build is an open-source project. This means that we welcome and appreciate
+all contributions --- be it ideas, bug reports, or patches. This document
+contains guidelines which help to ensure that development goes smoothly and
+changes are made quickly.
+
+The guidelines are not mandatory, and you can decide for yourself which ones to
+follow. But note that the 10 minutes you spare writing a comment, for example,
+might lead to a significantly longer delay for everyone.
Before contributing, make sure you are subscribed to our mailing list
@@ -25,7 +25,7 @@
- The issue tracker
http://zigzag.cs.msu.su/boost.build
- - commits mailing list:
+ - commits mailing list:
boost-build_at_[hidden]
http://sourceforge.net/mailarchive/forum.php?forum_id=9097
@@ -37,8 +37,8 @@
When reporting a bug, please try to provide the following information.
- What you did. A minimal reproducible testcase is very much appreciated.
- Shell script with some annotations is much better than verbose description of
- the problem. A regression test is the best (see test/test_system.html).
+ A shell script with some annotations is much better than a verbose
+ description of the problem. A regression test is best of all (see test/test_system.html).
- What you got.
- What you expected.
- What version of Boost.Build and Boost.Jam did you use. If possible,
@@ -53,48 +53,48 @@
- provide a log message together with the patch
- put the patch and the log message as attachment to your email.
-The purpose of log message serves to communicate what was changed, and
-*why*. Without a good log message, you might spend a lot of time later,
-wondering where a strange piece of code came from and why it was necessary.
+The purpose of a log message is to communicate what was changed, and *why*.
+Without a good log message, you might spend a lot of time later wondering where
+a strange piece of code came from and why it was necessary.
A good log message mentions each changed file and each rule/method, saying
what happened to it, and why. Consider the following log message:
Better direct request handling.
-
+
* new/build-request.jam
(directly-requested-properties-adjuster): Redo.
-
+
* new/targets.jam
(main-target.generate-really): Adjust properties here.
-
+
* new/virtual-target.jam
(register-actual-name): New rule.
(virtual-target.actualize-no-scanner): Call the above, to detected bugs,
where two virtual target correspond to one Jam target name.
-
-The log messages for the last two files are good. They tell what was
-changed. The change to the first file is clearly undercommented.
-It's OK to use terse log messages for uninteresting changes, like
-ones induces by interface changes elsewhere.
+The log messages for the last two files are good. They tell what was changed.
+The change to the first file is clearly undercommented.
+
+It's OK to use terse log messages for uninteresting changes, like ones induced
+by interface changes elsewhere.
POLICIES.
1. Testing.
-All serious changes must be tested. New rules must be tested by the module
-where they are declared. Test system (test/test_system.html) should be used
-to verify user-observable behaviour.
+All serious changes must be tested. New rules must be tested by the module where
+they are declared. The test system (test/test_system.html) should be used to
+verify user-observable behaviour.
2. Documentation.
-It turns out that it's hard to have too much comments, but it's easy to have
-too little. Please prepend each rule with a comment saying what the rule does
-and what arguments mean. Stop for a minute and consider if the comment makes
-sense for anybody else, and completely describes what the rules does. Generic
-phrases like "adjusts properties" are really not enough.
+It turns out that it's hard to have too many comments, but it's easy to have too
+few. Please prepend each rule with a comment saying what the rule does and what
+its arguments mean. Stop for a minute and consider whether the comment makes
+sense to anybody else and completely describes what the rule does. Generic
+phrases like "adjusts properties" are really not enough.
When applicable, make changes to the user documentation as well.
@@ -106,8 +106,8 @@
rule call-me-ishmael ( ) ...
- 2. Names with dots in them are "intended globals". Ordinary globals use
- a dot prefix:
+ 2. Names with dots in them are "intended globals". Ordinary globals use a
+ dot prefix:
.foobar
$(.foobar)
@@ -142,12 +142,12 @@
Please pass HTML files through HTML Tidy (http://tidy.sf.net) before
committing. This has two important purposes:
- detecting bad HTML
- - converting files to uniform indentation style, which inverses effect
- of different editors and makes differences between revisions much
- smaller and easy for review.
+ - converting files to a uniform indentation style, which undoes the effects of
+ different editors and makes differences between revisions much smaller and
+ easier to review.
- Alas, the way Tidy indents HTML differs between version. Please use
- the version awailable at
+ Alas, the way Tidy indents HTML differs between versions. Please use the
+ version available at
http://tidy.sourceforge.net/src/old/tidy_src_020411.tgz
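The guidelines above recommend accompanying a bug report with a regression test driven by the test system. A minimal sketch of such a test, in the style of the scripts under test/, might look as follows; it is not part of this changeset, and the Jamroot contents and expected output are invented for the example.

    # Sketch only -- not part of this commit. A minimal regression test of the
    # kind the guidelines ask for: reproduce the setup, run the build system,
    # and state exactly what output you expect.
    import BoostBuild

    t = BoostBuild.Tester()

    t.write("jamroot.jam", """
    ECHO "greetings from jamroot" ;
    """)

    t.run_build_system()
    t.expect_output_line("greetings from jamroot")

    t.cleanup()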
Modified: branches/release/tools/build/v2/kernel/bootstrap.jam
==============================================================================
--- branches/release/tools/build/v2/kernel/bootstrap.jam (original)
+++ branches/release/tools/build/v2/kernel/bootstrap.jam 2008-03-15 14:55:28 EDT (Sat, 15 Mar 2008)
@@ -118,7 +118,7 @@
import option ;
local dont-build = [ option.process ] ;
-# Should we skip building, i.e. loding the build system, according
+# Should we skip building, i.e. loading the build system, according
# to the options processed?
#
if ! $(dont-build)
@@ -131,4 +131,3 @@
# Use last element in case of multiple command-line options
import $(build-system[-1]) ;
}
-
Modified: branches/release/tools/build/v2/kernel/errors.jam
==============================================================================
--- branches/release/tools/build/v2/kernel/errors.jam (original)
+++ branches/release/tools/build/v2/kernel/errors.jam 2008-03-15 14:55:28 EDT (Sat, 15 Mar 2008)
@@ -1,50 +1,48 @@
-# Copyright 2003 Dave Abrahams
-# Copyright 2004 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Print a stack backtrace leading to this rule's caller. Each
-# argument represents a line of output to be printed after the first
-# line of the backtrace.
+# Copyright 2003 Dave Abrahams
+# Copyright 2004 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Print a stack backtrace leading to this rule's caller. Each argument
+# represents a line of output to be printed after the first line of the
+# backtrace.
rule backtrace ( skip-frames prefix messages * : * )
{
local frame-skips = 5 9 13 17 21 25 29 33 37 41 45 49 53 57 61 65 69 73 77 81 ;
local drop-elements = $(frame-skips[$(skip-frames)]) ;
if ! ( $(skip-frames) in 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 )
{
- ECHO warning: backtrace doesn't support skipping
- $(skip-frames) frames; using 1 instead. ;
+ ECHO warning: backtrace doesn't support skipping $(skip-frames) frames;
+ using 1 instead. ;
drop-elements = 5 ;
}
-
- local args = $(.args) ;
+
+ local args = $(.args) ;
if $(.user-modules-only)
{
local bt = [ nearest-user-location ] ;
ECHO "$(prefix) at $(bt) " ;
for local n in $(args)
- {
+ {
if $($(n))-is-not-empty
{
ECHO $(prefix) $($(n)) ;
}
- }
+ }
}
else
{
- # get the whole backtrace, then drop the initial quadruples
+ # Get the whole backtrace, then drop the initial quadruples
# corresponding to the frames that must be skipped.
local bt = [ BACKTRACE ] ;
- bt = $(bt[$(drop-elements)-]) ;
-
+ bt = $(bt[$(drop-elements)-]) ;
+
while $(bt)
{
local m = [ MATCH ^(.+)\\.$ : $(bt[3]) ] ;
ECHO $(bt[1]):$(bt[2]): "in" $(bt[4]) "from module" $(m) ;
-
-
- # the first time through, print each argument on a separate
- # line
+
+ # The first time through, print each argument on a separate line.
for local n in $(args)
{
if $($(n))-is-not-empty
@@ -52,39 +50,40 @@
ECHO $(prefix) $($(n)) ;
}
}
- args = ; # kill args so that this never happens again
-
- # Move on to the next quadruple
+ args = ; # Kill args so that this never happens again.
+
+ # Move on to the next quadruple.
bt = $(bt[5-]) ;
}
- }
+ }
}
.args ?= messages 2 3 4 5 6 7 8 9 ;
.disabled ?= ;
-.last-error-$(.args) ?= ;
+.last-error-$(.args) ?= ;
+
# try-catch --
#
-# This is not really an exception-handling mechanism, but it does
-# allow us to perform some error-checking on our
-# error-checking. Errors are suppressed after a try, and the first one
-# is recorded. Use catch to check that the error message matched
-# expectations.
+# This is not really an exception-handling mechanism, but it does allow us to
+# perform some error-checking on our error-checking. Errors are suppressed after
+# a try, and the first one is recorded. Use catch to check that the error
+# message matched expectations.
-# begin looking for error messages
+# Begin looking for error messages.
rule try ( )
{
.disabled += true ;
.last-error-$(.args) = ;
}
-# stop looking for error messages; generate an error if an argument of
-# messages is not found in the corresponding argument in the error call.
+
+# Stop looking for error messages; generate an error if an argument of messages
+# is not found in the corresponding argument in the error call.
rule catch ( messages * : * )
{
- .disabled = $(.disabled[2-]) ; # pop the stack
-
+ .disabled = $(.disabled[2-]) ; # Pop the stack.
+
import sequence ;
if ! $(.last-error-$(.args))-is-nonempty
@@ -109,6 +108,7 @@
}
}
+
rule error-skip-frames ( skip-frames messages * : * )
{
if ! $(.disabled)
@@ -122,12 +122,12 @@
{
# Add an extra empty string so that we always have
# something in the event of an error
- .last-error-$(n) = $($(n)) "" ;
+ .last-error-$(n) = $($(n)) "" ;
}
}
}
-if --no-error-backtrace in [ modules.peek : ARGV ]
+if --no-error-backtrace in [ modules.peek : ARGV ]
{
.no-error-backtrace = true ;
}
@@ -144,24 +144,25 @@
if $($(n))-is-not-empty
{
if ! $(first-printed)
- {
+ {
ECHO error: $($(n)) ;
first-printed = true ;
}
else
{
- ECHO $($(n)) ;
- }
+ ECHO $($(n)) ;
+ }
}
- }
+ }
EXIT ;
}
else
- {
+ {
error-skip-frames 3 $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
- }
+ }
}
+
# Same as 'error', but the generated backtrace will include only user files.
rule user-error ( messages * : * )
{
@@ -176,10 +177,11 @@
backtrace 2 warning: $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
}
-# convert an arbitrary argument list into a list with ":" separators
-# and quoted elements representing the same information. This is
-# mostly useful for formatting descriptions of the arguments with
-# which a rule was called when reporting an error.
+
+# Convert an arbitrary argument list into a list with ":" separators and quoted
+# elements representing the same information. This is mostly useful for
+# formatting descriptions of arguments with which a rule was called when
+# reporting an error.
rule lol->list ( * )
{
local result ;
@@ -188,7 +190,7 @@
{
local n = $(remaining[1]) ;
remaining = $(remaining[2-]) ;
-
+
if $(n) != 1
{
result += ":" ;
@@ -198,49 +200,50 @@
return $(result) ;
}
-# Return the file:line for the nearest entry in backtrace which correspond
-# to a user module.
+
+# Return the file:line for the nearest entry in backtrace which correspond to a
+# user module.
rule nearest-user-location ( )
{
local bt = [ BACKTRACE ] ;
-
+
local result ;
while $(bt) && ! $(result)
{
local m = [ MATCH ^(.+)\\.$ : $(bt[3]) ] ;
local user-modules = ([Jj]amroot(.jam|.v2|)|([Jj]amfile(.jam|.v2|)|user-config.jam|site-config.jam|project-root.jam) ;
-
- if [ MATCH $(user-modules) : $(bt[1]:D=) ]
- {
+
+ if [ MATCH $(user-modules) : $(bt[1]:D=) ]
+ {
result = $(bt[1]):$(bt[2]) ;
- }
+ }
bt = $(bt[5-]) ;
}
return $(result) ;
}
-# If optimized rule is available in jam, use it.
-if NEAREST_USER_LOCATION in [ RULENAMES ]
+
+# If optimized rule is available in Jam, use it.
+if NEAREST_USER_LOCATION in [ RULENAMES ]
{
rule nearest-user-location ( )
{
local r = [ NEAREST_USER_LOCATION ] ;
return $(r[1]):$(r[2]) ;
- }
+ }
}
-
rule __test__ ( )
{
- # show that we can correctly catch an expected error
+ # Show that we can correctly catch an expected error.
try ;
{
error an error occurred : somewhere ;
}
catch an error occurred : somewhere ;
-
- # show that unexpected errors generate real errors
+
+ # Show that unexpected errors generate real errors.
try ;
{
try ;
@@ -250,9 +253,8 @@
catch an error occurred : nowhere ;
}
catch expected \"nowhere\" in argument 2 ;
-
- # show that not catching an error where one was expected is an
- # error
+
+ # Show that not catching an error where one was expected is an error.
try ;
{
try ;
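As a usage note for this module, the following sketch (not part of this changeset) shows its user-facing side: a project file can abort with errors.user-error, and the backtrace then points at the user's Jamroot rather than at Boost.Build internals. The message text is invented and the exact backtrace formatting is deliberately not asserted.

    # Sketch only -- not part of this commit. Shows errors.user-error raised
    # from a user Jamroot; we only check the non-zero exit status and that the
    # message appears somewhere in the output.
    import BoostBuild

    t = BoostBuild.Tester()

    t.write("jamroot.jam", """
    import errors ;
    errors.user-error "something is wrong in this project" ;
    """)

    t.run_build_system(status=1, stderr=None)
    t.expect_output_line("*something is wrong in this project*")

    t.cleanup()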
Modified: branches/release/tools/build/v2/kernel/modules.jam
==============================================================================
--- branches/release/tools/build/v2/kernel/modules.jam (original)
+++ branches/release/tools/build/v2/kernel/modules.jam 2008-03-15 14:55:28 EDT (Sat, 15 Mar 2008)
@@ -1,22 +1,24 @@
-# Copyright 2003 Dave Abrahams
-# Copyright 2003, 2005 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+# Copyright 2003 Dave Abrahams
+# Copyright 2003, 2005 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-# Essentially an include guard; ensures that no module is loaded multiple times
+# Essentially an include guard; ensures that no module is loaded multiple times.
.loaded ?= ;
-# A list of modules currently being loaded for error reporting of circular dependencies
+# A list of modules currently being loaded for error reporting of circular
+# dependencies.
.loading ?= ;
-# A list of modules needing to be tested via __test__ rule
+# A list of modules needing to be tested via __test__ rule.
.untested ?= ;
-# A list of modules which have been tested via __test__
+# A list of modules which have been tested via __test__.
.tested ?= ;
-# meant to be invoked from import when no __test__ rule is defined in a given
-# module
+
+# Meant to be invoked from import when no __test__ rule is defined in the given
+# module.
local rule no-test-defined
{
import modules ;
@@ -26,15 +28,17 @@
}
}
-# return the binding of the given module
+
+# Return the binding of the given module.
rule binding ( module )
{
return $($(module).__binding__) ;
}
-# Sets the module-local value of a variable. This is the most
-# reliable way to set a module-local variable in a different module;
-# it eliminates issues of name shadowing due to dynamic scoping.
+
+# Sets the module-local value of a variable. This is the most reliable way to
+# set a module-local variable in a different module; it eliminates issues of
+# name shadowing due to dynamic scoping.
rule poke ( module-name ? : variables + : value * )
{
module $(<)
@@ -43,10 +47,10 @@
}
}
-# Returns the module-local value of a variable. This is the most
-# reliable way to examine a module-local variable in a different
-# module; it eliminates issues of name shadowing due to dynamic
-# scoping.
+
+# Returns the module-local value of a variable. This is the most reliable way to
+# examine a module-local variable in a different module; it eliminates issues of
+# name shadowing due to dynamic scoping.
rule peek ( module-name ? : variables + )
{
module $(<)
@@ -55,10 +59,11 @@
}
}
-# Call the given rule locally in the given module. Use this for rules
-# which accept rule names as arguments, so that the passed rule may be
-# invoked in the context of the rule's caller (for example, if the
-# rule accesses module globals or is a local rule).
+
+# Call the given rule locally in the given module. Use this for rules accepting
+# rule names as arguments, so that the passed rule may be invoked in the context
+# of the rule's caller (for example, if the rule accesses module globals or is a
+# local rule).
rule call-in ( module-name ? : rule-name args * : * )
{
module $(module-name)
@@ -67,65 +72,66 @@
}
}
-# Given a possibly qualified rule name and arguments, remove any
-# initial module qualification from the rule and invoke it in that
-# module. If there is no module qualification, the rule is invoked in
-# the global module.
+
+# Given a possibly qualified rule name and arguments, remove any initial module
+# qualification from the rule and invoke it in that module. If there is no
+# module qualification, the rule is invoked in the global module.
rule call-locally ( qualified-rule-name args * : * )
{
local module-rule = [ MATCH (.*)\\.(.*) : $(qualified-rule-name) ] ;
local rule-name = $(module-rule[2]) ;
rule-name ?= $(qualified-rule-name) ;
- return [
- call-in $(module-rule[1])
- : $(rule-name) $(args) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9)
- ] ;
+ return [ call-in $(module-rule[1]) : $(rule-name) $(args)
+ : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ] ;
}
-# load the indicated module if it is not already loaded.
+
+# Load the indicated module if it is not already loaded.
rule load (
- module-name # name of module to load. Rules will be defined in this module
- : filename ? # (partial) path to file; Defaults to $(module-name).jam
- : search * # Directories in which to search for filename. Defaults to $(BOOST_BUILD_PATH)
+ module-name # Name of module to load. Rules will be defined in this
+ # module.
+ : filename ? # (partial) path to file; Defaults to $(module-name).jam.
+ : search * # Directories in which to search for filename. Defaults to
+ # $(BOOST_BUILD_PATH).
)
{
# Avoid loading modules twice
if ! ( $(module-name) in $(.loaded) )
{
filename ?= $(module-name).jam ;
-
- # Mark the module loaded so we don't try to load it recursively
+
+ # Mark the module loaded so we don't try to load it recursively.
.loaded += $(module-name) ;
-
- # suppress tests if any module loads are already in progress.
+
+ # Suppress tests if any module loads are already in progress.
local suppress-test = $(.loading[1]) ;
-
- # Push this module on the loading stack
+
+ # Push this module on the loading stack.
.loading += $(module-name) ;
-
- # Remember that it's untested
- .untested += $(module-name) ;
-
- # Insert the new module's __name__ and __file__ globals
+
+ # Remember that it's untested.
+ .untested += $(module-name) ;
+
+ # Insert the new module's __name__ and __file__ globals.
poke $(module-name) : __name__ : $(module-name) ;
poke $(module-name) : __file__ : $(filename) ;
-
+
module $(module-name)
{
- # Prepare a default behavior, in case no __test__ is defined.
+ # Prepare default behavior, in case no __test__ is defined.
IMPORT modules : no-test-defined : $(__name__) : __test__ ;
- # Add some grist so that the module will have a unique target name
+ # Add some grist so that the module will have a unique target name.
local module-target = $(__file__:G=module@) ;
-
+
local search = $(3) ;
search ?= [ modules.peek : BOOST_BUILD_PATH ] ;
SEARCH on $(module-target) = $(search) ;
BINDRULE on $(module-target) = modules.record-binding ;
-
+
include $(module-target) ;
-
- # Allow the module to see its own names with full qualification
+
+ # Allow the module to see its own names with full qualification.
local rules = [ RULENAMES $(__name__) ] ;
IMPORT $(__name__) : $(rules) : $(__name__) : $(__name__).$(rules) ;
}
@@ -133,40 +139,41 @@
if $(module-name) != modules && ! [ binding $(module-name) ]
{
import errors ;
- errors.error "couldn't find module" $(module-name) in $(search) ;
+ errors.error "Couldn't find module" $(module-name) in $(search) ;
}
-
- # Pop the loading stack. Must happen before testing or we'll find a circular loading dependency
+
+ # Pop the loading stack. Must happen before testing or we'll run into a
+ # circular loading dependency.
.loading = $(.loading[1--2]) ;
-
- # Run any pending tests if this is an outer load
+
+ # Run any pending tests if this is an outer load.
if ! $(suppress-test)
{
local argv = [ peek : ARGV ] ;
for local m in $(.untested)
{
- if ( ! $(m) in $(.tested) ) # avoid recursive test invocations
- && ( ( --debug in $(argv) ) || ( --debug-module=$(m) in $(argv) ) )
+ if ( ! $(m) in $(.tested) ) # Avoid recursive test invocations.
+ && ( ( --debug in $(argv) ) || ( --debug-module=$(m) in $(argv) ) )
{
.tested += $(m) ;
if ! ( --quiet in $(argv) )
{
ECHO testing module $(m)... ;
}
-
- # Import m's rules into __test-$(m)__ for easy access
+
+ # Import m's rules into __test-$(m)__ for easy access.
IMPORT $(m) : [ RULENAMES $(m) ] : __test-$(m)__ : [ RULENAMES $(m) ] ;
-
- # execute the module's __test__ rule in its own module to
- # eliminate the inadvertent effects of testing
- # module dependencies (such as assert) on the module itself.
+
+ # Execute the module's __test__ rule in its own module to
+ # eliminate the inadvertent effects of testing module
+ # dependencies (such as assert) on the module itself.
IMPORT $(m) : __test__ : __test-$(m)__ : __test__ : LOCALIZE ;
-
+
module __test-$(m)__
{
- # set up the name of the module we're testing
- # so that no-test-defined can find it.
- __module__ = $(1) ;
+ # Set up the name of the module we're testing so that
+ # no-test-defined can find it.
+ __module__ = $(1) ;
__test__ ;
}
}
@@ -178,23 +185,24 @@
{
import errors ;
errors.error loading \"$(module-name)\"
- : circular module loading dependency:
- : $(.loading)" ->" $(module-name) ;
+ : circular module loading dependency:
+ : $(.loading)" ->" $(module-name) ;
}
}
-# This helper is used by load (above) to record the binding (path) of
-# each loaded module.
+
+# This helper is used by load (above) to record the binding (path) of each
+# loaded module.
rule record-binding ( module-target : binding )
{
$(.loading[-1]).__binding__ = $(binding) ;
}
-# Transform each path in the list, with all backslashes converted to
-# forward slashes and all detectable redundancy removed. Something
-# like this is probably needed in path.jam, but I'm not sure of that,
-# I don't understand it, and I'm not ready to move all of path.jam
-# into the kernel.
+
+# Transform each path in the list, with all backslashes converted to forward
+# slashes and all detectable redundancy removed. Something like this is probably
+# needed in path.jam, but I'm not sure of that, I don't understand it, and I'm
+# not ready to move all of path.jam into the kernel.
local rule normalize-raw-paths ( paths * )
{
local result ;
@@ -205,64 +213,62 @@
return $(result) ;
}
+
.cwd = [ PWD ] ;
-# load the indicated module and import rule names into the current
-# module. Any members of rules-opt will be available without
-# qualification in the caller's module. Any members of rename-opt will
-# be taken as the names of the rules in the caller's module, in place
-# of the names they have in the imported module. If rules-opt = '*',
-# all rules from the indicated module are imported into the caller's
-# module. If rename-opt is supplied, it must have the same number of
+# Load the indicated module and import rule names into the current module. Any
+# members of rules-opt will be available without qualification in the caller's
+# module. Any members of rename-opt will be taken as the names of the rules in
+# the caller's module, in place of the names they have in the imported module.
+# If rules-opt = '*', all rules from the indicated module are imported into the
+# caller's module. If rename-opt is supplied, it must have the same number of
# elements as rules-opt.
rule import ( module-names + : rules-opt * : rename-opt * )
{
- if $(rules-opt) = * || ! $(rules-opt)
+ if ( $(rules-opt) = * || ! $(rules-opt) ) && $(rename-opt)
{
- if $(rename-opt)
- {
- errors.error "rule aliasing is only available for explicit imports." ;
- }
+ errors.error "Rule aliasing is only available for explicit imports." ;
}
-
+
if $(module-names[2]) && ( $(rules-opt) || $(rename-opt) )
{
- errors.error when loading multiple modules, no specific rules or renaming is allowed ;
+ errors.error "When loading multiple modules, no specific rules or"
+ "renaming is allowed" ;
}
-
- local caller = [ CALLER_MODULE ] ;
-
+
+ local caller = [ CALLER_MODULE ] ;
+
# Import each specified module
for local m in $(module-names)
{
if ! $(m) in $(.loaded)
- {
- # if the importing module isn't already in the BOOST_BUILD_PATH,
- # prepend it to the path. We don't want to invert the search
- # order of modules that are already there.
-
- local caller-location ;
+ {
+ # If the importing module isn't already in the BOOST_BUILD_PATH,
+ # prepend it to the path. We don't want to invert the search order
+ # of modules that are already there.
+
+ local caller-location ;
if $(caller)
{
caller-location = [ binding $(caller) ] ;
caller-location = $(caller-location:D) ;
caller-location = [ normalize-raw-paths $(caller-location:R=$(.cwd)) ] ;
}
-
+
local search = [ peek : BOOST_BUILD_PATH ] ;
search = [ normalize-raw-paths $(search:R=$(.cwd)) ] ;
-
+
if $(caller-location) && ! $(caller-location) in $(search)
{
search = $(caller-location) $(search) ;
}
-
+
load $(m) : : $(search) ;
}
-
+
IMPORT_MODULE $(m) : $(caller) ;
-
+
if $(rules-opt)
{
local source-names ;
@@ -282,50 +288,49 @@
}
}
-# Define exported copies in $(target-module) of all rules exported
-# from $(source-module). Also make them available in the global
-# module with qualification, so that it is just as though the rules
-# were defined originally in $(target-module).
-rule clone-rules (
- source-module
- target-module
- )
+# Define exported copies in $(target-module) of all rules exported from
+# $(source-module). Also make them available in the global module with
+# qualification, so that it is just as though the rules were defined originally
+# in $(target-module).
+rule clone-rules ( source-module target-module )
{
local rules = [ RULENAMES $(source-module) ] ;
-
+
IMPORT $(source-module) : $(rules) : $(target-module) : $(rules) : LOCALIZE ;
EXPORT $(target-module) : $(rules) ;
IMPORT $(target-module) : $(rules) : : $(target-module).$(rules) ;
}
-# These rules need to be available in all modules to implement
-# module loading itself and other fundamental operations.
+
+# These rules need to be available in all modules to implement module loading
+# itself and other fundamental operations.
local globalize = peek poke record-binding ;
IMPORT modules : $(globalize) : : modules.$(globalize) ;
+
local rule __test__ ( )
{
import assert ;
import modules : normalize-raw-paths ;
-
+
module modules.__test__
{
foo = bar ;
}
-
+
assert.result bar : peek modules.__test__ : foo ;
+
poke modules.__test__ : foo : bar baz ;
assert.result bar baz : peek modules.__test__ : foo ;
+
assert.result c:/foo/bar : normalize-raw-paths c:/x/../foo/./xx/yy/../../bar ;
- assert.result . : normalize-raw-paths . ;
- assert.result .. : normalize-raw-paths .. ;
- assert.result ../.. : normalize-raw-paths ../.. ;
- assert.result .. : normalize-raw-paths ./.. ;
- assert.result / / : normalize-raw-paths / \\ ;
- assert.result a : normalize-raw-paths a ;
- assert.result a : normalize-raw-paths a/ ;
- assert.result /a : normalize-raw-paths /a/ ;
- assert.result / : normalize-raw-paths /a/.. ;
+ assert.result . : normalize-raw-paths . ;
+ assert.result .. : normalize-raw-paths .. ;
+ assert.result ../.. : normalize-raw-paths ../.. ;
+ assert.result .. : normalize-raw-paths ./.. ;
+ assert.result / / : normalize-raw-paths / \\ ;
+ assert.result a : normalize-raw-paths a ;
+ assert.result a : normalize-raw-paths a/ ;
+ assert.result /a : normalize-raw-paths /a/ ;
+ assert.result / : normalize-raw-paths /a/.. ;
}
-
-
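As a usage note for the loading and importing machinery documented above, the following sketch (not part of this changeset; the module, rule, and message names are invented) shows a module placed next to a Jamroot being loaded, used with qualification, and imported again with renaming.

    # Sketch only -- not part of this commit. The import rule prepends the
    # caller's directory to the module search path, so greet.jam next to the
    # Jamroot is found without any extra configuration.
    import BoostBuild

    t = BoostBuild.Tester()

    t.write("greet.jam", """
    rule hello ( name )
    {
        ECHO "hello," $(name) ;
    }
    """)

    t.write("jamroot.jam", """
    import greet ;                       # rules stay qualified: greet.hello
    greet.hello world ;

    import greet : hello : say-hello ;   # explicit import with renaming
    say-hello again ;
    """)

    t.run_build_system()
    t.expect_output_line("hello, world")
    t.expect_output_line("hello, again")

    t.cleanup()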
Modified: branches/release/tools/build/v2/notes/build_dir_option.txt
==============================================================================
--- branches/release/tools/build/v2/notes/build_dir_option.txt (original)
+++ branches/release/tools/build/v2/notes/build_dir_option.txt 2008-03-15 14:55:28 EDT (Sat, 15 Mar 2008)
@@ -1,6 +1,6 @@
-Copyright 2005 Vladimir Prus
-Distributed under the Boost Software License, Version 1.0.
-(See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+Copyright 2005 Vladimir Prus
+Distributed under the Boost Software License, Version 1.0.
+(See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
Summary
@@ -41,15 +41,15 @@
project foo : build-dir /tmp/build/foo/bin.v2 ;
-We can't drop "bin.v2" because it's quite possible that the name of build
-dir have specific meaning. For example, it can be used to
-separate Boost.Build V2 and V1 build results.
+We can't drop "bin.v2" because it's quite possible that the name of the build
+dir has a specific meaning. For example, it can be used to separate Boost.Build
+V1 and V2 build results.
-The --build-dir option has no effect if Jamroot does not define any project
-id. Dowing otherwise can lead to nasty problems if we're building two distinct
+The --build-dir option has no effect if Jamroot does not define any project id.
+Honouring it in that case could lead to nasty problems if we're building two distinct
projects (that is with two different Jamroot). They'll get the same build
-directory. Most likely, user will see
-the "duplicate target" error, which is generally confusing.
+directory. Most likely, the user will see the "duplicate target" error, which is
+generally confusing.
It is expected that any non-trivial project will have a top-level "project"
invocation with a non-empty id, so the above limitation is not so drastic.
@@ -57,24 +57,21 @@
is specified.
Here's the exact behavior of the --build-dir option. If we're loading a
-Jamfile (either root or non-root), that declare some project id and some
+Jamfile (either root or non-root) that declares some project id and some
build-dir attribute, the following table gives the value of build-dir
that will actually be used.
-
-Root? Id Build-dir attribute Resulting build dir
-yes none * --build-dir is ignored, with warning
-yes 'foo' none /tmp/build/foo
-yes 'foo' 'bin.v2' /tmp/build/foo/bin.v2
-yes 'foo' '/tmp/bar' Error [1]
-no * none --build-dir has no effect, inherited build dir is used
-no * non-empty Error [2]
-
+-------------------------------------------------------------------------------
+Root? Id Build-dir attribute Resulting build dir
+-------------------------------------------------------------------------------
+yes none * --build-dir is ignored, with warning
+yes 'foo' none /tmp/build/foo
+yes 'foo' 'bin.v2' /tmp/build/foo/bin.v2
+yes 'foo' '/tmp/bar' Error [1]
+no * none --build-dir has no effect, inherited
+ build dir is used
+no * non-empty Error [2]
+-------------------------------------------------------------------------------
[1] -- not clear what to do
-[2] -- can be made to work, but non-empty build-dir
+[2] -- can be made to work, but non-empty build-dir
attribute in non-root Jamfile does not make much sense even without --build-dir
-
-
-
-
-
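A sketch of the behaviour in the table above, again in the style of the test scripts, and not part of this changeset. The project id, source names and, in particular, the exact subdirectory layout asserted at the end are assumptions for illustration; adjust them to what your Boost.Build version actually produces.

    # Sketch only -- not part of this commit. Root Jamfile with id 'foo' and no
    # build-dir attribute: per the table above, --build-dir=<dir> should place
    # targets under <dir>/foo instead of the default bin directory.
    import os
    import BoostBuild

    t = BoostBuild.Tester()

    t.write("jamroot.jam", """
    project foo ;
    exe hello : hello.cpp ;
    """)
    t.write("hello.cpp", "int main() { return 0; }\n")

    t.run_build_system("--build-dir=" + os.path.join(t.workdir, "out"))

    # The <toolset>/<variant> part below is the usual layout, but it is an
    # assumption, not something this note guarantees.
    t.expect_addition("out/foo/$toolset/debug/hello.obj")

    t.cleanup()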
Modified: branches/release/tools/build/v2/options/help.jam
==============================================================================
--- branches/release/tools/build/v2/options/help.jam (original)
+++ branches/release/tools/build/v2/options/help.jam 2008-03-15 14:55:28 EDT (Sat, 15 Mar 2008)
@@ -25,15 +25,16 @@
# The help system options are parsed here and handed off to the doc
# module to translate into documentation requests and actions. The
-# understood options are::
+# understood options are:
#
-# --help-all
-# --help-enable-<option>
# --help-disable-<option>
-# --help-output <type>
-# --help-output-file <file>
+# --help-doc-options
+# --help-enable-<option>
+# --help-internal
# --help-options
# --help-usage
+# --help-output <type>
+# --help-output-file <file>
# --help [<module-or-class>]
#
rule process (
Modified: branches/release/tools/build/v2/roll.sh
==============================================================================
--- branches/release/tools/build/v2/roll.sh (original)
+++ branches/release/tools/build/v2/roll.sh 2008-03-15 14:55:28 EDT (Sat, 15 Mar 2008)
@@ -23,10 +23,7 @@
rm -rf example/versioned
# Remove unnecessary top-level files
-find . -maxdepth 1 -type f | egrep -v "timestamp.txt|roll.sh|bootstrap.jam|build-system.jam|boost_build_v2.html|boost_build.png|index.html|hacking.txt|site-config.jam|user-config.jam" | xargs rm -f
-
-# Prepare some more files.
-echo -e "boost-build kernel ;\n" > boost-build.jam
+find . -maxdepth 1 -type f | egrep -v "boost-build.jam|timestamp.txt|roll.sh|bootstrap.jam|build-system.jam|boost_build.png|index.html|hacking.txt|site-config.jam|user-config.jam" | xargs rm -f
# Build the documentation
touch doc/project-root.jam
@@ -52,10 +49,21 @@
rm roll.sh
chmod a+x jam_src/build.bat
cd .. && zip -r boost-build.zip boost-build && tar --bzip2 -cf boost-build.tar.bz2 boost-build
+# Copy packages to a location where they are grabbed for beta.boost.org
+cp boost-build.zip boost-build.tar.bz2 ~/public_html/boost_build_nightly
cd boost-build
chmod -R u+w *
# Upload docs to sourceforge
-perl -pi -e 's%<!-- sf logo -->%%' index.html doc/*.html
+x=`cat <<EOF
+<script src="http://www.google-analytics.com/urchin.js" type="text/javascript">
+</script>
+<script type="text/javascript">
+_uacct = "UA-2917240-2";
+urchinTracker();
+</script>
+EOF`
+echo $x
+perl -pi -e "s|</body>|$x</body>|" `find doc -name '*.html'`
scp -r doc example boost_build.png *.html hacking.txt vladimir_prus_at_[hidden]:/home/groups/b/bo/boost/htdocs/boost-build2
scp ../userman.pdf vladimir_prus_at_[hidden]:/home/groups/b/bo/boost/htdocs/boost-build2/doc
Modified: branches/release/tools/build/v2/test/BoostBuild.py
==============================================================================
--- branches/release/tools/build/v2/test/BoostBuild.py (original)
+++ branches/release/tools/build/v2/test/BoostBuild.py 2008-03-15 14:55:28 EDT (Sat, 15 Mar 2008)
@@ -6,29 +6,28 @@
# http://www.boost.org/LICENSE_1_0.txt)
import TestCmd
-from tree import build_tree, trees_difference
import copy
import fnmatch
import glob
+import math
import os
import re
import shutil
import string
-import types
-import time
-import tempfile
+import StringIO
import sys
+import tempfile
+import time
import traceback
-import math
-from StringIO import StringIO
+import tree
+import types
-annotation_func = None
annotations = []
+
def print_annotation(name, value):
- """Writes some named bit of information about test
- run.
+ """Writes some named bits of information about test run.
"""
print name + " {{{"
print value
@@ -41,18 +40,23 @@
print_annotation(ann[0], ann[1])
annotations = []
+
defer_annotations = 0
+
def set_defer_annotations(n):
global defer_annotations
defer_annotations = n
+
def annotation(name, value):
- """Records an annotation about test run."""
+ """Records an annotation about the test run.
+ """
annotations.append((name, value))
if not defer_annotations:
flush_annotations()
+
def get_toolset():
toolset = None;
for arg in sys.argv[1:]:
@@ -60,17 +64,22 @@
toolset = arg
return toolset or 'gcc'
-windows = 0
+
+# Detect the host OS.
+windows = False
if os.environ.get('OS','').lower().startswith('windows') or \
os.__dict__.has_key('uname') and \
os.uname()[0].lower().startswith('cygwin'):
- windows = 1
+ windows = True
+
suffixes = {}
+
# Prepare the map of suffixes
def prepare_suffix_map(toolset):
- global windows, suffixes
+ global windows
+ global suffixes
suffixes = {'.exe': '', '.dll': '.so', '.lib': '.a', '.obj': '.o'}
suffixes['.implib'] = '.no_implib_files_on_this_platform'
if windows:
@@ -82,7 +91,8 @@
if os.__dict__.has_key('uname') and os.uname()[0] == 'Darwin':
suffixes['.dll'] = '.dylib'
-def re_remove(sequence,regex):
+
+def re_remove(sequence, regex):
me = re.compile(regex)
result = filter( lambda x: me.match(x), sequence )
if 0 == len(result):
@@ -90,26 +100,29 @@
for r in result:
sequence.remove(r)
-def glob_remove(sequence,pattern):
+
+def glob_remove(sequence, pattern):
result = fnmatch.filter(sequence,pattern)
if 0 == len(result):
raise ValueError()
for r in result:
sequence.remove(r)
-lib_prefix = 1
-dll_prefix = 1
+
+# Configuration stating whether Boost Build is expected to automatically prepend
+# prefixes to built library targets.
+lib_prefix = True
+dll_prefix = True
if windows:
- #~ lib_prefix = 0
- dll_prefix = 0
-
-
+ dll_prefix = False
+
+
#
# FIXME: this is copy-pasted from TestSCons.py
# Should be moved to TestCmd.py?
#
if os.name == 'posix':
- def _failed(self, status = 0):
+ def _failed(self, status=0):
if self.status is None:
return None
if os.WIFSIGNALED(status):
@@ -121,7 +134,7 @@
else:
return -1
elif os.name == 'nt':
- def _failed(self, status = 0):
+ def _failed(self, status=0):
return not self.status is None and self.status != status
def _status(self):
return self.status
@@ -129,28 +142,28 @@
class Tester(TestCmd.TestCmd):
"""Class for testing Boost.Build.
- Optional argument `executable` indicates the name of the
- executable to invoke. Set this to "jam" to test Boost.Build v1
- behavior.
+ Optional argument `executable` indicates the name of the executable to
+ invoke. Set this to "jam" to test Boost.Build v1 behavior.
- Optional argument `work_dir` indicates an absolute directory,
- where the test will run be run.
+ Optional argument `work_dir` indicates an absolute directory where the test
+ will be run.
"""
- def __init__(self, arguments="", executable = 'bjam', match =
- TestCmd.match_exact, boost_build_path = None,
- translate_suffixes = 1, pass_toolset = 1,
- workdir = '',
- **keywords):
+ def __init__(self, arguments="", executable="bjam",
+ match=TestCmd.match_exact, boost_build_path=None,
+ translate_suffixes=True, pass_toolset=True, use_test_config=True,
+ ignore_toolset_requirements=True, workdir="", **keywords):
self.original_workdir = os.getcwd()
if workdir != '' and not os.path.isabs(workdir):
- raise "Parameter workdir <"+workdir+"> must point to a absolute directory: "
+ raise "Parameter workdir <"+workdir+"> must point to an absolute directory: "
self.last_build_time = 0
self.translate_suffixes = translate_suffixes
+ self.use_test_config = use_test_config
self.toolset = get_toolset()
self.pass_toolset = pass_toolset
+ self.ignore_toolset_requirements = ignore_toolset_requirements
prepare_suffix_map(pass_toolset and self.toolset or 'gcc')
@@ -183,21 +196,18 @@
elif os.uname()[0] == "OSF1":
jam_build_dir = "bin.osf"
else:
- raise "Don't know directory where jam is build for this system: " + os.name + "/" + os.uname()[0]
+ raise "Don't know directory where Jam is built for this system: " + os.name + "/" + os.uname()[0]
else:
- raise "Don't know directory where jam is build for this system: " + os.name
-
- # Find there jam_src is located.
- # try for the debug version if it's lying around
+ raise "Don't know directory where Jam is built for this system: " + os.name
+ # Find where jam_src is located. Try for the debug version if it's
+ # lying around.
dirs = [os.path.join('../../../jam/src', jam_build_dir + '.debug'),
os.path.join('../../../jam/src', jam_build_dir),
os.path.join('../../jam_src', jam_build_dir + '.debug'),
os.path.join('../../jam_src', jam_build_dir),
os.path.join('../jam_src', jam_build_dir + '.debug'),
- os.path.join('../jam_src', jam_build_dir),
- ]
-
+ os.path.join('../jam_src', jam_build_dir)]
for d in dirs:
if os.path.exists(d):
jam_build_dir = d
@@ -215,7 +225,7 @@
boost_build_path = self.original_workdir
program_list = []
-
+
if '--default-bjam' in sys.argv:
program_list.append(executable)
inpath_bjam = True
@@ -225,7 +235,6 @@
program_list.append('-sBOOST_BUILD_PATH=' + boost_build_path)
if verbosity:
program_list += verbosity
- program_list += ["--ignore-toolset-requirements"]
if arguments:
program_list += arguments.split(" ")
@@ -233,8 +242,8 @@
self
, program=program_list
, match=match
- , workdir = workdir
- , inpath = inpath_bjam
+ , workdir=workdir
+ , inpath=inpath_bjam
, **keywords)
os.chdir(self.workdir)
@@ -244,22 +253,22 @@
TestCmd.TestCmd.cleanup(self)
os.chdir(self.original_workdir)
except AttributeError:
- # Whe this is called during by TestCmd.TestCmd.__del__ we can have both
- # 'TestCmd' and 'os' unavailable in our scope. Do nothing in this case.
+ # When this is called by TestCmd.TestCmd.__del__ we can have
+ # both 'TestCmd' and 'os' unavailable in our scope. Do nothing in
+ # this case.
pass
-
+
#
- # Methods that change working directory's content
+ # Methods that change the working directory's content.
#
def set_tree(self, tree_location):
- # Seem like it's not possible to remove a directory which is
- # current.
+ # Seems like it's not possible to remove the current directory.
d = os.getcwd()
os.chdir(os.path.dirname(self.workdir))
- shutil.rmtree(self.workdir, ignore_errors=0)
+ shutil.rmtree(self.workdir, ignore_errors=False)
if not os.path.isabs(tree_location):
- tree_location = os.path.join(self.original_workdir, tree_location)
+ tree_location = os.path.join(self.original_workdir, tree_location)
shutil.copytree(tree_location, self.workdir)
os.chdir(d)
@@ -271,7 +280,6 @@
os.path.walk(".", make_writable, None)
-
def write(self, file, content):
self.wait_for_time_change()
nfile = self.native_file_name(file)
@@ -286,40 +294,40 @@
os.makedirs(os.path.dirname(new))
except:
pass
-
+
try:
os.remove(new)
except:
pass
-
+
os.rename(old, new)
self.touch(new);
def copy(self, src, dst):
self.wait_for_time_change()
try:
- self.write(dst, self.read(src))
+ self.write(dst, self.read(src, 1))
except:
self.fail_test(1)
def copy_preserving_timestamp(self, src, dst):
src_name = self.native_file_name(src)
dst_name = self.native_file_name(dst)
- stats = os.stat(src_name)
- self.write(dst, self.read(src))
+ stats = os.stat(src_name)
+ self.write(dst, self.read(src, 1))
os.utime(dst_name, (stats.st_atime, stats.st_mtime))
-
+
def touch(self, names):
self.wait_for_time_change()
for name in self.adjust_names(names):
- os.utime(self.native_file_name(name), None)
+ os.utime(self.native_file_name(name), None)
def rm(self, names):
self.wait_for_time_change()
if not type(names) == types.ListType:
names = [names]
- # Avoid attempts to remove current dir
+ # Avoid attempts to remove the current directory.
os.chdir(self.original_workdir)
for name in names:
n = self.native_file_name(name)
@@ -329,32 +337,32 @@
n = self.glob_file(string.replace(name, "$toolset", self.toolset+"*"))
if n:
if os.path.isdir(n):
- shutil.rmtree(n, ignore_errors=0)
+ shutil.rmtree(n, ignore_errors=False)
else:
os.unlink(n)
- # Create working dir root again, in case
- # we've removed it
+ # Create working dir root again, in case we've removed it.
if not os.path.exists(self.workdir):
os.mkdir(self.workdir)
os.chdir(self.workdir)
def expand_toolset(self, name):
- """Expands $toolset in the given file to tested toolset"""
+ """Expands $toolset in the given file to tested toolset.
+ """
content = self.read(name)
content = string.replace(content, "$toolset", self.toolset)
self.write(name, content)
-
+
def dump_stdio(self):
annotation("STDOUT", self.stdout())
annotation("STDERR", self.stderr())
-
+
#
# FIXME: Large portion copied from TestSCons.py, should be moved?
#
- def run_build_system(
- self, extra_args='', subdir='', stdout = None, stderr = '',
- status = 0, match = None, pass_toolset = None, **kw):
+ def run_build_system(self, extra_args="", subdir="", stdout=None, stderr="",
+ status=0, match=None, pass_toolset=None, use_test_config=None,
+ ignore_toolset_requirements=None, **kw):
if os.path.isabs(subdir):
if stderr:
@@ -362,21 +370,32 @@
status = 1
return
- self.previous_tree = build_tree(self.workdir)
+ self.previous_tree = tree.build_tree(self.workdir)
if match is None:
match = self.match
if pass_toolset is None:
- pass_toolset = self.pass_toolset
+ pass_toolset = self.pass_toolset
+
+ if use_test_config is None:
+ use_test_config = self.use_test_config
+
+ if ignore_toolset_requirements is None:
+ ignore_toolset_requirements = self.ignore_toolset_requirements
try:
kw['program'] = []
kw['program'] += self.program
if extra_args:
- kw['program'] += extra_args.split(" ")
+ kw['program'] += extra_args.split(" ")
if pass_toolset:
- kw['program'].append(self.toolset)
+ kw['program'].append("toolset=" + self.toolset)
+ if use_test_config:
+ kw['program'].append('--test-config="%s"'
+ % os.path.join(self.original_workdir, "test-config.jam"))
+ if ignore_toolset_requirements:
+ kw['program'].append("--ignore-toolset-requirements")
kw['chdir'] = subdir
apply(TestCmd.TestCmd.run, [self], kw)
except:
@@ -388,42 +407,41 @@
if status != 0:
expect = " (expected %d)" % status
- annotation("failed command", '"%s" returned %d%s' % (
- kw['program'], _status(self), expect))
+ annotation("failure", '"%s" returned %d%s'
+ % (kw['program'], _status(self), expect))
annotation("reason", "error returned by bjam")
self.fail_test(1)
if not stdout is None and not match(self.stdout(), stdout):
- annotation("reason", "Unexpected stdout")
+ annotation("failure", "Unexpected stdout")
annotation("Expected STDOUT", stdout)
annotation("Actual STDOUT", self.stdout())
stderr = self.stderr()
if stderr:
annotation("STDERR", stderr)
self.maybe_do_diff(self.stdout(), stdout)
- self.fail_test(1, dump_stdio = 0)
+ self.fail_test(1, dump_stdio=False)
- # Intel tends to produce some message to stderr, which makes tests
- # fail
+ # Intel tends to produce some message to stderr which makes tests fail.
intel_workaround = re.compile("^xi(link|lib): executing.*\n", re.M)
actual_stderr = re.sub(intel_workaround, "", self.stderr())
if not stderr is None and not match(actual_stderr, stderr):
- annotation("reason", "Unexpected stderr")
+ annotation("failure", "Unexpected stderr")
annotation("Expected STDERR", stderr)
annotation("Actual STDERR", self.stderr())
annotation("STDOUT", self.stdout())
self.maybe_do_diff(actual_stderr, stderr)
- self.fail_test(1, dump_stdio = 0)
+ self.fail_test(1, dump_stdio=False)
- self.tree = build_tree(self.workdir)
- self.difference = trees_difference(self.previous_tree, self.tree)
+ self.tree = tree.build_tree(self.workdir)
+ self.difference = tree.trees_difference(self.previous_tree, self.tree)
self.difference.ignore_directories()
self.unexpected_difference = copy.deepcopy(self.difference)
self.last_build_time = time.time()
-
+
def glob_file(self, name):
result = None
if hasattr(self,'difference'):
@@ -437,14 +455,19 @@
result = result[0]
return result
- def read(self, name):
+ def read(self, name, binary=False):
try:
if self.toolset:
name = string.replace(name, "$toolset", self.toolset+"*")
name = self.glob_file(name)
- return open(name, "rU").read()
+ openMode = "r"
+ if binary:
+ openMode += "b"
+ else:
+ openMode += "U"
+ return open(name, openMode).read()
except:
- annotation("reason", "Could not open '%s'" % name)
+ annotation("failure", "Could not open '%s'" % name)
self.fail_test(1)
return ''
@@ -455,82 +478,82 @@
return result + '\n'
else:
return result
-
- def fail_test(self, condition, dump_stdio = 1, *args):
- # If test failed, print the difference
- if condition and hasattr(self, 'difference'):
- f = StringIO()
+
+ def fail_test(self, condition, dump_stdio=True, *args):
+ if not condition:
+ return
+
+ if hasattr(self, 'difference'):
+ f = StringIO.StringIO()
self.difference.pprint(f)
- annotation("changes causes by the last build command", f.getvalue())
-
- if condition and dump_stdio:
+ annotation("changes caused by the last build command", f.getvalue())
+
+ if dump_stdio:
self.dump_stdio()
- if condition and '--preserve' in sys.argv:
- print
+ if '--preserve' in sys.argv:
+ print
print "*** Copying the state of working dir into 'failed_test' ***"
- print
+ print
path = os.path.join(self.original_workdir, "failed_test")
if os.path.isdir(path):
- shutil.rmtree(path, ignore_errors=0)
+ shutil.rmtree(path, ignore_errors=False)
elif os.path.exists(path):
- raise "The path " + path + " already exists and is not directory";
+ raise "Path " + path + " already exists and is not a directory";
shutil.copytree(self.workdir, path)
- if condition:
- at = TestCmd.caller(traceback.extract_stack(), 0)
- annotation("stacktrace", at)
- sys.exit(1)
-
+ at = TestCmd.caller(traceback.extract_stack(), 0)
+ annotation("stacktrace", at)
+ sys.exit(1)
+
# A number of methods below check expectations with actual difference
- # between directory trees before and after build.
- # All the 'expect*' methods require exact names to be passed.
- # All the 'ignore*' methods allow wildcards.
+ # between directory trees before and after a build. All the 'expect*'
+ # methods require exact names to be passed. All the 'ignore*' methods allow
+ # wildcards.
- # All names can be lists, which are taken to be directory components
- def expect_addition(self, names):
+ # All names can be lists, which are taken to be directory components.
+ def expect_addition(self, names):
for name in self.adjust_names(names):
- try:
- glob_remove(self.unexpected_difference.added_files,name)
- except:
- print "File %s not added as expected" % (name,)
- self.fail_test(1)
+ try:
+ glob_remove(self.unexpected_difference.added_files,name)
+ except:
+ print "File %s not added as expected" % name
+ self.fail_test(1)
def ignore_addition(self, wildcard):
self.ignore_elements(self.unexpected_difference.added_files, wildcard)
def expect_removal(self, names):
for name in self.adjust_names(names):
- try:
- glob_remove(self.unexpected_difference.removed_files,name)
- except:
- print "File %s not removed as expected" % (name,)
- self.fail_test(1)
+ try:
+ glob_remove(self.unexpected_difference.removed_files,name)
+ except:
+ print "File %s not removed as expected" % name
+ self.fail_test(1)
def ignore_removal(self, wildcard):
self.ignore_elements(self.unexpected_difference.removed_files, wildcard)
def expect_modification(self, names):
for name in self.adjust_names(names):
- try:
- glob_remove(self.unexpected_difference.modified_files,name)
- except:
- print "File %s not modified as expected" % (name,)
- self.fail_test(1)
+ try:
+ glob_remove(self.unexpected_difference.modified_files,name)
+ except:
+ print "File %s not modified as expected" % name
+ self.fail_test(1)
def ignore_modification(self, wildcard):
self.ignore_elements(self.unexpected_difference.modified_files, wildcard)
def expect_touch(self, names):
-
d = self.unexpected_difference
for name in self.adjust_names(names):
-
- # We need to check in both touched and modified files.
- # The reason is that:
- # (1) for windows binaries often have slight
- # differences even with identical inputs
- # (2) Intel's compiler for Linux has the same behaviour
+ # We need to check both touched and modified files. The reason is
+ # that:
+ # (1) Windows binaries such as obj, exe or dll files have slight
+ # differences even with identical inputs due to Windows PE
+ # format headers containing an internal timestamp.
+ # (2) Intel's compiler for Linux has the same behaviour.
filesets = [d.modified_files, d.touched_files]
while filesets:
@@ -541,11 +564,9 @@
filesets.pop()
if not filesets:
- annotation("reason",
- "File %s not touched as expected" % (name,))
+ annotation("failure", "File %s not touched as expected" % name)
self.fail_test(1)
-
def ignore_touch(self, wildcard):
self.ignore_elements(self.unexpected_difference.touched_files, wildcard)
@@ -558,68 +579,72 @@
def expect_nothing(self, names):
for name in self.adjust_names(names):
if name in self.difference.added_files:
- annotation("reason",
- "File %s is added, but no action was expected" % (name,))
+ annotation("failure",
+ "File %s added, but no action was expected" % name)
self.fail_test(1)
if name in self.difference.removed_files:
- annotation("reason",
- "File %s is removed, but no action was expected" % (name,))
+ annotation("failure",
+ "File %s removed, but no action was expected" % name)
self.fail_test(1)
pass
if name in self.difference.modified_files:
- annotation("reason",
- "File %s is modified, but no action was expected" % (name,))
+ annotation("failure",
+ "File %s modified, but no action was expected" % name)
self.fail_test(1)
if name in self.difference.touched_files:
- annotation("reason",
- "File %s is touched, but no action was expected" % (name,))
+ annotation("failure",
+ "File %s touched, but no action was expected" % name)
self.fail_test(1)
def expect_nothing_more(self):
-
- # not totally sure about this change, but I don't see a good alternative
+ # Not totally sure about this change, but I don't see a good
+ # alternative.
if windows:
- self.ignore('*.ilk') # msvc incremental linking files
- self.ignore('*.pdb') # msvc program database files
- self.ignore('*.rsp') # response files
- self.ignore('*.tds') # borland debug symbols
- self.ignore('*.manifest') # msvc DLL manifests
+ self.ignore('*.ilk') # MSVC incremental linking files.
+ self.ignore('*.pdb') # MSVC program database files.
+ self.ignore('*.rsp') # Response files.
+ self.ignore('*.tds') # Borland debug symbols.
+ self.ignore('*.manifest') # MSVC DLL manifests.
- # debug builds of bjam built with gcc produce this profiling data
+ # Debug builds of bjam built with gcc produce this profiling data.
self.ignore('gmon.out')
self.ignore('*/gmon.out')
-
+
if not self.unexpected_difference.empty():
- print 'FAILED'
- print '------- The following changes were unexpected ------- '
- self.unexpected_difference.pprint()
- self.fail_test(1)
+ print 'FAILED'
+ print '------- The following changes were unexpected -------'
+ self.unexpected_difference.pprint()
+ self.fail_test(1)
- def _expect_line(self, content, expected):
+ def __expect_line(self, content, expected, expected_to_exist):
expected = expected.strip()
lines = content.splitlines()
- found = 0
+ found = False
for line in lines:
line = line.strip()
if fnmatch.fnmatch(line, expected):
- found = 1
+ found = True
break
- if not found:
- print "Did not found expected line in output:"
- print expected
- print "The output was:"
- print content
+ if expected_to_exist and not found:
+ annotation( "failure",
+ "Did not find expected line:\n%s\nin output:\n%s" %
+ (expected, content))
+ self.fail_test(1)
+ if not expected_to_exist and found:
+ annotation( "failure",
+ "Found an unexpected line:\n%s\nin output:\n%s" %
+ (expected, content))
self.fail_test(1)
- def expect_output_line(self, expected):
- self._expect_line(self.stdout(), expected)
+ def expect_output_line(self, line, expected_to_exist=True):
+ self.__expect_line(self.stdout(), line, expected_to_exist)
- def expect_content_line(self, name, expected):
- content = self._read_file(name)
- self._expect_line(content, expected)
+ def expect_content_line(self, name, line, expected_to_exist=True):
+ content = self.__read_file(name)
+ self.__expect_line(content, line, expected_to_exist)
- def _read_file(self, name, exact=0):
+ def __read_file(self, name, exact=False):
name = self.adjust_names(name)[0]
result = ""
try:
@@ -631,13 +656,12 @@
print "Note: could not open file", name
self.fail_test(1)
return result
-
- def expect_content(self, name, content, exact=0):
- actual = self._read_file(name, exact)
+ def expect_content(self, name, content, exact=False):
+ actual = self.__read_file(name, exact)
content = string.replace(content, "$toolset", self.toolset+"*")
- matched = 0
+ matched = False
if exact:
matched = fnmatch.fnmatch(actual,content)
else:
@@ -662,7 +686,6 @@
def maybe_do_diff(self, actual, expected):
if os.environ.has_key("DO_DIFF") and os.environ["DO_DIFF"] != '':
-
e = tempfile.mktemp("expected")
a = tempfile.mktemp("actual")
open(e, "w").write(expected)
@@ -671,40 +694,39 @@
if os.system("diff -u " + e + " " + a):
print "Unable to compute difference: diff -u %s %s" % (e,a)
os.unlink(e)
- os.unlink(a)
+ os.unlink(a)
else:
- print "Set environmental variable 'DO_DIFF' to examine difference."
+ print "Set environmental variable 'DO_DIFF' to examine difference."
- # Helpers
+ # Helpers.
def mul(self, *arguments):
if len(arguments) == 0:
- return None
- else:
- here = arguments[0]
- if type(here) == type(''):
- here = [here]
-
- if len(arguments) > 1:
- there = apply(self.mul, arguments[1:])
- result = []
- for i in here:
- for j in there:
- result.append(i + j)
- return result
- else:
- return here
+ return None
+ here = arguments[0]
+ if type(here) == type(''):
+ here = [here]
+
+ if len(arguments) > 1:
+ there = apply(self.mul, arguments[1:])
+ result = []
+ for i in here:
+ for j in there:
+ result.append(i + j)
+ return result
+ return here
- # Internal methods
+ # Internal methods.
def ignore_elements(self, list, wildcard):
- """Removes in-place, element of 'list' that match the given wildcard."""
+ """Removes in-place, element of 'list' that match the given wildcard.
+ """
list[:] = filter(lambda x, w=wildcard: not fnmatch.fnmatch(x, w), list)
def adjust_lib_name(self, name):
global lib_prefix
result = name
-
+
pos = string.rfind(name, ".")
if pos != -1:
suffix = name[pos:]
@@ -718,15 +740,15 @@
if dll_prefix:
tail = "lib" + tail
result = os.path.join(head, tail)
- # If we try to use this name in Jamfile, we better
- # convert \ to /, as otherwise we'd have to quote \.
+ # If we want to use this name in a Jamfile, we better convert \ to /, as
+ # otherwise we'd have to quote \.
result = string.replace(result, "\\", "/")
return result
-
+
def adjust_suffix(self, name):
if not self.translate_suffixes:
return name
-
+
pos = string.rfind(name, ".")
if pos != -1:
suffix = name[pos:]
@@ -739,11 +761,11 @@
return name + suffix
- # Acceps either string of list of string and returns list of strings
+ # Accepts either a string or a list of strings and returns a list of strings.
# Adjusts suffixes on all names.
def adjust_names(self, names):
if type(names) == types.StringType:
- names = [names]
+ names = [names]
r = map(self.adjust_lib_name, names)
r = map(self.adjust_suffix, r)
r = map(lambda x, t=self.toolset: string.replace(x, "$toolset", t+"*"), r)
@@ -759,29 +781,27 @@
def wait_for_time_change(self):
while 1:
f = time.time();
- # In fact, I'm not sure why "+ 2" as opposed to "+ 1" is
- # needed but empirically, "+ 1" sometimes causes 'touch'
- # and other functions not to bump file time enough for
- # rebuild to happen.
+ # In fact, I'm not sure why "+ 2" as opposed to "+ 1" is needed but
+ # empirically, "+ 1" sometimes causes 'touch' and other functions
+ # not to bump the file time enough for a rebuild to happen.
if math.floor(f) < math.floor(self.last_build_time) + 2:
time.sleep(0.1)
else:
break
-
-
+
class List:
def __init__(self, s=""):
elements = []
- if isinstance(s, type("")):
- # Have to handle espaced spaces correctly
+ if isinstance(s, type("")):
+            # Have to handle escaped spaces correctly.
s = string.replace(s, "\ ", '\001')
elements = string.split(s)
else:
elements = s;
-
- self.l = []
+
+ self.l = []
for e in elements:
self.l.append(string.replace(e, '\001', ' '))
@@ -805,7 +825,7 @@
+ repr(string.join(self.l, ' '))
+ ')')
- def __mul__(self, other):
+ def __mul__(self, other):
result = List()
if not isinstance(other, List):
other = List(other)
@@ -816,7 +836,7 @@
def __rmul__(self, other):
if not isinstance(other, List):
- other = List(other)
+ other = List(other)
return List.__mul__(other, self)
def __add__(self, other):
@@ -824,12 +844,8 @@
result.l = self.l[:] + other.l[:]
return result
-# quickie tests. Should use doctest instead.
+# Quickie tests. Should use doctest instead.
if __name__ == '__main__':
assert str(List("foo bar") * "/baz") == "['foo/baz', 'bar/baz']"
assert repr("foo/" * List("bar baz")) == "__main__.List('foo/bar foo/baz')"
print 'tests passed'
-
-
-
-
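
For reference, the quickie tests above exercise the List cross product that the
test scripts use in expressions such as List("bin/$toolset/debug/") * "a.exe a.obj".
A minimal standalone Python sketch of those semantics follows; the helper name
cross_join is made up for illustration and is not part of BoostBuild:

# Illustrative sketch of the List cross-product semantics (not BoostBuild code).
def cross_join(prefixes, suffixes):
    # Accept either a space-separated string or a list of strings.
    if isinstance(prefixes, str):
        prefixes = prefixes.split()
    if isinstance(suffixes, str):
        suffixes = suffixes.split()
    # Concatenate every prefix with every suffix, preserving order.
    return [p + s for p in prefixes for s in suffixes]

assert cross_join("foo bar", "/baz") == ["foo/baz", "bar/baz"]
assert cross_join("bin/$toolset/debug/", "a.exe a.obj") == \
    ["bin/$toolset/debug/a.exe", "bin/$toolset/debug/a.obj"]
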
Modified: branches/release/tools/build/v2/test/abs_workdir.py
==============================================================================
--- branches/release/tools/build/v2/test/abs_workdir.py (original)
+++ branches/release/tools/build/v2/test/abs_workdir.py 2008-03-15 14:55:28 EDT (Sat, 15 Mar 2008)
@@ -2,30 +2,25 @@
# Testing whether we may run a test in a absolute directories
# There are no tests for temporary directories as this is implictly tested in a lot of other cases
-from BoostBuild import Tester
-import os, string
+import BoostBuild
+import os
+import string
-t = Tester(
- executable="jam"
- , workdir = os.getcwd()
- , pass_toolset=0
- )
+t = BoostBuild.Tester(arguments="pwd", executable="jam", workdir=os.getcwd(),
+ pass_toolset=0)
-jamfile="""
+t.write("Jamroot.jam", """
actions print_pwd { pwd ; }
print_pwd pwd ;
Always pwd ;
-"""
+""")
-t.write("Jamfile", jamfile)
-t.write("project-root.jam", " ")
-
-t.run_build_system(status=0, extra_args = "pwd")
+t.run_build_system(status=0)
if 'TMP' in os.environ:
- tmp_dir =os.environ.get('TMP')
+ tmp_dir = os.environ.get('TMP')
else:
- tmp_dir ="/tmp"
+ tmp_dir = "/tmp"
if string.rfind(t.stdout(), tmp_dir) != -1:
t.fail_test(1)
@@ -33,7 +28,7 @@
if string.rfind(t.stdout(), 'build/v2/test') == -1:
t.fail_test(1)
-t.run_build_system(status=1, extra_args = "pwd", subdir ="/must/fail/with/absolute/path",
- stderr=None)
+t.run_build_system(status=1, subdir="/must/fail/with/absolute/path",
+ stderr=None)
t.cleanup
Modified: branches/release/tools/build/v2/test/alias.py
==============================================================================
--- branches/release/tools/build/v2/test/alias.py (original)
+++ branches/release/tools/build/v2/test/alias.py 2008-03-15 14:55:28 EDT (Sat, 15 Mar 2008)
@@ -1,17 +1,25 @@
#!/usr/bin/python
-# Copyright 2003 Dave Abrahams
-# Copyright 2003 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+# Copyright 2003 Dave Abrahams
+# Copyright 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
from BoostBuild import Tester, List
-t = Tester()
-# Test that top-level project can affect build dir
-t.write("project-root.jam", "")
-t.write("Jamfile", """
+################################################################################
+#
+# test_alias_rule()
+# -----------------
+#
+################################################################################
+
+def test_alias_rule(t):
+ """Basic alias rule test.
+ """
+
+ t.write("Jamroot.jam", """
exe a : a.cpp ;
exe b : b.cpp ;
exe c : c.cpp ;
@@ -21,60 +29,78 @@
alias src : s.cpp ;
exe hello : hello.cpp src ;
+""")
+ t.write("a.cpp", "int main() { return 0; }\n")
+ t.copy("a.cpp", "b.cpp")
+ t.copy("a.cpp", "c.cpp")
+ t.copy("a.cpp", "hello.cpp")
+ t.write("s.cpp", "")
+
+ # Check that targets to which "bin1" refers are updated, and only those.
+ t.run_build_system("bin1")
+ t.expect_addition(List("bin/$toolset/debug/") * "a.exe a.obj")
+ t.expect_nothing_more()
+
+ # Try again with "bin2"
+ t.run_build_system("bin2")
+ t.expect_addition(List("bin/$toolset/debug/") * "b.exe b.obj")
+ t.expect_nothing_more()
+
+ # Try building everything, making sure 'hello' target is created.
+ t.run_build_system()
+ t.expect_addition(List("bin/$toolset/debug/") * "hello.exe hello.obj")
+ t.expect_addition("bin/$toolset/debug/s.obj")
+ t.expect_addition(List("bin/$toolset/debug/") * "c.exe c.obj")
+ t.expect_nothing_more()
+
+
+################################################################################
+#
+# test_alias_source_usage_requirements()
+# --------------------------------------
+#
+################################################################################
+
+def test_alias_source_usage_requirements(t):
+ """Check whether usage requirements are propagated via "alias". In case they
+ are not, linking will fail as there will be no main() function defined
+ anywhere in the source.
+ """
+
+ t.write("Jamroot.jam", """
+lib l : l.cpp : : : <define>WANT_MAIN ;
+alias la : l ;
+exe main : main.cpp la ;
""")
-t.write("a.cpp", "int main() { return 0; }\n")
-t.copy("a.cpp", "b.cpp")
-t.copy("a.cpp", "c.cpp")
-t.copy("a.cpp", "hello.cpp")
-t.write("s.cpp", "")
-
-# Check that targets to which "bin1" refers are updated,
-# and only those.
-t.run_build_system("bin1")
-t.ignore("*.tds")
-t.expect_addition(List("bin/$toolset/debug/") * "a.exe a.obj")
-t.expect_nothing_more()
-
-# Try again with "bin2"
-t.run_build_system("bin2")
-t.ignore("*.tds")
-t.expect_addition(List("bin/$toolset/debug/") * "b.exe b.obj")
-t.expect_nothing_more()
-
-# Try building everything, making sure 'hello' target is
-# created
-t.run_build_system()
-t.ignore("*.tds")
-t.expect_addition("bin/$toolset/debug/hello.exe")
-
-# Regression test.
-# Check if usage requirements are propagated via "alias"
-
-t.write("l.cpp", """
+
+ t.write("l.cpp", """
void
#if defined(_WIN32)
__declspec(dllexport)
#endif
foo() {}
-
""")
-t.write("Jamfile", """
-lib l : l.cpp : : : <define>WANT_MAIN ;
-alias la : l ;
-exe main : main.cpp la ;
-""")
-
-t.write("main.cpp", """
+ t.write("main.cpp", """
#ifdef WANT_MAIN
int main() { return 0; }
#endif
-
""")
-t.write("project-root.jam", "")
+ t.run_build_system()
+
+
+################################################################################
+#
+# main()
+# ------
+#
+################################################################################
+
+t = Tester()
-t.run_build_system()
+test_alias_rule(t)
+test_alias_source_usage_requirements(t)
t.cleanup()
Modified: branches/release/tools/build/v2/test/conditionals.py
==============================================================================
--- branches/release/tools/build/v2/test/conditionals.py (original)
+++ branches/release/tools/build/v2/test/conditionals.py 2008-03-15 14:55:28 EDT (Sat, 15 Mar 2008)
@@ -1,55 +1,47 @@
#!/usr/bin/python
-# Copyright 2003 Dave Abrahams
-# Copyright 2002, 2003, 2004 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Test conditional properties
-
-from BoostBuild import Tester, List
-import os
-from string import strip
-
-t = Tester()
-
-# Arrange a project which will build only if
-# 'a.cpp' is compiled with "STATIC" define.
-t.write("project-root.jam", "import gcc ;")
+# Copyright 2003 Dave Abrahams
+# Copyright 2002, 2003, 2004 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Test conditional properties.
+
+import BoostBuild
+
+t = BoostBuild.Tester()
+
+# Arrange a project which will build only if 'a.cpp' is compiled with "STATIC"
+# define.
t.write("a.cpp", """
#ifdef STATIC
-int main() { return 0; }
+int main() { return 0; }
#endif
""")
-t.write("Jamfile", "exe a : a.cpp : <link>static:<define>STATIC ;")
+
+# Test conditionals in target requirements.
+t.write("Jamroot.jam", "exe a : a.cpp : <link>static:<define>STATIC ;")
t.run_build_system("link=static")
t.expect_addition("bin/$toolset/debug/link-static/a.exe")
+t.rm("bin")
-t.write("Jamfile", """
+# Test conditionals in project requirements.
+t.write("Jamroot.jam", """
project : requirements <link>static:<define>STATIC ;
exe a : a.cpp ;
""")
-t.rm("bin")
t.run_build_system("link=static")
t.expect_addition("bin/$toolset/debug/link-static/a.exe")
+t.rm("bin")
-# Regression test for a bug found by Ali Azarbayejani.
-# Conditionals inside usage requirement were not evaluated.
-# This breaks
-
-t.write("Jamfile", """
+# Regression test for a bug found by Ali Azarbayejani. Conditionals inside usage
+# requirements were not being evaluated.
+t.write("Jamroot.jam", """
lib l : l.cpp : : : <link>static:<define>STATIC ;
exe a : a.cpp l ;
""")
-t.write("l.cpp", "")
-t.write("l.cpp", """
-int i;
-""")
-
-t.rm("bin")
+t.write("l.cpp", "int i;")
t.run_build_system("link=static")
t.expect_addition("bin/$toolset/debug/link-static/a.exe")
-
-
t.cleanup()
Modified: branches/release/tools/build/v2/test/conditionals2.py
==============================================================================
--- branches/release/tools/build/v2/test/conditionals2.py (original)
+++ branches/release/tools/build/v2/test/conditionals2.py 2008-03-15 14:55:28 EDT (Sat, 15 Mar 2008)
@@ -1,35 +1,33 @@
#!/usr/bin/python
-# Copyright 2003 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+# Copyright 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
# Regression test: it was possible that due to evaluation of conditional
# requirements, two different values of non-free features were present in
# property set.
-from BoostBuild import Tester, List
+import BoostBuild
-t = Tester()
-
-t.write("project-root.jam", "")
+t = BoostBuild.Tester()
t.write("a.cpp", "")
-t.write("Jamfile", """
-import feature : feature ;
-import common : file-creation-command ;
+t.write("Jamroot.jam", """
+import feature ;
+import common ;
-feature the_feature : false true : propagated ;
+feature.feature the_feature : false true : propagated ;
rule maker ( targets * : sources * : properties * )
{
- if <the_feature>false in $(properties)
- && <the_feature>true in $(properties)
+ if <the_feature>false in $(properties) &&
+ <the_feature>true in $(properties)
{
EXIT "Oops, two different values of non-free feature" ;
- }
- CMD on $(targets) = [ file-creation-command ] ;
+ }
+ CMD on $(targets) = [ common.file-creation-command ] ;
}
actions maker
@@ -37,12 +35,9 @@
$(CMD) $(<) ;
}
-make a : a.cpp : maker : <variant>debug:<the_feature>true ;
+make a : a.cpp : maker : <variant>debug:<the_feature>true ;
""")
t.run_build_system()
t.cleanup()
-
-
-
Modified: branches/release/tools/build/v2/test/conditionals3.py
==============================================================================
--- branches/release/tools/build/v2/test/conditionals3.py (original)
+++ branches/release/tools/build/v2/test/conditionals3.py 2008-03-15 14:55:28 EDT (Sat, 15 Mar 2008)
@@ -1,19 +1,17 @@
#!/usr/bin/python
-# Copyright 2003 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+# Copyright 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-# Test that conditional properties work, even if property is free, and
-# value includes colon.
-from BoostBuild import Tester, List
+# Test that conditional properties work, even if property is free, and value
+# includes a colon.
+import BoostBuild
-t = Tester()
+t = BoostBuild.Tester()
-# Create the needed files
-t.write("project-root.jam", "")
-t.write("Jamfile", """
+t.write("Jamroot.jam", """
exe hello : hello.cpp : <variant>debug:<define>CLASS=Foo::Bar ;
""")
t.write("hello.cpp", """
@@ -21,12 +19,11 @@
int main()
{
CLASS c;
+ c; // Disables the unused variable warning.
return 0;
}
-
""")
-# Don't check stderr, which can include warning about unused 'c'.
t.run_build_system(stdout=None, stderr=None)
t.expect_addition("bin/$toolset/debug/hello.exe")
Modified: branches/release/tools/build/v2/test/double_loading.py
==============================================================================
--- branches/release/tools/build/v2/test/double_loading.py (original)
+++ branches/release/tools/build/v2/test/double_loading.py 2008-03-15 14:55:28 EDT (Sat, 15 Mar 2008)
@@ -10,37 +10,22 @@
t = Tester()
# Regression test for double loading of the same Jamfile.
-t.write("Jamfile", """
-build-project subdir ;
-""")
-
-t.write("project-root.jam", """
-""")
-
-t.write("subdir/Jamfile", """
-ECHO "Loaded subdir" ;
-""")
+t.write("Jamfile.jam", "build-project subdir ;")
+t.write("Jamroot.jam", "" )
+t.write("subdir/Jamfile.jam", 'ECHO "Loaded subdir" ;')
t.run_build_system(subdir="subdir")
-t.fail_test(string.count(t.stdout(), "Loaded subdir") != 1)
+t.expect_output_line("Loaded subdir")
-# Regression test for a more contrived case. The top-level
-# jamfile refers to subdir via use-project, while subdir's
-# Jamfile is being loaded. The motivation why use-project
-# referring to subprojects are usefull can be found at
-# http://article.gmane.org/gmane.comp.lib.boost.build/3906/
-t.write("Jamfile", """
-use-project /subdir : subdir ;
-""")
-
-t.write("project-root.jam", """
-""")
-
-t.write("subdir/Jamfile", """
-project subdir ;
-""")
+
+# Regression test for a more contrived case. The top-level Jamfile refers to
+# subdir via use-project, while subdir's Jamfile is being loaded. The motivation
+# why use-project referring to subprojects is useful can be found at
+# http://article.gmane.org/gmane.comp.lib.boost.build/3906/
+t.write("Jamfile.jam", "use-project /subdir : subdir ;")
+t.write("Jamroot.jam", "" )
+t.write("subdir/Jamfile.jam", "project subdir ;")
t.run_build_system(subdir="subdir");
t.cleanup()
-
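
The check above now uses the expect_output_line helper whose signature was
extended earlier in this change set (see the BoostBuild.py hunk) with an
expected_to_exist flag. A minimal usage sketch, assuming the BoostBuild module
is importable and a toolset is configured; the Jamroot contents and the
"Never printed" line are made up for illustration:

import BoostBuild

t = BoostBuild.Tester()
t.write("Jamroot.jam", 'ECHO "Loaded root" ;')
t.run_build_system()
# This line must be present in the build system output.
t.expect_output_line("Loaded root")
# This line must be absent from the output (expected_to_exist=False).
t.expect_output_line("Never printed", expected_to_exist=False)
t.cleanup()
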
Modified: branches/release/tools/build/v2/test/empty.jam
==============================================================================
--- branches/release/tools/build/v2/test/empty.jam (original)
+++ branches/release/tools/build/v2/test/empty.jam 2008-03-15 14:55:28 EDT (Sat, 15 Mar 2008)
@@ -1,5 +1,5 @@
-# This file is empty; it just suppresses warnings
-
# Copyright 2001 Dave Abrahams
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# This file is empty; it just suppresses warnings
Modified: branches/release/tools/build/v2/test/inherit_toolset.py
==============================================================================
--- branches/release/tools/build/v2/test/inherit_toolset.py (original)
+++ branches/release/tools/build/v2/test/inherit_toolset.py 2008-03-15 14:55:28 EDT (Sat, 15 Mar 2008)
@@ -13,10 +13,10 @@
""")
t.write("yfc1.jam", """
-import toolset ;
-import generators ;
+import feature : extend ;
+import generators : register-standard ;
-toolset.register yfc1 ;
+feature.extend toolset : yfc1 ;
rule init ( )
{
@@ -34,14 +34,13 @@
{
yfc1-link
}
-
-
""")
t.write("yfc2.jam", """
-import toolset ;
+import feature : extend ;
+import toolset : inherit ;
-toolset.register yfc2 ;
+feature.extend toolset : yfc2 ;
toolset.inherit yfc2 : yfc1 ;
rule init ( )
Modified: branches/release/tools/build/v2/test/library_property.py
==============================================================================
--- branches/release/tools/build/v2/test/library_property.py (original)
+++ branches/release/tools/build/v2/test/library_property.py 2008-03-15 14:55:28 EDT (Sat, 15 Mar 2008)
@@ -11,7 +11,7 @@
# exe a : a.cpp helper ;
# obj helper : helper.cpp : <optimization>off ;
#
-# caused 'foo' to be built with with and without optimization.
+# caused 'foo' to be built with and without optimization.
from BoostBuild import Tester, List
# Create a temporary working directory
Modified: branches/release/tools/build/v2/test/module-actions/bootstrap.jam
==============================================================================
--- branches/release/tools/build/v2/test/module-actions/bootstrap.jam (original)
+++ branches/release/tools/build/v2/test/module-actions/bootstrap.jam 2008-03-15 14:55:28 EDT (Sat, 15 Mar 2008)
@@ -1,35 +1,21 @@
-# Copyright 2003 Dave Abrahams
-# Copyright 2006 Rene Rivera
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Demonstration that module variables have the right effect in actions
-
-# Set a variable which says how to dump a file to stdout
-if $(NT)
-{
- CATENATE = type ;
-}
-else
-{
- CATENATE = cat ;
-}
+# Copyright 2003 Dave Abrahams
+# Copyright 2006 Rene Rivera
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Demonstration that module variables have the right effect in actions.
-# invoke the given action rule `act' to build target from sources
-rule do-make ( target : sources * : act )
-{
- DEPENDS $(target) : $(sources) ;
- $(act) $(target) : $(sources) ;
-}
-# top-level version of do-make which causes target to be built by
-# default
+# Top-level rule that causes a target to be built by invoking the specified
+# action.
rule make ( target : sources * : act )
{
DEPENDS all : $(target) ;
- do-make $(target) : $(sources) : $(act) ;
+ DEPENDS $(target) : $(sources) ;
+ $(act) $(target) : $(sources) ;
}
+
X1 = X1-global ;
X2 = X2-global ;
X3 = X3-global ;
@@ -37,15 +23,15 @@
module A
{
X1 = X1-A ;
-
+
rule act ( target )
{
NOTFILE $(target) ;
ALWAYS $(target) ;
}
-
+
actions act { echo A.act $(<): $(X1) $(X2) $(X3) }
-
+
make t1 : : A.act ;
make t2 : : A.act ;
make t3 : : A.act ;
@@ -54,9 +40,9 @@
module B
{
X2 = X2-B ;
-
+
actions act { echo B.act $(<): $(X1) $(X2) $(X3) }
-
+
make t1 : : B.act ;
make t2 : : B.act ;
make t3 : : B.act ;
Modified: branches/release/tools/build/v2/test/module_actions.py
==============================================================================
--- branches/release/tools/build/v2/test/module_actions.py (original)
+++ branches/release/tools/build/v2/test/module_actions.py 2008-03-15 14:55:28 EDT (Sat, 15 Mar 2008)
@@ -6,39 +6,48 @@
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-from BoostBuild import Tester, List
+import BoostBuild
import os
import re
spaces_re = re.compile("\ \ +")
trailing_spaces_re = re.compile("\ +\n")
-t = Tester(pass_toolset=0)
+t = BoostBuild.Tester("-d+1", pass_toolset=0)
t.set_tree('module-actions')
-expected = r'''A.act t1: X1-t1
+# Note that the following string contains some trailing spaces that should not
+# be removed.
+expected_output = """...found 4 targets...
+...updating 3 targets...
+A.act t1
+A.act t1: X1-t1
+B.act t1
B.act t1: X1-t1 X2-B
+act t1
act t1: X1-t1 X2-global X3-global
+A.act t2
A.act t2: X1-A X2-t2
+B.act t2
B.act t2: X2-t2
+act t2
act t2: X1-global X2-t2 X3-global
+A.act t3
A.act t3: X1-A X3-t3
+B.act t3
B.act t3: X2-B X3-t3
+act t3
act t3: X1-global X2-global X3-t3
-'''
+...updated 3 targets...
+"""
-# On Unixes, call to 'echo 1 2 3' produces '1 2 3' (note spacing)
+# On Unixes, call to 'echo 1 2 3' produces '1 2 3' (note the spacing)
# Accomodate for that fact.
if os.name != 'nt':
- expected = re.sub(spaces_re, " ", expected)
- expected = re.sub(trailing_spaces_re, "\n", expected)
-
-# We expect t5 and t7's output to be dumped to stdout
-t.run_build_system(
- stdout = expected
-)
+ expected_output = re.sub(spaces_re, " ", expected_output)
+ expected_output = re.sub(trailing_spaces_re, "\n", expected_output)
+t.run_build_system(stdout=expected_output)
t.expect_nothing_more()
t.cleanup()
-
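
As the comment above notes, echoing the variable expansions on Unix separates
the words with single spaces, so the expected output is normalized with the
same two regular expressions before comparing. A standalone sketch of that
normalization; the sample string is contrived:

import re

spaces_re = re.compile(r"\ \ +")           # a run of two or more spaces
trailing_spaces_re = re.compile(r"\ +\n")  # spaces just before a newline

sample = "A.act t1: X1-t1  X2-B \n"
sample = re.sub(spaces_re, " ", sample)
sample = re.sub(trailing_spaces_re, "\n", sample)
assert sample == "A.act t1: X1-t1 X2-B\n"
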
Modified: branches/release/tools/build/v2/test/project_test3.py
==============================================================================
--- branches/release/tools/build/v2/test/project_test3.py (original)
+++ branches/release/tools/build/v2/test/project_test3.py 2008-03-15 14:55:28 EDT (Sat, 15 Mar 2008)
@@ -16,7 +16,7 @@
os.remove("project-root.jam")
t.run_build_system(status=1, stdout=
"""error: Could not find parent for project at '.'
-error: Did not find Jamfile or project-root.jam in any parent directory.
+error: Did not find Jamfile.jam or Jamroot.jam in any parent directory.
""")
Modified: branches/release/tools/build/v2/test/readme.txt
==============================================================================
--- branches/release/tools/build/v2/test/readme.txt (original)
+++ branches/release/tools/build/v2/test/readme.txt 2008-03-15 14:55:28 EDT (Sat, 15 Mar 2008)
@@ -1,7 +1,6 @@
-Comprehensive tests for Boost.Build v2; requires Python. To test, execute:
-
- python test_all.py
-
# Copyright 2002 Dave Abrahams
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+ See test_system.html for detailed information on using the Boost Build test
+system.
Modified: branches/release/tools/build/v2/test/searched_lib.py
==============================================================================
--- branches/release/tools/build/v2/test/searched_lib.py (original)
+++ branches/release/tools/build/v2/test/searched_lib.py 2008-03-15 14:55:28 EDT (Sat, 15 Mar 2008)
@@ -1,19 +1,19 @@
#!/usr/bin/python
-# Copyright 2003 Dave Abrahams
-# Copyright 2003, 2004, 2005, 2006 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+# Copyright 2003 Dave Abrahams
+# Copyright 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
# Test usage of searched-libs: one which are found via -l
-# switch to the linker/compiler.
+# switch to the linker/compiler.
from BoostBuild import Tester, get_toolset
import string
import os
t = Tester()
-# To start with, we have to prepate a library to link with
+# To start with, we have to prepare a library to link with.
t.write("lib/project-root.jam", "")
t.write("lib/Jamfile", "lib test_lib : test_lib.cpp ;")
t.write("lib/test_lib.cpp", """
@@ -28,7 +28,6 @@
# Auto adjusting of suffixes does not work, since we need to
# change dll to lib.
-#
if (os.name == 'nt' or os.uname()[0].lower().startswith('cygwin')) and get_toolset() != 'gcc':
t.copy("lib/bin/$toolset/debug/test_lib.implib", "lib/test_lib.implib")
t.copy("lib/bin/$toolset/debug/test_lib.dll", "lib/test_lib.dll")
@@ -104,7 +103,7 @@
# A regression test: <library>property referring to
# searched-lib was mishandled. As the result, we were
# putting target name to the command line!
-# Note that
+# Note that
# g++ ...... <.>z
# works nicely in some cases, sending output from compiler
# to file 'z'.
Modified: branches/release/tools/build/v2/test/tag.py
==============================================================================
--- branches/release/tools/build/v2/test/tag.py (original)
+++ branches/release/tools/build/v2/test/tag.py 2008-03-15 14:55:28 EDT (Sat, 15 Mar 2008)
@@ -5,43 +5,70 @@
# all copies. This software is provided "as is" without express or implied
# warranty, and with no claim as to its suitability for any purpose.
-from BoostBuild import Tester, List
-import string
+import BoostBuild
-t = Tester()
-t.write("project-root.jam", "")
-t.write("Jamfile", """
+################################################################################
+#
+# test_folder_with_dot_in_name()
+# ------------------------------
+#
+################################################################################
+
+def test_folder_with_dot_in_name(t):
+ """ Regression test: the 'tag' feature did not work in directories that had
+ a dot in their name.
+ """
+
+ t.write("version-1.32.0/Jamroot.jam", """
+project test : requirements <tag>@$(__name__).tag ;
+
+rule tag ( name : type ? : property-set )
+{
+ # Do nothing, just make sure the rule is invoked OK.
+ ECHO "The tag rule was invoked" ;
+}
+exe a : a.cpp ;
+""")
+ t.write("version-1.32.0/a.cpp", "int main() { return 0; }\n")
+
+ t.run_build_system(subdir="version-1.32.0")
+ t.expect_addition("version-1.32.0/bin/$toolset/debug/a.exe")
+ t.expect_output_line("The tag rule was invoked")
+
+
+################################################################################
+#
+# test_tag_property()
+# -------------------
+#
+################################################################################
+
+def test_tag_property(t):
+ """Basic tag property test.
+ """
+
+ t.write("Jamroot.jam", """
import virtual-target ;
+
rule tag ( name : type ? : property-set )
{
local tags ;
- local v = [ $(property-set).get <variant> ] ;
- if $(v) = debug
- {
- tags += d ;
- }
- else if $(v) = release
- {
- tags += r ;
- }
-
- local l = [ $(property-set).get <link> ] ;
- if $(l) = shared
+ switch [ $(property-set).get <variant> ]
{
- tags += s ;
+ case debug : tags += d ;
+ case release : tags += r ;
}
- else if $(l) = static
+ switch [ $(property-set).get <link> ]
{
- tags += t ;
+ case shared : tags += s ;
+ case static : tags += t ;
}
-
if $(tags)
{
- return [ virtual-target.add-prefix-and-suffix $(name)_$(tags:J="")
+ return [ virtual-target.add-prefix-and-suffix $(name)_$(tags:J="")
: $(type) : $(property-set) ] ;
}
-
}
# Test both fully-qualified and local name of the rule
@@ -50,57 +77,50 @@
stage c : a ;
""")
-t.write("a.cpp", """
+ t.write("a.cpp", """
int main()
{
return 0;
}
#ifdef _MSC_VER
-__declspec (dllexport) void x () {}
+__declspec (dllexport) void x () {}
#endif
""")
-file_list = \
-List("bin/$toolset/debug/a_ds.exe") + \
-List("bin/$toolset/debug/b_ds.dll") + \
-List("c/a_ds.exe") + \
-List("bin/$toolset/release/a_rs.exe") + \
-List("bin/$toolset/release/b_rs.dll") + \
-List("c/a_rs.exe") + \
-List("bin/$toolset/debug/link-static/a_dt.exe") + \
-List("bin/$toolset/debug/link-static/b_dt.lib") + \
-List("c/a_dt.exe") + \
-List("bin/$toolset/release/link-static/a_rt.exe") + \
-List("bin/$toolset/release/link-static/b_rt.lib") + \
-List("c/a_rt.exe")
-
-variants = "debug release link=static,shared"
-
-t.run_build_system(variants)
-t.expect_addition(file_list)
-
-t.run_build_system(variants + " clean")
-t.expect_removal(file_list)
-
-# Regression test: the 'tag' feature did not work in directories that
-# had dot in names.
-t.write("version-1.32.0/Jamroot", """
-project test : requirements <tag>@$(__name__).tag ;
+ file_list = \
+ BoostBuild.List("bin/$toolset/debug/a_ds.exe") + \
+ BoostBuild.List("bin/$toolset/debug/b_ds.dll") + \
+ BoostBuild.List("c/a_ds.exe") + \
+ BoostBuild.List("bin/$toolset/release/a_rs.exe") + \
+ BoostBuild.List("bin/$toolset/release/b_rs.dll") + \
+ BoostBuild.List("c/a_rs.exe") + \
+ BoostBuild.List("bin/$toolset/debug/link-static/a_dt.exe") + \
+ BoostBuild.List("bin/$toolset/debug/link-static/b_dt.lib") + \
+ BoostBuild.List("c/a_dt.exe") + \
+ BoostBuild.List("bin/$toolset/release/link-static/a_rt.exe") + \
+ BoostBuild.List("bin/$toolset/release/link-static/b_rt.lib") + \
+ BoostBuild.List("c/a_rt.exe")
+
+ variants = "debug release link=static,shared"
+
+ t.run_build_system(variants)
+ t.expect_addition(file_list)
+
+ t.run_build_system(variants + " clean")
+ t.expect_removal(file_list)
+
+
+################################################################################
+#
+# main()
+# ------
+#
+################################################################################
-rule tag ( name : type ? : property-set )
-{
- # Do nothing, just make sure the rule is invoked OK.
- ECHO "The tag rule was invoked" ;
-}
-exe a : a.cpp ;
-""")
-
-t.write("version-1.32.0/a.cpp", "int main() { return 0; }\n")
+t = BoostBuild.Tester()
-t.run_build_system(subdir="version-1.32.0")
-t.expect_addition("version-1.32.0/bin/$toolset/debug/a.exe")
-t.fail_test(string.find(t.stdout(), "The tag rule was invoked") == -1)
+test_tag_property(t)
+test_folder_with_dot_in_name(t)
t.cleanup()
-
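
The rewritten tag rule above encodes the build variant and link type into the
target name (debug and release map to d and r, shared and static to s and t),
which is why the expected file list contains names such as a_ds.exe and
b_rt.lib. A rough Python sketch of that naming scheme, for illustration only;
tagged_name is not a BoostBuild helper:

# Illustrative sketch of the name tagging scheme used by the 'tag' rule above.
def tagged_name(name, variant, link, suffix=".exe"):
    tags = {"debug": "d", "release": "r"}.get(variant, "")
    tags += {"shared": "s", "static": "t"}.get(link, "")
    if tags:
        return "%s_%s%s" % (name, tags, suffix)
    return name + suffix

assert tagged_name("a", "debug", "shared") == "a_ds.exe"
assert tagged_name("b", "release", "static", ".lib") == "b_rt.lib"
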
Modified: branches/release/tools/build/v2/test/test_all.py
==============================================================================
--- branches/release/tools/build/v2/test/test_all.py (original)
+++ branches/release/tools/build/v2/test/test_all.py 2008-03-15 14:55:28 EDT (Sat, 15 Mar 2008)
@@ -104,8 +104,14 @@
"no_type",
"chain",
"default_build",
+ "default_toolset",
"use_requirements",
"conditionals",
+ "conditionals2",
+ "conditionals3",
+ "conditionals_multiple",
+ "configuration",
+ "indirect_conditional",
"stage",
"prebuilt",
"project_dependencies",
@@ -124,10 +130,8 @@
"bad_dirname",
"c_file",
"inline",
- "conditionals2",
"property_expansion",
"loop",
- "conditionals3",
"tag",
"suffix",
"inherit_toolset",
@@ -154,7 +158,6 @@
"project_root_rule",
"resolution",
"build_file",
- "indirect_conditional",
"build_no",
"disambiguation",
"clean",
Modified: branches/release/tools/build/v2/test/test_system.html
==============================================================================
--- branches/release/tools/build/v2/test/test_system.html (original)
+++ branches/release/tools/build/v2/test/test_system.html 2008-03-15 14:55:28 EDT (Sat, 15 Mar 2008)
@@ -91,7 +91,7 @@
and scripts for automatically testing user-obversable behaviour. It uses
components from testing systems of <a href=
"http://www.scons.org">Scons</a> and <a href=
- "http://subversion.tigris.org">Subverion</a>, together with some
+ "http://subversion.tigris.org">Subversion</a>, together with some
additional functionality.</p>
<p>To run the tests you'd need:</p>
Modified: branches/release/tools/build/v2/test/timedata.py
==============================================================================
--- branches/release/tools/build/v2/test/timedata.py (original)
+++ branches/release/tools/build/v2/test/timedata.py 2008-03-15 14:55:28 EDT (Sat, 15 Mar 2008)
@@ -6,25 +6,26 @@
# This tests the build step timing facilities.
import BoostBuild
+import re
t = BoostBuild.Tester(pass_toolset=0)
-t.write('file.jam', '''
+t.write("file.jam", """
rule time
{
DEPENDS $(<) : $(>) ;
__TIMING_RULE__ on $(>) = record_time $(<) ;
DEPENDS all : $(<) ;
}
+
actions time
{
echo $(>) user: $(__USER_TIME__) system: $(__SYSTEM_TIME__)
echo timed from $(>) >> $(<)
}
-rule record_time ( target source : user : system )
+rule record_time ( target : source : start end user system )
{
- ECHO record_time called: $(target) / $(source) / $(user) / $(system) ;
__USER_TIME__ on $(target) = $(user) ;
__SYSTEM_TIME__ on $(target) = $(system) ;
}
@@ -33,6 +34,7 @@
{
DEPENDS $(<) : $(>) ;
}
+
actions make
{
echo made from $(>) >> $(<)
@@ -41,17 +43,22 @@
time foo : bar ;
make bar : baz ;
-''')
+""")
-import re
-t.write('baz', 'nothing\n')
-t.run_build_system(
- '-ffile.jam',
- stdout=r'bar +user: [0-9\.]+ +system: +[0-9\.]+ *$',
- match = lambda actual,expected: re.search(expected,actual,re.DOTALL)
- )
-t.expect_addition('foo')
-t.expect_addition('bar')
+t.write("baz", "nothing\n")
+
+expected_output = """\.\.\.found 4 targets\.\.\.
+\.\.\.updating 2 targets\.\.\.
+make bar
+time foo
+bar +user: [0-9\.]+ +system: +[0-9\.]+ *
+\.\.\.updated 2 targets\.\.\.$
+"""
+
+t.run_build_system("-ffile.jam -d+1", stdout=expected_output,
+ match=lambda actual, expected: re.search(expected, actual, re.DOTALL))
+t.expect_addition("foo")
+t.expect_addition("bar")
t.expect_nothing_more()
t.cleanup()
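
The run_build_system call above installs a custom match predicate, so the
expected stdout is treated as a regular expression and compared with re.DOTALL,
which lets '.' match across newlines, instead of using the default comparison.
A standalone illustration of that predicate; the sample output string is
contrived:

import re

match = lambda actual, expected: re.search(expected, actual, re.DOTALL)

actual = ("...updating 2 targets...\n"
          "bar  user: 0.01  system: 0.00 \n"
          "...updated 2 targets...\n")
assert match(actual, r'bar +user: [0-9\.]+ +system: +[0-9\.]+ *')
assert not match(actual, r'user: +abc')
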
Modified: branches/release/tools/build/v2/tools/builtin.jam
==============================================================================
--- branches/release/tools/build/v2/tools/builtin.jam (original)
+++ branches/release/tools/build/v2/tools/builtin.jam 2008-03-15 14:55:28 EDT (Sat, 15 Mar 2008)
@@ -1,45 +1,47 @@
-# Copyright 2002, 2003, 2004, 2005 Dave Abrahams
-# Copyright 2002, 2005, 2006, 2007 Rene Rivera
-# Copyright 2006 Juergen Hunold
-# Copyright 2005 Toon Knapen
-# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+# Copyright 2002, 2003, 2004, 2005 Dave Abrahams
+# Copyright 2002, 2005, 2006, 2007 Rene Rivera
+# Copyright 2006 Juergen Hunold
+# Copyright 2005 Toon Knapen
+# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
# Defines standard features and rules.
+import alias ;
import "class" : new ;
-
import feature : feature compose ;
import toolset : flags ;
import errors : error ;
-import type ;
-import scanner ;
+import generate ;
import generators ;
-import regex ;
-import virtual-target ;
import os ;
-import symlink ;
-import alias ;
-import property ;
import print ;
-import utility ;
import project ;
-import generate ;
+import property ;
+import regex ;
+import scanner ;
+import stage ;
+import symlink ;
+import type ;
+import utility ;
+import virtual-target ;
+import types/register ;
+
+
+.os-names = amiga aix bsd cygwin darwin dos emx freebsd hpux linux netbsd
+ openbsd osf qnx qnxnto sgi solaris sun sunos svr4 sysv ultrix unix unixware
+ vms windows ;
-# This feature is used to determine which OS we're on.
-# In future, this may become <target-os> and <host-os>
-# The future is now...
+
+# Feature used to determine which OS we're on. New <target-os> and <host-os>
+# features should be used instead.
local os = [ modules.peek : OS ] ;
-feature os : $(os) : propagated link-incompatible ;
+feature.feature os : $(os) : propagated link-incompatible ;
+
-.os-names = amiga aix bsd cygwin darwin dos emx freebsd hpux
- linux netbsd openbsd osf qnx qnxnto sgi solaris sun sunos
- svr4 sysv ultrix unix unixware vms windows ;
-
-# Translates from bjam current OS to the os tags used
-# in host-os and target-os. I.e. it returns the
-# running host-os.
+# Translates from bjam current OS to the os tags used in host-os and target-os,
+# i.e. returns the running host-os.
local rule default-host-os ( )
{
local host-os ;
@@ -51,233 +53,230 @@
{
switch [ os.name ]
{
- case NT : host-os = windows ;
- case AS400 : host-os = unix ;
- case MINGW : host-os = windows ;
- case BSDI : host-os = bsd ;
- case COHERENT : host-os = unix ;
- case DRAGONFLYBSD : host-os = bsd ;
- case IRIX : host-os = sgi ;
- case MACOSX : host-os = darwin ;
- case KFREEBSD : host-os = freebsd ;
- case LINUX : host-os = linux ;
- case * : host-os = unix ;
+ case NT : host-os = windows ;
+ case AS400 : host-os = unix ;
+ case MINGW : host-os = windows ;
+ case BSDI : host-os = bsd ;
+ case COHERENT : host-os = unix ;
+ case DRAGONFLYBSD : host-os = bsd ;
+ case IRIX : host-os = sgi ;
+ case MACOSX : host-os = darwin ;
+ case KFREEBSD : host-os = freebsd ;
+ case LINUX : host-os = linux ;
+ case * : host-os = unix ;
}
}
return $(host-os:L) ;
}
-# The two OS features define a known set of abstract OS
-# names. The host-os is the OS under which bjam is running.
-# Even though this should really be a fixed property we need
-# to list all the values to prevent unkown value errors.
-# Both set the default value to the current OS to account for
-# the default use case of building on the target OS.
-feature host-os : $(.os-names) ;
+
+# The two OS features define a known set of abstract OS names. The host-os is
+# the OS under which bjam is running. Even though this should really be a fixed
+# property we need to list all the values to prevent unknown value errors. Both
+# set the default value to the current OS to account for the default use case of
+# building on the target OS.
+feature.feature host-os : $(.os-names) ;
feature.set-default host-os : [ default-host-os ] ;
-feature target-os
- : $(.os-names)
- : propagated link-incompatible ;
+feature.feature target-os : $(.os-names) : propagated link-incompatible ;
feature.set-default target-os : [ default-host-os ] ;
-feature toolset : : implicit propagated symmetric ;
-
-feature stdlib : native : propagated composite ;
-
-feature link : shared static : propagated ;
-feature runtime-link : shared static : propagated ;
-feature runtime-debugging : on off : propagated ;
-
-
-feature optimization : off speed space : propagated ;
-feature profiling : off on : propagated ;
-feature inlining : off on full : propagated ;
-
-feature threading : single multi : propagated ;
-feature rtti : on off : propagated ;
-feature exception-handling : on off : propagated ;
-# Whether there is support for asynchronous EH (e.g. catching SEGVs)
-feature asynch-exceptions : off on : propagated ;
-# Whether all extern "C" functions are considered nothrow by default
-feature extern-c-nothrow : off on : propagated ;
-feature debug-symbols : on off : propagated ;
-feature define : : free ;
-feature undef : : free ;
-feature "include" : : free path ; #order-sensitive ;
-feature cflags : : free ;
-feature cxxflags : : free ;
-feature fflags : : free ;
-feature asmflags : : free ;
-feature linkflags : : free ;
-feature archiveflags : : free ;
-feature version : : free ;
-
-# Generic, i.e. non-lanugage specific, flags for tools.
-feature flags : : free ;
+feature.feature toolset : : implicit propagated symmetric ;
+feature.feature stdlib : native : propagated composite ;
+feature.feature link : shared static : propagated ;
+feature.feature runtime-link : shared static : propagated ;
+feature.feature runtime-debugging : on off : propagated ;
+feature.feature optimization : off speed space : propagated ;
+feature.feature profiling : off on : propagated ;
+feature.feature inlining : off on full : propagated ;
+feature.feature threading : single multi : propagated ;
+feature.feature rtti : on off : propagated ;
+feature.feature exception-handling : on off : propagated ;
+
+# Whether there is support for asynchronous EH (e.g. catching SEGVs).
+feature.feature asynch-exceptions : off on : propagated ;
+
+# Whether all extern "C" functions are considered nothrow by default.
+feature.feature extern-c-nothrow : off on : propagated ;
+
+feature.feature debug-symbols : on off : propagated ;
+feature.feature define : : free ;
+feature.feature undef : : free ;
+feature.feature "include" : : free path ; #order-sensitive ;
+feature.feature cflags : : free ;
+feature.feature cxxflags : : free ;
+feature.feature fflags : : free ;
+feature.feature asmflags : : free ;
+feature.feature linkflags : : free ;
+feature.feature archiveflags : : free ;
+feature.feature version : : free ;
+# Generic, i.e. non-language specific, flags for tools.
+feature.feature flags : : free ;
feature.feature location-prefix : : free ;
-# The following features are incidental, since
-# in themself they have no effect on build products.
-# Not making them incidental will result in problems in corner
-# cases, for example:
-#
+# The following features are incidental since they have no effect on built
+# products. Not making them incidental will result in problems in corner cases,
+# e.g.:
+#
# unit-test a : a.cpp : <use>b ;
# lib b : a.cpp b ;
-#
-# Here, if <use> is not incidental, we'll decide we have two
-# targets for a.obj with different properties, and will complain.
#
-# Note that making feature incidental does not mean it's ignored. It may
-# be ignored when creating the virtual target, but the rest of build process
-# will use them.
-feature use : : free dependency incidental ;
-feature dependency : : free dependency incidental ;
-feature implicit-dependency : : free dependency incidental ;
-
-feature warnings :
- on # enable default/"reasonable" warning level for the tool
- all # enable all possible warnings issued by the tool
- off # disable all warnings issued by the tool
+# Here, if <use> is not incidental, we'll decide we have two targets for a.obj
+# with different properties, and will complain.
+#
+# Note that making feature incidental does not mean it's ignored. It may be
+# ignored when creating the virtual target, but the rest of build process will
+# use them.
+feature.feature use : : free dependency incidental ;
+feature.feature dependency : : free dependency incidental ;
+feature.feature implicit-dependency : : free dependency incidental ;
+
+feature.feature warnings :
+ on # Enable default/"reasonable" warning level for the tool.
+ all # Enable all possible warnings issued by the tool.
+ off # Disable all warnings issued by the tool.
: incidental propagated ;
-feature warnings-as-errors :
- off # do not fail the compilation if there are warnings
- on # fail the compilation if there are warnings
+feature.feature warnings-as-errors :
+ off # Do not fail the compilation if there are warnings.
+ on # Fail the compilation if there are warnings.
: incidental propagated ;
-feature source : : free dependency incidental ;
-feature library : : free dependency incidental ;
-feature file : : free dependency incidental ;
-feature find-shared-library : : free ; #order-sensitive ;
-feature find-static-library : : free ; #order-sensitive ;
-feature library-path : : free path ; #order-sensitive ;
-# Internal feature.
-feature library-file : : free dependency ;
+feature.feature source : : free dependency incidental ;
+feature.feature library : : free dependency incidental ;
+feature.feature file : : free dependency incidental ;
+feature.feature find-shared-library : : free ; #order-sensitive ;
+feature.feature find-static-library : : free ; #order-sensitive ;
+feature.feature library-path : : free path ; #order-sensitive ;
-feature name : : free ;
-feature tag : : free ;
-feature search : : free path ; #order-sensitive ;
-feature location : : free path ;
+# Internal feature.
+feature.feature library-file : : free dependency ;
-feature dll-path : : free path ;
-feature hardcode-dll-paths : true false : incidental ;
+feature.feature name : : free ;
+feature.feature tag : : free ;
+feature.feature search : : free path ; #order-sensitive ;
+feature.feature location : : free path ;
+feature.feature dll-path : : free path ;
+feature.feature hardcode-dll-paths : true false : incidental ;
-# An internal feature that holds the paths of all dependency
-# dynamic libraries. On Windows, it's needed so that we can all
-# those paths to PATH when running applications.
-# On Linux, it's needed to add proper -rpath-link command line options.
-feature xdll-path : : free path ;
+# An internal feature that holds the paths of all dependency shared libraries.
+# On Windows, it's needed so that we can add all those paths to PATH when
+# running applications. On Linux, it's needed to add proper -rpath-link command
+# line options.
+feature.feature xdll-path : : free path ;
-#provides means to specify def-file for windows dlls.
-feature def-file : : free dependency ;
+# Provides means to specify def-file for windows DLLs.
+feature.feature def-file : : free dependency ;
-feature.feature suppress-import-lib : false true : incidental ;
+feature.feature suppress-import-lib : false true : incidental ;
-# This is internal feature which is used to store the name of
-# bjam action to call when building a target.
+# Internal feature used to store the name of a bjam action to call when building
+# a target.
feature.feature action : : free ;
-# This feature is used to allow specific generators to run.
-# For example, QT tools can only be invoked when QT library
-# is used. In that case, <allow>qt will be in usage requirement
-# of the library.
-feature allow : : free ;
-
-# The addressing model to generate code for.
-# Currently a limited set only specifying the bit size of pointers.
-feature address-model : 16 32 64
- : propagated optional ;
+# This feature is used to allow specific generators to run. For example, QT
+# tools can only be invoked when QT library is used. In that case, <allow>qt
+# will be in usage requirement of the library.
+feature.feature allow : : free ;
+
+# The addressing model to generate code for. Currently a limited set only
+# specifying the bit size of pointers.
+feature.feature address-model : 16 32 64 : propagated optional ;
# Type of CPU architecture to compile for.
-feature architecture :
+feature.feature architecture :
# x86 and x86-64
x86
+
# ia64
ia64
+
# Sparc
sparc
+
# RS/6000 & PowerPC
power
+
# MIPS/SGI
mips1 mips2 mips3 mips4 mips32 mips32r2 mips64
+
# HP/PA-RISC
parisc
- #
+
+ # Combined architectures for platforms/toolsets that support building for
+ # multiple architectures at once. "combined" would be the default multi-arch
+ # for the toolset.
+ combined
+ combined-x86-power
+
: propagated optional ;
# The specific instruction set in an architecture to compile.
-feature instruction-set :
+feature.feature instruction-set :
# x86 and x86-64
- i386 i486 i586 i686
- pentium pentium-mmx pentiumpro pentium2 pentium3 pentium3m pentium-m pentium4 pentium4m
- prescott nocona
- conroe conroe-xe conroe-l allendale mermon mermon-xe kentsfield kentsfield-xe
- penryn wolfdale yorksfield nehalem
- k6 k6-2 k6-3 athlon athlon-tbird athlon-4 athlon-xp athlon-mp
- k8 opteron athlon64 athlon-fx
- winchip-c6 winchip2
- c3 c3-2
+ i386 i486 i586 i686 pentium pentium-mmx pentiumpro pentium2 pentium3
+ pentium3m pentium-m pentium4 pentium4m prescott nocona conroe conroe-xe
+ conroe-l allendale mermon mermon-xe kentsfield kentsfield-xe penryn wolfdale
+ yorksfield nehalem k6 k6-2 k6-3 athlon athlon-tbird athlon-4 athlon-xp
+ athlon-mp k8 opteron athlon64 athlon-fx winchip-c6 winchip2 c3 c3-2
+
# ia64
itanium itanium1 merced itanium2 mckinley
+
# Sparc
- v7 cypress v8 supersparc sparclite hypersparc sparclite86x
- f930 f934 sparclet tsc701 v9 ultrasparc ultrasparc3
+ v7 cypress v8 supersparc sparclite hypersparc sparclite86x f930 f934
+ sparclet tsc701 v9 ultrasparc ultrasparc3
+
# RS/6000 & PowerPC
- 401 403 405 405fp 440 440fp 505
- 601 602 603 603e 604 604e 620 630 740 7400 7450 750
- 801 821 823 860 970 8540
- power-common ec603e g3 g4 g5
- power power2 power3 power4 power5 powerpc powerpc64
- rios rios1 rsc rios2 rs64a
+ 401 403 405 405fp 440 440fp 505 601 602 603 603e 604 604e 620 630 740 7400
+ 7450 750 801 821 823 860 970 8540 power-common ec603e g3 g4 g5 power power2
+ power3 power4 power5 powerpc powerpc64 rios rios1 rsc rios2 rs64a
+
# MIPS
- 4kc 4kp 5kc 20kc m4k
- r2000 r3000 r3900 r4000 r4100 r4300 r4400 r4600 r4650 r6000 r8000
- rm7000 rm9000 orion sb1
- vr4100 vr4111 vr4120 vr4130 vr4300 vr5000 vr5400 vr5500
+ 4kc 4kp 5kc 20kc m4k r2000 r3000 r3900 r4000 r4100 r4300 r4400 r4600 r4650
+ r6000 r8000 rm7000 rm9000 orion sb1 vr4100 vr4111 vr4120 vr4130 vr4300
+ vr5000 vr5400 vr5500
+
# HP/PA-RISC
700 7100 7100lc 7200 7300 8000
- #
+
: propagated optional ;
-
-
-# Used to select specific variant of C++ ABI is the compiler
-# supports several.
-feature c++abi : : propagated optional ;
-
-
-feature conditional : : incidental free ;
+
+# Used to select a specific variant of C++ ABI if the compiler supports several.
+feature.feature c++abi : : propagated optional ;
+
+feature.feature conditional : : incidental free ;
# The value of 'no' prevents building of a target.
-feature build : yes no : optional ;
+feature.feature build : yes no : optional ;
# Windows-specific features
-feature user-interface : console gui wince native auto ;
+feature.feature user-interface : console gui wince native auto ;
+
+feature.feature variant : : implicit composite propagated symmetric ;
-feature variant : : implicit composite propagated symmetric ;
# Declares a new variant.
-# First determines explicit properties for this variant, by
-# refining parents' explicit properties with the passed explicit
-# properties. The result is remembered and will be used if
-# this variant is used as parent.
#
-# Second, determines the full property set for this variant by
-# adding to the explicit properties default values for all properties
-# which neither present nor are symmetric.
+# First determines explicit properties for this variant, by refining parents'
+# explicit properties with the passed explicit properties. The result is
+# remembered and will be used if this variant is used as parent.
+#
+# Second, determines the full property set for this variant by adding to the
+# explicit properties default values for all missing non-symmetric properties.
#
-# Lastly, makes appropriate value of 'variant' property expand
-# to the full property set.
-rule variant ( name # Name of the variant
- : parents-or-properties * # Specifies parent variants, if
- # 'explicit-properties' are given,
- # and explicit-properties otherwise.
- : explicit-properties * # Explicit properties.
+# Lastly, makes appropriate value of 'variant' property expand to the full
+# property set.
+rule variant ( name # Name of the variant
+ : parents-or-properties * # Specifies parent variants, if
+ # 'explicit-properties' are given, and
+ # explicit-properties or parents otherwise.
+ : explicit-properties * # Explicit properties.
)
{
local parents ;
@@ -297,97 +296,99 @@
parents = $(parents-or-properties) ;
}
- # The problem is that we have to check for conflicts
- # between base variants.
+ # The problem is that we have to check for conflicts between base variants.
if $(parents[2])
{
- error "multiple base variants are not yet supported" ;
+ errors.error "multiple base variants are not yet supported" ;
}
-
+
local inherited ;
- # Add explicitly specified properties for parents
+ # Add explicitly specified properties for parents.
for local p in $(parents)
{
- # TODO: the check may be sticter
+ # TODO: This check may be made stricter.
if ! [ feature.is-implicit-value $(p) ]
{
- error "Invalid base varaint" $(p) ;
+ errors.error "Invalid base variant" $(p) ;
}
-
+
inherited += $(.explicit-properties.$(p)) ;
}
property.validate $(explicit-properties) ;
- explicit-properties = [ property.refine $(inherited) : $(explicit-properties) ] ;
-
- # Record explicitly specified properties for this variant
- # We do this after inheriting parents' properties, so that
- # they affect other variants, derived from this one.
+ explicit-properties = [ property.refine $(inherited)
+ : $(explicit-properties) ] ;
+
+ # Record explicitly specified properties for this variant. We do this after
+ # inheriting parents' properties so they affect other variants derived from
+ # this one.
.explicit-properties.$(name) = $(explicit-properties) ;
-
+
feature.extend variant : $(name) ;
- feature.compose <variant>$(name) : $(explicit-properties) ;
+ feature.compose <variant>$(name) : $(explicit-properties) ;
}
IMPORT $(__name__) : variant : : variant ;
-variant debug : <optimization>off <debug-symbols>on <inlining>off <runtime-debugging>on ;
-variant release : <optimization>speed <debug-symbols>off <inlining>full
+
+variant debug : <optimization>off <debug-symbols>on <inlining>off
+ <runtime-debugging>on ;
+variant release : <optimization>speed <debug-symbols>off <inlining>full
<runtime-debugging>off <define>NDEBUG ;
variant profile : release : <profiling>on <debug-symbols>on ;
+
class searched-lib-target : abstract-file-target
{
- rule __init__ ( name
- : project
- : shared ?
+ rule __init__ ( name
+ : project
+ : shared ?
: search *
: action
)
{
- abstract-file-target.__init__ $(name) : SEARCHED_LIB : $(project)
+ abstract-file-target.__init__ $(name) : SEARCHED_LIB : $(project)
: $(action) : ;
-
+
self.shared = $(shared) ;
self.search = $(search) ;
}
-
+
rule shared ( )
{
return $(self.shared) ;
}
-
+
rule search ( )
{
return $(self.search) ;
}
-
+
rule actualize-location ( target )
{
NOTFILE $(target) ;
- }
-
+ }
+
rule path ( )
{
}
-}
-
-import types/register ;
-import stage ;
+}
-class c-scanner : scanner
+class c-scanner : scanner
{
- import regex virtual-target path scanner ;
-
+ import path ;
+ import regex ;
+ import scanner ;
+ import virtual-target ;
+
rule __init__ ( includes * )
{
scanner.__init__ ;
-
+
for local i in $(includes)
- {
+ {
self.includes += [ path.native $(i:G=) ] ;
}
-
- }
+ }
rule pattern ( )
{
@@ -396,70 +397,79 @@
rule process ( target : matches * : binding )
{
- local angle = [ regex.transform $(matches) : "<(.*)>" ] ;
+ local angle = [ regex.transform $(matches) : "<(.*)>" ] ;
local quoted = [ regex.transform $(matches) : "\"(.*)\"" ] ;
# CONSIDER: the new scoping rule seem to defeat "on target" variables.
- local g = [ on $(target) return $(HDRGRIST) ] ;
+ local g = [ on $(target) return $(HDRGRIST) ] ;
local b = [ NORMALIZE_PATH $(binding:D) ] ;
- # Attach binding of including file to included targets.
- # When target is directly created from virtual target
- # this extra information is unnecessary. But in other
- # cases, it allows to distinguish between two headers of the
- # same name included from different places.
- # We don't need this extra information for angle includes,
- # since they should not depend on including file (we can't
- # get literal "." in include path).
+ # Attach binding of including file to included targets. When a target is
+ # directly created from virtual target this extra information is
+ # unnecessary. But in other cases, it allows us to distinguish between
+ # two headers of the same name included from different places. We don't
+ # need this extra information for angle includes, since they should not
+ # depend on including file (we can't get literal "." in include path).
local g2 = $(g)"#"$(b) ;
-
+
angle = $(angle:G=$(g)) ;
quoted = $(quoted:G=$(g2)) ;
-
+
local all = $(angle) $(quoted) ;
INCLUDES $(target) : $(all) ;
NOCARE $(all) ;
SEARCH on $(angle) = $(self.includes:G=) ;
SEARCH on $(quoted) = $(b) $(self.includes:G=) ;
-
- # Just propagate current scanner to includes, in a hope
- # that includes do not change scanners.
+
+ # Just propagate the current scanner to includes in hope that includes
+ # do not change scanners.
scanner.propagate $(__name__) : $(angle) $(quoted) : $(target) ;
- }
+
+ ISFILE $(angle) $(quoted) ;
+ }
}
-scanner.register c-scanner : include ;
-type.set-scanner CPP : c-scanner ;
+type.register H : h ;
+type.register HPP : hpp : H ;
+type.register C : c ;
+scanner.register c-scanner : include ;
-type.register H : h ;
-type.register HPP : hpp : H ;
-type.register C : c ;
+# In most cases where a CPP file or an H file is a source of some action,
+# we should rebuild the result if any of the files included by the CPP/H file
+# are changed. One case when this is not needed is installation,
+# which is handled specifically.
+type.set-scanner CPP : c-scanner ;
+type.set-scanner C : c-scanner ;
+# One case where scanning of H/HPP files is necessary is PCH generation --
+# if any header included by HPP being precompiled changes, we need to
+# recompile the header.
+type.set-scanner H : c-scanner ;
+type.set-scanner HPP : c-scanner ;
-type.set-scanner C : c-scanner ;
-# The generator class for libraries (target type LIB). Depending on properties it will
-# request building of the approapriate specific type -- SHARED_LIB, STATIC_LIB or
-# SHARED_LIB.
+# The generator class for libraries (target type LIB). Depending on properties
+# it will request building of the appropriate specific library type --
+# SHARED_LIB, STATIC_LIB or SEARCHED_LIB.
class lib-generator : generator
{
rule __init__ ( * : * )
{
generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
}
-
+
rule run ( project name ? : property-set : sources * )
{
- # The lib generator is composing, and can be only invoked with
+ # The lib generator is composing, and can be only invoked with an
# explicit name. This check is present in generator.run (and so in
- # builtin.linking-generator), but duplicate it here to avoid doing
- # extra work.
+ # builtin.linking-generator) but duplicated here to avoid doing extra
+ # work.
if $(name)
- {
+ {
local properties = [ $(property-set).raw ] ;
- # Determine the needed target type
+ # Determine the needed target type.
local actual-type ;
# <source>files can be generated by <conditional>@rule feature
# in which case we don't consider it a SEARCHED_LIB type.
@@ -470,129 +480,129 @@
}
else if <file> in $(properties:G)
{
- # The generator for
actual-type = LIB ;
- }
+ }
else if <link>shared in $(properties)
{
actual-type = SHARED_LIB ;
}
- else
+ else
{
actual-type = STATIC_LIB ;
}
property-set = [ $(property-set).add-raw <main-target-type>LIB ] ;
# Construct the target.
- return [ generators.construct $(project) $(name) : $(actual-type)
- : $(property-set) : $(sources) ] ;
- }
- }
-
+ return [ generators.construct $(project) $(name) : $(actual-type)
+ : $(property-set) : $(sources) ] ;
+ }
+ }
+
rule viable-source-types ( )
{
return * ;
- }
+ }
}
+
generators.register [ new lib-generator builtin.lib-generator : : LIB ] ;
+
# The implementation of the 'lib' rule. Beyond standard syntax that rule allows
-# simplified:
-# lib a b c ;
-# so we need to write code to handle that syntax.
-rule lib ( names + : sources * : requirements * : default-build *
+# a simplified one: "lib a b c ;".
+rule lib ( names + : sources * : requirements * : default-build *
: usage-requirements * )
{
- local result ;
- local project = [ project.current ] ;
-
- # This is a circular module dependency, so it must be imported here
- import targets ;
-
if $(names[2])
{
if <name> in $(requirements:G)
{
errors.user-error "When several names are given to the 'lib' rule" :
- "it's not allowed to specify the <name> feature. " ;
- }
+ "it's not allowed to specify the <name> feature." ;
+ }
if $(sources)
{
errors.user-error "When several names are given to the 'lib' rule" :
- "it's not allowed to specify sources. " ;
- }
+ "it's not allowed to specify sources." ;
+ }
}
-
+
+ # This is a circular module dependency so it must be imported here.
+ import targets ;
+
+ local project = [ project.current ] ;
+ local result ;
+
for local name in $(names)
- {
+ {
local r = $(requirements) ;
- # Support " lib a ; " and " lib a b c ; " syntaxes.
+ # Support " lib a ; " and " lib a b c ; " syntax.
if ! $(sources) && ! <name> in $(requirements:G)
&& ! <file> in $(requirements:G)
{
r += <name>$(name) ;
- }
+ }
result += [ targets.main-target-alternative
- [ new typed-target $(name) : $(project) : LIB
- : [ targets.main-target-sources $(sources) : $(name) ]
- : [ targets.main-target-requirements $(r) : $(project) ]
- : [ targets.main-target-default-build $(default-build) : $(project) ]
- : [ targets.main-target-usage-requirements $(usage-requirements) : $(project) ]
- ] ] ;
- }
+ [ new typed-target $(name) : $(project) : LIB
+ : [ targets.main-target-sources $(sources) : $(name) ]
+ : [ targets.main-target-requirements $(r) : $(project) ]
+ : [ targets.main-target-default-build $(default-build) : $(project) ]
+ : [ targets.main-target-usage-requirements $(usage-requirements) : $(project) ]
+ ] ] ;
+ }
return $(result) ;
}
IMPORT $(__name__) : lib : : lib ;
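To illustrate the simplified syntax handled above (a sketch that follows directly from the rule: each extra name just gets a matching <name> requirement added):

    # These declarations are equivalent:
    lib z png jpeg ;
    # ... and ...
    lib z ; lib png ; lib jpeg ;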
+
class searched-lib-generator : generator
{
import property-set ;
-
+
rule __init__ ( )
{
- # The requirements cause the generators to be tried *only* when we're building
- # lib target and there's 'search' feature. This seems ugly --- all we want
- # is make sure searched-lib-generator is not invoced deep in transformation
- # search.
+ # The requirements cause the generators to be tried *only* when we're
+ # building a lib target with a 'search' feature. This seems ugly --- all
+ # we want is to make sure searched-lib-generator is not invoked deep
+ # inside transformation search to produce intermediate targets.
generator.__init__ searched-lib-generator : : SEARCHED_LIB ;
}
-
+
rule run ( project name ? : property-set : sources * )
{
if $(name)
{
- # If name is empty, it means we're called not from top-level.
- # In this case, we just fail immediately, because searched-lib-generator
- # cannot be used to produce intermediate targets.
-
- local properties = [ $(property-set).raw ] ;
+ # If 'name' is empty, it means we have not been called to build a
+ # top-level target. In this case, we just fail immediately, because
+ # searched-lib-generator cannot be used to produce intermediate
+ # targets.
+
+ local properties = [ $(property-set).raw ] ;
local shared ;
if <link>shared in $(properties)
{
shared = true ;
- }
-
+ }
+
local search = [ feature.get-values <search> : $(properties) ] ;
- a = [ new null-action $(property-set) ] ;
+ local a = [ new null-action $(property-set) ] ;
local lib-name = [ feature.get-values <name> : $(properties) ] ;
lib-name ?= $(name) ;
- local t = [ new searched-lib-target $(lib-name) : $(project) : $(shared)
- : $(search)
- : $(a)
- ] ;
+ local t = [ new searched-lib-target $(lib-name) : $(project)
+ : $(shared) : $(search) : $(a) ] ;
# We return sources for a simple reason. If there's
- # lib png : z : <name>png ;
- # the 'z' target should be returned, so that apps linking to
- # 'png' will link to 'z', too.
+ # lib png : z : <name>png ;
+ # the 'z' target should be returned, so that apps linking to 'png'
+ # will link to 'z', too.
return [ property-set.create <xdll-path>$(search) ]
[ virtual-target.register $(t) ] $(sources) ;
}
- }
+ }
}
generators.register [ new searched-lib-generator ] ;
+
class prebuilt-lib-generator : generator
{
rule __init__ ( * : * )
@@ -604,94 +614,95 @@
{
local f = [ $(property-set).get <file> ] ;
return $(f) $(sources) ;
- }
+ }
}
-generators.register
+generators.register
[ new prebuilt-lib-generator builtin.prebuilt : : LIB : <file> ] ;
generators.override builtin.prebuilt : builtin.lib-generator ;
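For context, the prebuilt generator services 'lib' targets declared with an explicit <file>, as in the example quoted further down in this same file:

    lib a1 : : <file>liba1.a ;   # no compilation requested; liba1.a is used as-is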
-
-class compile-action : action
+
+class compile-action : action
{
import sequence ;
-
+
rule __init__ ( targets * : sources * : action-name : properties * )
{
action.__init__ $(targets) : $(sources) : $(action-name) : $(properties) ;
}
-
-
- # For all virtual targets for the same dependency graph as self,
- # i.e. which belong to the same main target, add their directories
- # to include path.
+
+ # For all virtual targets for the same dependency graph as self, i.e. which
+ # belong to the same main target, add their directories to the include path.
rule adjust-properties ( property-set )
- {
+ {
local s = [ $(self.targets[1]).creating-subvariant ] ;
- return [ $(property-set).add-raw
+ return [ $(property-set).add-raw
[ $(s).implicit-includes "include" : H ] ] ;
- }
+ }
}
-# Declare a special compiler generator.
-# The only thing it does is changing the type used to represent
-# 'action' in the constructed dependency graph to 'compile-action'.
-# That class in turn adds additional include paths to handle a case
-# when a source file includes headers which are generated themselfs.
+
+# Declare a special compiler generator. The only thing it does is changing the
+# type used to represent 'action' in the constructed dependency graph to
+# 'compile-action'. That class in turn adds additional include paths to handle
+# cases when a source file includes headers which are generated themselves.
class C-compiling-generator : generator
{
- rule __init__ ( id : source-types + : target-types + :
- requirements * : optional-properties * )
+ rule __init__ ( id : source-types + : target-types + : requirements *
+ : optional-properties * )
{
generator.__init__ $(id) : $(source-types) : $(target-types) :
- $(requirements) : $(optional-properties) ;
+ $(requirements) : $(optional-properties) ;
}
-
+
rule action-class ( )
{
return compile-action ;
}
}
-rule register-c-compiler ( id : source-types + : target-types + :
- requirements * : optional-properties * )
+
+rule register-c-compiler ( id : source-types + : target-types + : requirements *
+ : optional-properties * )
{
- local g = [ new C-compiling-generator $(id) : $(source-types)
- : $(target-types) : $(requirements) : $(optional-properties) ] ;
- generators.register $(g) ;
+ generators.register [ new C-compiling-generator $(id) : $(source-types)
+ : $(target-types) : $(requirements) : $(optional-properties) ] ;
}
-# FIXME: this is ugly, should find a better way (we'd want client code to
-# register all generators as "generator.some-rule", not with "some-module.some-rule".)
+# FIXME: this is ugly, should find a better way (we'd like client code to
+# register all generators as "generators.some-rule" instead of
+# "some-module.some-rule").
IMPORT $(__name__) : register-c-compiler : : generators.register-c-compiler ;
+
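As a usage reference, the darwin.jam changes later in this commit invoke the exported rule exactly this way to register Objective-C compilers:

    generators.register-c-compiler darwin.compile.m  : OBJECTIVE_C   : OBJ : <toolset>darwin ;
    generators.register-c-compiler darwin.compile.mm : OBJECTIVE_CPP : OBJ : <toolset>darwin ;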
# The generator class for handling EXE and SHARED_LIB creation.
class linking-generator : generator
{
- import property-set ;
- import type ;
import path ;
import project ;
-
- rule __init__ ( id
- composing ? : # Specify if generator is composing. The generator will be
- # composing if non-empty string is passed, or parameter is
- # not given. To make generator non-composing, pass empty
- # string ("")
- source-types + : target-types + :
+ import property-set ;
+ import type ;
+
+ rule __init__ ( id
+ composing ? : # The generator will be composing if a non-empty
+ # string is passed or the parameter is not given. To
+ # make the generator non-composing, pass an empty
+ # string ("").
+ source-types + :
+ target-types + :
requirements * )
{
composing ?= true ;
- generator.__init__ $(id) $(composing) : $(source-types) : $(target-types) :
- $(requirements) ;
+ generator.__init__ $(id) $(composing) : $(source-types)
+ : $(target-types) : $(requirements) ;
}
-
+
rule run ( project name ? : property-set : sources + )
- {
- sources += [ $(property-set).get <library> ] ;
-
- # Add <library-path> properties for all searched libraries
+ {
+ sources += [ $(property-set).get <library> ] ;
+
+ # Add <library-path> properties for all searched libraries.
local extra ;
for local s in $(sources)
{
@@ -701,62 +712,60 @@
extra += <library-path>$(search) ;
}
}
-
- # It's possible that sources include shared libraries that
- # did not came from 'lib' targets. For example, .so files
- # specified as sources.
- # In this case we have
- # - add extra dll-path properties
- # - propagate extra xdll-path properties so that application
- # linking to use will get xdll-path to those libraries.
+
+ # It's possible that sources include shared libraries that did not come
+ # from 'lib' targets, e.g. .so files specified as sources. In this case
+ # we have to add extra dll-path properties and propagate extra xdll-path
+ # properties so that applications linking to us will get xdll-path to
+ # those libraries.
local extra-xdll-paths ;
for local s in $(sources)
{
- if [ type.is-derived [ $(s).type ] SHARED_LIB ] && ! [ $(s).action ]
+ if [ type.is-derived [ $(s).type ] SHARED_LIB ] && ! [ $(s).action ]
{
- # Unfortunately, we don't have a good way to find the path
- # to a file, so use this nasty approach.
+ # Unfortunately, we don't have a good way to find the path to a
+ # file, so use this nasty approach.
local p = [ $(s).project ] ;
local location = [ path.root [ $(s).name ]
- [ $(p).get source-location ] ] ;
+ [ $(p).get source-location ] ] ;
extra-xdll-paths += [ path.parent $(location) ] ;
- }
+ }
}
-
- # Hardcode dll paths only when linking executables.
+
+ # Hardcode DLL paths only when linking executables.
# Pros: don't need to relink libraries when installing.
- # Cons: "standalone" libraries (plugins, python extensions)
- # can't hardcode paths to dependent libraries.
+ # Cons: "standalone" libraries (plugins, python extensions) can't
+ # hardcode paths to dependent libraries.
if [ $(property-set).get <hardcode-dll-paths> ] = true
- && [ type.is-derived $(self.target-types[1]) EXE ]
+ && [ type.is-derived $(self.target-types[1]) EXE ]
{
local xdll-path = [ $(property-set).get <xdll-path> ] ;
extra += <dll-path>$(xdll-path) <dll-path>$(extra-xdll-paths) ;
}
-
+
if $(extra)
{
property-set = [ $(property-set).add-raw $(extra) ] ;
- }
-
+ }
+
local result = [ generator.run $(project) $(name) : $(property-set)
- : $(sources) ] ;
-
+ : $(sources) ] ;
+
local ur ;
if $(result)
- {
- ur = [ extra-usage-requirements $(result) : $(property-set) ] ;
- ur = [ $(ur).add
- [ property-set.create <xdll-path>$(extra-xdll-paths) ] ] ;
- }
+ {
+ ur = [ extra-usage-requirements $(result) : $(property-set) ] ;
+ ur = [ $(ur).add
+ [ property-set.create <xdll-path>$(extra-xdll-paths) ] ] ;
+ }
return $(ur) $(result) ;
}
-
+
rule extra-usage-requirements ( created-targets * : property-set )
- {
- local result = [ property-set.empty ] ;
+ {
+ local result = [ property-set.empty ] ;
local extra ;
-
+
        # Add appropriate <xdll-path> usage requirements.
local raw = [ $(property-set).raw ] ;
if <link>shared in $(raw)
@@ -765,41 +774,40 @@
local pwd = [ path.pwd ] ;
for local t in $(created-targets)
{
- if [ type.is-derived [ $(t).type ] SHARED_LIB ]
+ if [ type.is-derived [ $(t).type ] SHARED_LIB ]
{
paths += [ path.root [ path.make [ $(t).path ] ] $(pwd) ] ;
- }
- }
+ }
+ }
extra += $(paths:G=<xdll-path>) ;
}
-
+
# We need to pass <xdll-path> features that we've got from sources,
- # because if shared library is built, exe which uses it must know paths
- # to other shared libraries this one depends on, to be able to find them
- # all at runtime.
-
- # Just pass all features in property-set, it's theorically possible
- # that we'll propagate <xdll-path> features explicitly specified by
- # the user, but then the user's to blaim for using internal feature.
+ # because if a shared library is built, an exe using it needs to know paths
+ # to other shared libraries this one depends on in order to be able to
+ # find them all at runtime.
+
+ # Just pass all features in property-set; it's theoretically possible that
+ # we'll propagate <xdll-path> features explicitly specified by the user,
+ # but then the user's to blame for using an internal feature.
local values = [ $(property-set).get <xdll-path> ] ;
extra += $(values:G=<xdll-path>) ;
-
+
if $(extra)
{
result = [ property-set.create $(extra) ] ;
}
return $(result) ;
}
-
+
rule generated-targets ( sources + : property-set : project name ? )
{
- local sources2 ; # sources to pass to inherited rule
- local properties2 ; # properties to pass to inherited rule
- local libraries ; # sources which are libraries
-
- # Searched libraries are not passed as argument to linker
- # but via some option. So, we pass them to the action
- # via property.
+ local sources2 ; # Sources to pass to inherited rule.
+ local properties2 ; # Properties to pass to inherited rule.
+ local libraries ; # Library sources.
+
+ # Searched libraries are not passed as arguments to the linker but via
+ # some option. So, we pass them to the action using a property.
properties2 = [ $(property-set).raw ] ;
local fsa ;
local fst ;
@@ -808,75 +816,73 @@
if [ type.is-derived [ $(s).type ] SEARCHED_LIB ]
{
local name = [ $(s).name ] ;
- if [ $(s).shared ]
- {
- fsa += $(name) ;
+ if [ $(s).shared ]
+ {
+ fsa += $(name) ;
}
else
{
fst += $(name) ;
- }
+ }
}
else
{
sources2 += $(s) ;
}
}
- properties2 += <find-shared-library>$(fsa:J=&&)
+ properties2 += <find-shared-library>$(fsa:J=&&)
<find-static-library>$(fst:J=&&) ;
-
- local spawn = [ generator.generated-targets $(sources2)
- : [ property-set.create $(properties2) ] : $(project) $(name) ] ;
-
- return $(spawn) ;
- }
-}
-
-rule register-linker ( id composing ? : source-types + : target-types + :
- requirements * )
-{
- local g = [ new linking-generator $(id) $(composing) : $(source-types)
- : $(target-types) : $(requirements) ] ;
- generators.register $(g) ;
+
+ return [ generator.generated-targets $(sources2)
+ : [ property-set.create $(properties2) ] : $(project) $(name) ] ;
+ }
}
+
+rule register-linker ( id composing ? : source-types + : target-types +
+ : requirements * )
+{
+ generators.register [ new linking-generator $(id) $(composing)
+ : $(source-types) : $(target-types) : $(requirements) ] ;
+}
+
+
# The generator class for handling STATIC_LIB creation.
class archive-generator : generator
{
- import property-set ;
+ import property-set ;
- rule __init__ ( id composing ? : source-types + : target-types + :
- requirements * )
+ rule __init__ ( id composing ? : source-types + : target-types +
+ : requirements * )
{
composing ?= true ;
- generator.__init__ $(id) $(composing) : $(source-types) : $(target-types) :
- $(requirements) ;
+ generator.__init__ $(id) $(composing) : $(source-types)
+ : $(target-types) : $(requirements) ;
}
-
+
rule run ( project name ? : property-set : sources + )
- {
- sources += [ $(property-set).get <library> ] ;
-
+ {
+ sources += [ $(property-set).get <library> ] ;
+
local result = [ generator.run $(project) $(name) : $(property-set)
- : $(sources) ] ;
-
- # For static linking, if we get a library in source, we can't
- # directly link to it. So, we need to cause our dependencies
- # to link to that library. There are two approaches:
+ : $(sources) ] ;
+
+ # For static linking, if we get a library in sources, we can't directly
+ # link to it, so we need to cause our dependents to link to that
+ # library. There are two approaches:
# - adding the library to the list of returned targets.
# - using the <library> usage requirements.
# The problem with the first is:
- #
+ #
# lib a1 : : <file>liba1.a ;
# lib a2 : a2.cpp a1 : <link>static ;
# install dist : a2 ;
#
- # here we'll try to install 'a1', even though it's not necessary in
- # the general case.
- # With the second approaches, even indirect dependents will link to
- # the library, but it should not cause any harm.
- # So, return all LIB sources together with created targets,
- # so that dependents link to them.
+ # here we'll try to install 'a1', even though it's not necessary in the
+ # general case. With the second approach, even indirect dependents will
+ # link to the library, but it should not cause any harm. So, return all
+ # LIB sources together with created targets, so that dependents link to
+ # them.
local usage-requirements ;
if [ $(property-set).get <link> ] = static
{
@@ -885,41 +891,36 @@
if [ type.is-derived [ $(t).type ] LIB ]
{
usage-requirements += <library>$(t) ;
- }
- }
+ }
+ }
}
-
+
usage-requirements = [ property-set.create $(usage-requirements) ] ;
-
+
return $(usage-requirements) $(result) ;
- }
+ }
}
-rule register-archiver ( id composing ? : source-types + : target-types + :
- requirements * )
+
+rule register-archiver ( id composing ? : source-types + : target-types +
+ : requirements * )
{
- local g = [ new archive-generator $(id) $(composing) : $(source-types)
- : $(target-types) : $(requirements) ] ;
- generators.register $(g) ;
+ generators.register [ new archive-generator $(id) $(composing)
+ : $(source-types) : $(target-types) : $(requirements) ] ;
}
-# Generators that accepts everything, and produces nothing.
-# Usefull as general fallback for toolset-specific actions, like
-# PCH generation.
+
+# Generator that accepts everything and produces nothing. Useful as a general
+# fallback for toolset-specific actions like PCH generation.
class dummy-generator : generator
{
import property-set ;
-
+
rule run ( project name ? : property-set : sources + )
{
return [ property-set.empty ] ;
}
}
-
-
-IMPORT $(__name__) : register-linker register-archiver
- : : generators.register-linker generators.register-archiver ;
-
-
-
+IMPORT $(__name__) : register-linker register-archiver
+ : : generators.register-linker generators.register-archiver ;
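A sketch of how a toolset module would typically use the two exported rules; the toolset name 'mycc' and the exact type lists here are illustrative only:

    generators.register-linker   mycc.link    : OBJ STATIC_LIB SHARED_LIB : EXE : <toolset>mycc ;
    generators.register-archiver mycc.archive : OBJ : STATIC_LIB : <toolset>mycc ;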
Modified: branches/release/tools/build/v2/tools/cast.jam
==============================================================================
--- branches/release/tools/build/v2/tools/cast.jam (original)
+++ branches/release/tools/build/v2/tools/cast.jam 2008-03-15 14:55:28 EDT (Sat, 15 Mar 2008)
@@ -10,7 +10,7 @@
# This is done with:
#
# exe main : main.cpp [ cast _ moccable-cpp : widget.cpp ] ;
-#
+#
# Boost.Build will assign target type CPP to both main.cpp and widget.cpp.
# Then, the cast rule will change target type of widget.cpp to
# MOCCABLE-CPP, and Qt support will run MOC tool as part of build process.
@@ -22,22 +22,25 @@
# cast, as defining a new target type + generator for that type is somewhat
# simpler than defining a main target rule.
-import project type targets ;
import "class" : new ;
import errors ;
+import project ;
import property-set ;
+import targets ;
+import type ;
+
class cast-target-class : typed-target
{
import type ;
-
- rule __init__ ( name : project : type
- : sources * : requirements * : default-build * : usage-requirements * )
+
+ rule __init__ ( name : project : type : sources * : requirements *
+ : default-build * : usage-requirements * )
{
- typed-target.__init__ $(name) : $(project) : $(type)
- : $(sources) : $(requirements) : $(default-build) : $(usage-requirements) ;
+ typed-target.__init__ $(name) : $(project) : $(type) : $(sources)
+ : $(requirements) : $(default-build) : $(usage-requirements) ;
}
-
+
rule construct ( name : source-targets * : property-set )
{
local result ;
@@ -47,43 +50,42 @@
{
ECHO "error: source to the 'cast' rule is not a file!" ;
EXIT ;
- }
+ }
if [ $(s).action ]
{
ECHO "error: only non-derived target are allowed for 'cast'." ;
ECHO "error: when building " [ full-name ] ;
EXIT ;
- }
+ }
local r = [ $(s).clone-with-different-type $(self.type) ] ;
result += [ virtual-target.register $(r) ] ;
}
-
return [ property-set.empty ] $(result) ;
- }
-
+ }
}
-rule cast ( name type : sources * : requirements * : default-build *
+
+rule cast ( name type : sources * : requirements * : default-build *
: usage-requirements * )
{
local project = [ project.current ] ;
-
+
local real-type = [ type.type-from-rule-name $(type) ] ;
if ! $(real-type)
{
- errors.user-error "No type corresponds to main target rule nam '$(type)'"
- : "Hint: try lowercase name" ;
+ errors.user-error "No type corresponds to main target rule name '$(type)'"
+ : "Hint: try lowercase name" ;
}
-
-
- # This is a circular module dependency, so it must be imported here
+
+ # This is a circular module dependency so it must be imported here.
import targets ;
targets.main-target-alternative
- [ new cast-target-class $(name) : $(project) : $(real-type)
- : [ targets.main-target-sources $(sources) : $(name) ]
- : [ targets.main-target-requirements $(requirements) : $(project) ]
- : [ targets.main-target-default-build $(default-build) : $(project) ]
- : [ targets.main-target-usage-requirements $(usage-requirements) : $(project) ]
- ] ;
+ [ new cast-target-class $(name) : $(project) : $(real-type)
+ : [ targets.main-target-sources $(sources) : $(name) ]
+ : [ targets.main-target-requirements $(requirements) : $(project) ]
+ : [ targets.main-target-default-build $(default-build) : $(project) ]
+ : [ targets.main-target-usage-requirements $(usage-requirements) : $(project) ]
+ ] ;
}
+
IMPORT $(__name__) : cast : : cast ;
Modified: branches/release/tools/build/v2/tools/common.jam
==============================================================================
--- branches/release/tools/build/v2/tools/common.jam (original)
+++ branches/release/tools/build/v2/tools/common.jam 2008-03-15 14:55:28 EDT (Sat, 15 Mar 2008)
@@ -1,9 +1,9 @@
-# Copyright 2003, 2005 Dave Abrahams
-# Copyright 2005, 2006 Rene Rivera
-# Copyright 2005 Toon Knapen
-# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+# Copyright 2003, 2005 Dave Abrahams
+# Copyright 2005, 2006 Rene Rivera
+# Copyright 2005 Toon Knapen
+# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
# Provides actions common to all toolsets, such as making directories and
# removing files.
@@ -31,41 +31,43 @@
# Configurations
#
-# The following class helps to manage toolset configurations. Each configuration
-# has unique ID and one or more parameters. A typical example of unique ID is
-# a condition generated by 'common.check-init-parameters' rule. Other kinds of
-# ID can be used. Parameters may include any details about the configuration like
-# 'command', 'path', etc.
-#
-# A configuration may be in one of two states:
-#
-# - registered - a toolset configuration is registered (by autodetection code
-# for instance) but is not used. I.e. 'toolset.using' wasn't yet been called
-# for this configuration.
-# - used - once called 'toolset.using' marks the configuration as 'used'.
-#
-# The main difference between the states is that while a configuration is
-# 'registered' its options can be freely changed. This is useful in particular
-# for autodetection code - all detected configurations may be safely overwritten
-# by a user.
+# The following class helps to manage toolset configurations. Each configuration
+# has a unique ID and one or more parameters. A typical example of a unique ID
+# is a condition generated by 'common.check-init-parameters' rule. Other kinds
+# of IDs can be used. Parameters may include any details about the configuration
+# like 'command', 'path', etc.
+#
+# A toolset configuration may be in one of the following states:
+#
+# - registered
+# Configuration has been registered (e.g. by autodetection code) but has
+# not yet been marked as used, i.e. 'toolset.using' rule has not yet been
+# called for it.
+# - used
+# Once called, the 'toolset.using' rule marks the configuration as 'used'.
+#
+# The main difference between the states above is that while a configuration is
+# 'registered' its options can be freely changed. This is useful in particular
+# for autodetection code - all detected configurations may be safely overwritten
+# by user code.
class configurations
{
- import errors : error ;
+ import errors ;
rule __init__ ( )
{
}
-
- # Registers a configuration.
+
+ # Registers a configuration.
#
- # Returns 'true' if the configuration has been added and an empty value if
+ # Returns 'true' if the configuration has been added and an empty value if
# it already exists. Reports an error if the configuration is 'used'.
rule register ( id )
{
- if $(id) in $(self.used)
+ if $(id) in $(self.used)
{
- error "common: the configuration '$(id)' is in use" ;
+ errors.error "common: the configuration '$(id)' is in use" ;
}
local retval ;
@@ -74,28 +76,28 @@
{
self.all += $(id) ;
- # indicate that a new configuration has been added
+ # Indicate that a new configuration has been added.
retval = true ;
}
return $(retval) ;
}
- # Mark a configuration as 'used'.
+ # Mark a configuration as 'used'.
#
- # Returns 'true' if the state of the configuration has been changed to
+ # Returns 'true' if the state of the configuration has been changed to
# 'used' and an empty value if the state was not changed. Reports an error
# if the configuration isn't known.
rule use ( id )
{
if ! $(id) in $(self.all)
{
- error "common: the configuration '$(id)' is not known" ;
+ errors.error "common: the configuration '$(id)' is not known" ;
}
local retval ;
- if ! $(id) in $(self.used)
+ if ! $(id) in $(self.used)
{
self.used += $(id) ;
@@ -117,31 +119,30 @@
{
return $(self.used) ;
}
-
+
# Returns the value of a configuration parameter.
rule get ( id : param )
{
- return $(self.$(param).$(id)) ;
+ return $(self.$(param).$(id)) ;
}
# Sets the value of a configuration parameter.
rule set ( id : param : value * )
{
- self.$(param).$(id) = $(value) ;
+ self.$(param).$(id) = $(value) ;
}
}
-# The rule checks toolset parameters. Each trailing parameter
-# should be a pair of parameter name and parameter value.
-# The rule will check that each parameter either has value in
-# each invocation, or has no value in each invocation. Also,
-# the rule will check that the combination of all parameter values is
-# unique in all invocations.
+# The rule for checking toolset parameters. Trailing parameters should all be
+# parameter name/value pairs. The rule will check that each parameter either has
+# a value in each invocation or has no value in each invocation. Also, the rule
+# will check that the combination of all parameter values is unique in all
+# invocations.
#
-# Each parameter name corresponds to subfeature. This rule will declare subfeature
-# the first time non-empty parameter value is passed, and will extend it with
-# all the values.
+# Each parameter name corresponds to a subfeature. This rule will declare
+# a subfeature the first time a non-empty parameter value is passed and will
+# extend it with all the values.
#
# The return value from this rule is a condition to be used for flags settings.
rule check-init-parameters ( toolset : * )
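A hedged sketch of the intended calling pattern (the toolset name and parameters here are illustrative): a toolset's 'init' rule passes name/value pairs and reuses the returned condition for its flag settings.

    rule init ( version ? : command * : options * )
    {
        local condition = [ common.check-init-parameters mytoolset : version $(version) ] ;
        common.handle-options mytoolset : $(condition) : $(command) : $(options) ;
    }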
@@ -152,8 +153,8 @@
{
local name = $($(index)[1]) ;
local value = $($(index)[2]) ;
-
- if $(value)-is-specified
+
+ if $(value)-is-not-empty
{
condition = $(condition)-$(value) ;
if $(.had-unspecified-value.$(toolset).$(name))
@@ -163,37 +164,36 @@
"no value was specified in earlier initialization" :
"an explicit value is specified now" ;
}
- # The below logic is for intel compiler. It calls this rule
- # with 'intel-linux' and 'intel-win' as toolset, so we need to
- # get the base part of toolset name.
- # We can't pass 'intel' as toolset, because it that case it will
- # be impossible to register versionles intel-linux and
- # intel-win of specific version.
+ # The logic below is for the intel compiler. It calls this rule with
+ # 'intel-linux' and 'intel-win' as the toolset, so we need to get the
+ # base part of the toolset name. We can't pass 'intel' as the toolset
+ # because in that case it would be impossible to register versionless
+ # intel-linux and intel-win toolsets of a specific version.
local t = $(toolset) ;
local m = [ MATCH ([^-]*)- : $(toolset) ] ;
if $(m)
{
t = $(m[1]) ;
- }
- if ! $(.had-value.$(toolset).$(name))
+ }
+ if ! $(.had-value.$(toolset).$(name))
{
if ! $(.declared-subfeature.$(t).$(name))
{
feature.subfeature toolset $(t) : $(name) : : propagated ;
.declared-subfeature.$(t).$(name) = true ;
- }
+ }
.had-value.$(toolset).$(name) = true ;
}
feature.extend-subfeature toolset $(t) : $(name) : $(value) ;
}
else
{
- if $(.had-value.$(toolset).$(name))
+ if $(.had-value.$(toolset).$(name))
{
- errors.user-error
+ errors.user-error
"$(toolset) initialization: parameter '$(name)' inconsistent" :
"an explicit value was specified in an earlier initialization" :
- "no value is specified now" ;
+ "no value is specified now" ;
}
.had-unspecified-value.$(toolset).$(name) = true ;
}
@@ -201,23 +201,24 @@
}
if $(sig) in $(.all-signatures)
{
- local message =
- "duplicate initialization of $(toolset) with the following parameters: " ;
+ local message =
+ "duplicate initialization of $(toolset) with the following parameters: " ;
for local index in 2 3 4 5 6 7 8 9
{
local p = $($(index)) ;
if $(p)
{
message += "$(p[1]) = $(p[2]:E=<unspecified>)" ;
- }
+ }
}
message += "previous initialization at $(.init-loc.$(sig))" ;
- errors.user-error $(message[1]) : $(message[2]) : $(message[3]) : $(message[4])
- : $(message[5]) : $(message[6]) : $(message[7]) : $(message[8]) ;
- }
+ errors.user-error
+ $(message[1]) : $(message[2]) : $(message[3]) : $(message[4]) :
+ $(message[5]) : $(message[6]) : $(message[7]) : $(message[8]) ;
+ }
.all-signatures += $(sig) ;
.init-loc.$(sig) = [ errors.nearest-user-location ] ;
-
+
if $(.show-configuration)
{
ECHO notice: $(condition) ;
@@ -225,57 +226,68 @@
return $(condition) ;
}
-# A helper rule to get the command to invoke some tool.
-# In 'user-provided-command' is not given, tries to find binary
-# named 'tool' in PATH and in the passed 'additional-path'. Otherwise,
-# verified that the first element of 'user-provided-command' is an
-# existing program.
-#
+
+# A helper rule to get the command to invoke some tool. If
+# 'user-provided-command' is not given, tries to find binary named 'tool' in
+# PATH and in the passed 'additional-path'. Otherwise, verifies that the first
+# element of 'user-provided-command' is an existing program.
#
# This rule returns the command to be used when invoking the tool. If we can't
-# find the tool, a warning is issued.
-# If 'path-last' is specified, PATH is checked after 'additional-paths' when
-# searching to 'tool'.
-rule get-invocation-command (
+# find the tool, a warning is issued. If 'path-last' is specified, PATH is
+# checked after 'additional-paths' when searching for 'tool'.
+rule get-invocation-command-nodefault (
toolset : tool : user-provided-command * : additional-paths * : path-last ? )
{
local command ;
if ! $(user-provided-command)
{
command = [ common.find-tool $(tool) : $(additional-paths) : $(path-last) ] ;
- if ! $(command)
+ if ! $(command) && $(.debug-configuration)
{
- if $(.debug-configuration)
- {
- ECHO "warning: toolset $(toolset) initialization: can't find tool $(tool)" ;
- ECHO "warning: initialized from" [ errors.nearest-user-location ] ;
- }
- command = $(tool) ;
- }
+ ECHO "warning: toolset $(toolset) initialization: can't find tool $(tool)" ;
+ ECHO "warning: initialized from" [ errors.nearest-user-location ] ;
+ }
}
else
{
- command = [ common.check-tool $(user-provided-command) ] ;
- if ! $(command)
+ command = [ common.check-tool $(user-provided-command) ] ;
+ if ! $(command) && $(.debug-configuration)
{
- if $(.debug-configuration)
- {
- ECHO "warning: toolset $(toolset) initialization: " ;
- ECHO "warning: can't find user-provided command " '$(user-provided-command)' ;
- ECHO "warning: initialized from" [ errors.nearest-user-location ] ;
- }
- # It's possible, in theory, that user-provided command is OK, but we're
- # not smart enough to understand that.
- command = $(user-provided-command) ;
- }
+ ECHO "warning: toolset $(toolset) initialization: " ;
+ ECHO "warning: can't find user-provided command " '$(user-provided-command)' ;
+ ECHO "warning: initialized from" [ errors.nearest-user-location ] ;
+ }
}
return $(command) ;
}
-# Given an invocation command,
-# return the absolute path to the command. This works even if commnad
-# has not path element and is present in PATH.
+
+# Same as get-invocation-command-nodefault, except that if no tool is found,
+# returns either the user-provided-command, if present, or the 'tool' parameter.
+rule get-invocation-command (
+ toolset : tool : user-provided-command * : additional-paths * : path-last ? )
+{
+ local result = [ get-invocation-command-nodefault $(toolset) : $(tool)
+ : $(user-provided-command) : $(additional-paths) : $(path-last) ] ;
+
+ if ! $(result)
+ {
+ if $(user-provided-command)
+ {
+ result = $(user-provided-command) ;
+ }
+ else
+ {
+ result = $(tool) ;
+ }
+ }
+ return $(result) ;
+}
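An illustrative call (the toolset and tool names are hypothetical), showing the shape of the lookup a toolset's initialization code performs:

    local command = [ common.get-invocation-command mytoolset : mycc
        : $(user-provided-command) : $(additional-paths) ] ;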
+
+
+# Given an invocation command, return the absolute path to the command. This
+# works even if the command has no path element and was found on the PATH.
rule get-absolute-tool-path ( command )
{
if $(command:D)
@@ -286,22 +298,20 @@
{
local m = [ GLOB [ modules.peek : PATH Path path ] : $(command) $(command).exe ] ;
return $(m[1]:D) ;
- }
+ }
}
-
# Attempts to find tool (binary) named 'name' in PATH and in 'additional-paths'.
-# If found in PATH, returns 'name'.
-# If found in additional paths, returns absolute name. If the tool is found
-# in several directories, return all paths.
-# Otherwise, returns empty string.
-# If 'path-last' is specified, PATH is searched after 'additional-paths'.
+# If found in PATH, returns 'name' and if found in additional paths, returns
+# absolute name. If the tool is found in several directories, returns all paths.
+# Otherwise, returns an empty string. If 'path-last' is specified, PATH is
+# searched after 'additional-paths'.
rule find-tool ( name : additional-paths * : path-last ? )
{
local path = [ path.programs-path ] ;
local match = [ path.glob $(path) : $(name) $(name).exe ] ;
- local additional-match = [ path.glob $(additional-paths) : $(name) $(name).exe ] ;
+ local additional-match = [ path.glob $(additional-paths) : $(name) $(name).exe ] ;
local result ;
if $(path-last)
@@ -324,65 +334,65 @@
}
}
if $(result)
- {
+ {
return [ path.native $(result[1]) ] ;
- }
+ }
}
-# Checks if 'command' can be found either in path
-# or is a full name to an existing file.
+
+# Checks if 'command' can be found either in path or is a full name to an
+# existing file.
rule check-tool-aux ( command )
{
if $(command:D)
{
if [ path.exists $(command) ]
- # Both NT and Cygwin will run .exe files by their unqualified names
- || [ os.on-windows ] && [ path.exists $(command).exe ]
- # Only NT will run .bat files by their unqualified names
+ # Both NT and Cygwin will run .exe files by their unqualified names.
+ || [ os.on-windows ] && [ path.exists $(command).exe ]
+ # Only NT will run .bat files by their unqualified names.
|| [ os.name ] = NT && [ path.exists $(command).bat ]
{
return $(command) ;
- }
+ }
}
else
{
if [ GLOB [ modules.peek : PATH Path path ] : $(command) ]
{
return $(command) ;
- }
- }
+ }
+ }
}
-# Checks that a tool can be invoked by 'command'.
-# If command is not an absolute path, checks if it can be found in 'path'.
-# If comand is absolute path, check that it exists. Returns 'command'
-# if ok and empty string otherwise.
+# Checks that a tool can be invoked by 'command'. If command is not an absolute
+# path, checks whether it can be found in 'path'. If command is an absolute
+# path, checks that it exists. Returns 'command' if OK or an empty string
+# otherwise.
rule check-tool ( xcommand + )
{
- if [ check-tool-aux $(xcommand[1]) ]
- || [ check-tool-aux $(xcommand[-1]) ]
+ if [ check-tool-aux $(xcommand[1]) ] ||
+ [ check-tool-aux $(xcommand[-1]) ]
{
return $(xcommand) ;
}
}
-# Handle common options for toolset, specifically sets the following
-# flag variables:
+
+# Handle common options for toolset, specifically sets the following flag
+# variables:
# - CONFIG_COMMAND to 'command'
-# - OPTIONS for compile.c to the value of <cflags> in options
-# - OPTIONS for compile.c++ to the value of <cxxflags> in options
-# - OPTIOns for compile to the value of <compileflags> in options
-# - OPTIONs for link to the value of <linkflags> in options
+# - OPTIONS for compile.c to the value of <cflags> in options
+# - OPTIONS for compile.c++ to the value of <cxxflags> in options
+# - OPTIONS for compile to the value of <compileflags> in options
+# - OPTIONS for link to the value of <linkflags> in options
rule handle-options ( toolset : condition * : command * : options * )
{
if $(.debug-configuration)
{
ECHO "notice: will use '$(command)' for $(toolset), condition $(condition:E=(empty))" ;
}
-
- # The last parameter ('true') says it's OK to set flags for another
- # module,
+
+ # The last parameter ('true') says it's OK to set flags for another module.
toolset.flags $(toolset) CONFIG_COMMAND $(condition) : $(command) : unchecked ;
toolset.flags $(toolset).compile OPTIONS $(condition) :
[ feature.get-values <compileflags> : $(options) ] : unchecked ;
@@ -393,12 +403,11 @@
toolset.flags $(toolset).compile.fortran OPTIONS $(condition) :
[ feature.get-values <fflags> : $(options) ] : unchecked ;
toolset.flags $(toolset).link OPTIONS $(condition) :
- [ feature.get-values <linkflags> : $(options) ] : unchecked ;
+ [ feature.get-values <linkflags> : $(options) ] : unchecked ;
}
-# returns the location of the "program files" directory on a windows
-# platform
+# Returns the location of the "program files" directory on a windows platform.
rule get-program-files-dir ( )
{
local ProgramFiles = [ modules.peek : ProgramFiles ] ;
@@ -413,12 +422,13 @@
return $(ProgramFiles) ;
}
+
if [ os.name ] = NT
{
RM = del /f /q ;
CP = copy ;
IGNORE = "2>nul >nul & setlocal" ;
- LN ?= $(CP) ;
+ LN ?= $(CP) ;
}
else
{
@@ -427,28 +437,29 @@
LN = ln ;
}
-nl = "
-" ;
-rule rm-command ( )
+rule rm-command ( )
{
- return $(RM) ;
+ return $(RM) ;
}
+
rule copy-command ( )
{
return $(CP) ;
}
-# Returns the command needed to set the environment variable on the
-# current platform. The variable setting persists through all
-# following commands and is visible in the environment seen by
-# subsequently executed commands. In other words, on Unix systems,
-# the variable is exported, which is consistent with the only possible
-# behavior on Windows systems.
+# Returns the command needed to set an environment variable on the current
+# platform. The variable setting persists through all following commands and is
+# visible in the environment seen by subsequently executed commands. In other
+# words, on Unix systems, the variable is exported, which is consistent with the
+# only possible behavior on Windows systems.
rule variable-setting-command ( variable : value )
-{
+{
+ local nl = "
+" ;
+
if [ os.name ] = NT
{
return "set $(variable)=$(value)$(nl)" ;
@@ -459,30 +470,28 @@
}
}
-# Returns a command that sets the named shell path variable to the
-# given NATIVE paths to on the current platform.
+
+# Returns a command that sets a named shell path variable to the given NATIVE
+# paths on the current platform.
rule path-variable-setting-command ( variable : paths * )
-{
+{
local sep = [ os.path-separator ] ;
return [ variable-setting-command $(variable) : $(paths:J=$(sep)) ] ;
}
-# Returns a command that prepends the given paths to the named path
-# variable on the current platform.
+
+# Returns a command that prepends the given paths to the named path variable on
+# the current platform.
rule prepend-path-variable-command ( variable : paths * )
-{
- return [
- path-variable-setting-command $(variable)
- : $(paths) [ os.expand-variable $(variable) ]
- ] ;
+{
+ return [ path-variable-setting-command $(variable)
+ : $(paths) [ os.expand-variable $(variable) ] ] ;
}
-# Return a command which can create a file. If 'r' is result of invocation,
-# then
-# r foobar
-# will create foobar with unspecified content. What happens if file already
-# exists is unspecified.
+# Return a command which can create a file. If 'r' is the result of invocation,
+# then 'r foobar' will create foobar with unspecified content. What happens if
+# the file already exists is unspecified.
rule file-creation-command ( )
{
if [ modules.peek : NT ]
@@ -495,10 +504,10 @@
}
}
-
-# Returns a command that may be used for 'touching' files.
-# It is not a real 'touch' command on NT because it adds an empty line at
-# the end of file but it works with source files
+
+# Returns a command that may be used for 'touching' files. It is not a real
+# 'touch' command on NT because it adds an empty line at the end of file but it
+# works with source files.
rule file-touch-command ( )
{
if [ os.name ] in NT
@@ -514,18 +523,14 @@
rule MkDir
{
- # If dir exists, don't update it
- # Do this even for $(DOT).
-
+ # If dir exists, don't update it. Do this even for $(DOT).
NOUPDATE $(<) ;
if $(<) != $(DOT) && ! $($(<)-mkdir)
{
- local s ;
-
- # Cheesy gate to prevent multiple invocations on same dir
- # MkDir1 has the actions
- # Arrange for jam dirs
+ # Cheesy gate to prevent multiple invocations on same dir.
+ # MkDir1 has the actions.
+ # Arrange for jam dirs.
$(<)-mkdir = true ;
MkDir1 $(<) ;
@@ -534,7 +539,7 @@
# Recursively make parent directories.
# $(<:P) = $(<)'s parent, & we recurse until root
- s = $(<:P) ;
+ local s = $(<:P) ;
if $(NT)
{
@@ -544,7 +549,7 @@
case *:\\ : s = ;
}
}
-
+
if $(s) && $(s) != $(<)
{
Depends $(<) : $(s) ;
@@ -557,18 +562,21 @@
}
}
+
actions MkDir1
{
mkdir "$(<)"
}
+
actions piecemeal together existing Clean
{
$(RM) "$(>)"
}
-rule copy
-{
+
+rule copy
+{
}
@@ -577,28 +585,32 @@
$(CP) "$(>)" "$(<)"
}
+
rule RmTemps
{
}
+
+
actions quietly updated piecemeal together RmTemps
{
$(RM) "$(>)" $(IGNORE)
}
+
actions hard-link
{
$(RM) "$(<)" 2$(NULL_OUT) $(NULL_OUT)
- $(LN) "$(>)" "$(<)" $(NULL_OUT)
+ $(LN) "$(>)" "$(<)" $(NULL_OUT)
}
# Given a target, as given to a custom tag rule, returns a string formatted
# according to the passed format. Format is a list of properties that is
-# represented in the result. For each element of format the corresponding
-# target information is obtained and added to the result string.
-# For all, but the literal, the format value is taken as the as string to
-# prepend to the output to join the item to the rest of the result. If not
-# given "-" is used as a joiner.
+# represented in the result. For each element of format the corresponding target
+# information is obtained and added to the result string. For all but the
+# literal, the format value is taken as the string to prepend to the output to
+# join the item to the rest of the result. If not given, "-" is used as the
+# joiner.
#
# The format options can be:
#
@@ -629,8 +641,8 @@
# boost_thread-vc80-mt-gd-1_33.dll, or
# boost_regex-vc80-gd-1_33.dll
#
-# The returned name also has the target type specific prefix and suffix
-# which puts it in a ready form to use as the value from a custom tag rule.
+# The returned name also has the target type specific prefix and suffix which
+# puts it in a ready form to use as the value from a custom tag rule.
#
rule format-name ( format * : name : type ? : property-set )
{
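A sketch of how a custom <tag> rule might consume this; the rule name and the particular format list are illustrative, not mandated by this file:

    rule my-tag ( name : type ? : property-set )
    {
        return [ common.format-name <base> <toolset> <threading> <runtime>
            : $(name) : $(type) : $(property-set) ] ;
    }
    # e.g.:  exe app : app.cpp : <tag>@my-tag ;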
@@ -643,19 +655,19 @@
{
case <base> :
result += $(name:B) ;
-
+
case <toolset> :
result += [ join-tag $(f:G=) :
[ toolset-tag $(name) : $(type) : $(property-set) ] ] ;
-
+
case <threading> :
result += [ join-tag $(f:G=) :
[ threading-tag $(name) : $(type) : $(property-set) ] ] ;
-
+
case <runtime> :
result += [ join-tag $(f:G=) :
[ runtime-tag $(name) : $(type) : $(property-set) ] ] ;
-
+
case <version:*> :
local key = [ MATCH <version:(.*)> : $(f:G) ] ;
local version = [ $(property-set).get <$(key)> ] ;
@@ -663,7 +675,7 @@
version = [ MATCH "^([^.]+)[.]([^.]+)[.]?([^.]*)"
: $(version) ] ;
result += [ join-tag $(f:G=) : $(version[1])_$(version[2]) ] ;
-
+
case <property:*> :
local key = [ MATCH <property:(.*)> : $(f:G) ] ;
local p = [ $(property-set).get [ MATCH <property:(.*)> : $(f:G) ] ] ;
@@ -671,7 +683,7 @@
{
result += [ join-tag $(f:G=) : $(p) ] ;
}
-
+
case * :
result += $(f:G=) ;
}
@@ -682,16 +694,18 @@
}
}
+
local rule join-tag ( joiner ? : tag ? )
{
    if ! $(joiner) { joiner = - ; }
return $(joiner)$(tag) ;
}
+
local rule toolset-tag ( name : type ? : property-set )
{
local tag = ;
-
+
local properties = [ $(property-set).raw ] ;
switch [ $(property-set).get <toolset> ]
{
@@ -708,7 +722,7 @@
case * : tag += gcc ;
}
}
- case intel :
+ case intel :
if [ $(property-set).get <toolset-intel:platform> ] = win
{
tag += iw ;
@@ -729,8 +743,7 @@
}
local version = [ MATCH "<toolset.*version>([0123456789]+)[.]([0123456789]*)"
: $(properties) ] ;
- # For historical reasons, vc6.0 and vc7.0 use different
- # naming.
+ # For historical reasons, vc6.0 and vc7.0 use different naming.
if $(tag) = vc
{
if $(version[1]) = 6
@@ -743,88 +756,88 @@
version = 7 ;
}
}
- # On intel, version is not added, because it does not
- # matter and it's the version of vc used as backend
- # that matters. Ideally, we'd encode the backend
- # version but that will break compatibility with
- # V1.
+ # On intel, version is not added, because it does not matter and it's the
+ # version of vc used as backend that matters. Ideally, we'd encode the
+ # backend version but that would break compatibility with V1.
if $(tag) = iw
{
version = ;
}
-
- # On borland, version is not added for compatibility
- # with V1.
+
+ # On borland, version is not added for compatibility with V1.
if $(tag) = bcb
{
version = ;
}
-
+
tag += $(version) ;
-
+
return $(tag:J=) ;
}
+
local rule threading-tag ( name : type ? : property-set )
{
local tag = ;
local properties = [ $(property-set).raw ] ;
if <threading>multi in $(properties) { tag = mt ; }
-
+
return $(tag:J=) ;
}
+
local rule runtime-tag ( name : type ? : property-set )
{
local tag = ;
-
+
local properties = [ $(property-set).raw ] ;
if <runtime-link>static in $(properties) { tag += s ; }
-
- # This is ugly thing. In V1, there's a code to automatically
- # detect which properties affect a target. So, if
- # <runtime-debugging> does not affect gcc toolset, the
- # tag rules won't even see <runtime-debugging>.
- # Similar functionality in V2 is not implemented yet, so we just
- # check for toolsets which are know to care about runtime debug
- if <toolset>msvc in $(properties)
+
+ # This is an ugly thing. In V1, there's code to automatically detect which
+ # properties affect a target. So, if <runtime-debugging> does not affect the
+ # gcc toolset, the tag rules won't even see <runtime-debugging>. Similar
+ # functionality in V2 is not implemented yet, so we just check for toolsets
+ # which are known to care about runtime debugging.
+ if <toolset>msvc in $(properties)
|| <stdlib>stlport in $(properties)
+ || <toolset-intel:platform>win in $(properties)
{
- if <runtime-debugging>on in $(properties) { tag += g ; }
+ if <runtime-debugging>on in $(properties) { tag += g ; }
}
-
+
if <python-debugging>on in $(properties) { tag += y ; }
if <variant>debug in $(properties) { tag += d ; }
if <stdlib>stlport in $(properties) { tag += p ; }
if <stdlib-stlport:iostream>hostios in $(properties) { tag += n ; }
-
+
return $(tag:J=) ;
}
-rule __test__ ( ) {
+rule __test__ ( )
+{
import assert ;
-
- local save-os = [ modules.peek os : name ] ;
-
- modules.poke os : .name : LINUX ;
-
+
local nl = "
" ;
-
+
+ local save-os = [ modules.peek os : name ] ;
+
+ modules.poke os : .name : LINUX ;
+
assert.result "PATH=foo:bar:baz$(nl)export PATH$(nl)"
- : path-variable-setting-command PATH : foo bar baz ;
-
+ : path-variable-setting-command PATH : foo bar baz ;
+
assert.result "PATH=foo:bar:$PATH$(nl)export PATH$(nl)"
- : prepend-path-variable-command PATH : foo bar ;
-
+ : prepend-path-variable-command PATH : foo bar ;
+
modules.poke os : .name : NT ;
-
+
assert.result "set PATH=foo;bar;baz$(nl)"
- : path-variable-setting-command PATH : foo bar baz ;
-
+ : path-variable-setting-command PATH : foo bar baz ;
+
assert.result "set PATH=foo;bar;%PATH%$(nl)"
- : prepend-path-variable-command PATH : foo bar ;
+ : prepend-path-variable-command PATH : foo bar ;
- modules.poke os : .name : $(save-os) ;
-}
+ modules.poke os : .name : $(save-os) ;
+}
Modified: branches/release/tools/build/v2/tools/darwin.jam
==============================================================================
--- branches/release/tools/build/v2/tools/darwin.jam (original)
+++ branches/release/tools/build/v2/tools/darwin.jam 2008-03-15 14:55:28 EDT (Sat, 15 Mar 2008)
@@ -1,6 +1,9 @@
# Copyright 2003 Christopher Currie
# Copyright 2006 Dave Abrahams
# Copyright 2003, 2004, 2005, 2006 Vladimir Prus
+# Copyright 2005-2007 Mat Marcus
+# Copyright 2005-2007 Adobe Systems Incorporated
+# Copyright 2007 Rene Rivera
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
@@ -12,6 +15,7 @@
import type ;
import common ;
import generators ;
+import path : basename ;
feature.extend toolset : darwin ;
import gcc ;
@@ -20,12 +24,19 @@
generators.override darwin.prebuilt : builtin.prebuilt ;
generators.override darwin.searched-lib-generator : searched-lib-generator ;
+# Override default do-nothing generators.
+generators.override darwin.compile.c.pch : pch.default-c-pch-generator ;
+generators.override darwin.compile.c++.pch : pch.default-cpp-pch-generator ;
+
toolset.inherit-rules darwin : gcc ;
toolset.inherit-flags darwin : gcc
- : # On Darwin, static runtime is just not supported. So don't inherit
- # any flags settings for <runtime-link>static
- <runtime-link>static
- ;
+ : <runtime-link>static
+ <architecture>x86/<address-model>32
+ <architecture>x86/<address-model>64
+ <architecture>x86/<instruction-set>
+ <architecture>power/<address-model>32
+ <architecture>power/<address-model>64
+ <architecture>power/<instruction-set> ;
# No additional initialization should be necessary
rule init ( version ? : command * : options * )
@@ -49,8 +60,109 @@
feature framework : : free ;
+# The following adds objective-c support to darwin.
+# Thanks to http://thread.gmane.org/gmane.comp.lib.boost.build/13759
+
+type.register OBJECTIVE_C : m ;
+type.register OBJECTIVE_CPP : mm ;
+
+generators.register-c-compiler darwin.compile.m : OBJECTIVE_C : OBJ : <toolset>darwin ;
+generators.register-c-compiler darwin.compile.mm : OBJECTIVE_CPP : OBJ : <toolset>darwin ;
+
+rule compile.m
+{
+ LANG on $(<) = "-x objective-c" ;
+}
+
+actions compile.m
+{
+ "$(CONFIG_COMMAND)" $(LANG) $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+rule compile.mm
+{
+ LANG on $(<) = "-x objective-c++" ;
+}
+
+actions compile.mm
+{
+ "$(CONFIG_COMMAND)" $(LANG) $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
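With these generators registered, Objective-C and Objective-C++ sources can be listed directly in ordinary targets when building with the darwin toolset (the file names below are hypothetical):

    exe MyViewer : main.mm helpers.m ;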
+# Determine the MacOSX SDK versions installed and their locations.
+local rule available-macosx-versions ( )
+{
+ local sdks = [ GLOB /Developer/SDKs : MacOSX* ] ;
+ for local sdk in $(sdks)
+ {
+ local sdk-version = [ MATCH ([0-9]+)[.]([0-9]+)[.]?([0-9]+)? : $(sdk:D=) ] ;
+ sdk-version = $(sdk-version:J=.) ;
+ if $(sdk-version)
+ {
+ .macosx-sdk = $(sdk-version) $(.macosx-sdk) ;
+ .macosx-sdk.$(sdk-version) = $(sdk) ;
+ }
+ }
+ return $(.macosx-sdk) ;
+}
+
+# Add only the found SDK versions to the allowed set. The "latest" SDKs
+# will be first in the list, and hence the default.
+feature macosx-version
+ : [ available-macosx-versions ]
+ : propagated link-incompatible symmetric ;
+if 10.4 in [ feature.values macosx-version ]
+{
+ feature.set-default macosx-version : 10.4 ;
+}
+
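An illustrative build request using the new propagated feature, assuming the 10.4 SDK was among those detected:

    exe app : app.cpp : <macosx-version>10.4 ;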
+# Add the options for all the found SDKs.
+for local sdk in $(.macosx-sdk)
+{
+ flags darwin.compile OPTIONS <macosx-version>$(sdk) :
+ -isysroot $(.macosx-sdk.$(sdk))
+ -mmacosx-version-min=$(sdk)
+ ;
+ flags darwin.link OPTIONS <macosx-version>$(sdk) :
+ -isysroot $(.macosx-sdk.$(sdk))
+ -mmacosx-version-min=$(sdk)
+ ;
+}
+
+# Add option selection for combined and specific architecture combinations.
+
+local rule arch-addr-flags ( toolset variable
+ : architecture : address-model + : values + : default ? )
+{
+ if $(default)
+ {
+ flags $(toolset) $(variable)
+ <architecture>$(architecture)/<address-model>
+ : $(values) ;
+ }
+ flags $(toolset) $(variable)
+ <architecture>/<address-model>$(address-model)
+ <architecture>$(architecture)/<address-model>$(address-model)
+ : $(values) ;
+}
+
+arch-addr-flags darwin OPTIONS : combined : 32 : -arch i386 -arch ppc : default ;
+arch-addr-flags darwin OPTIONS : combined : 64 : -arch x86_64 -arch ppc64 ;
+
+arch-addr-flags darwin OPTIONS : x86 : 32 : -arch i386 : default ;
+arch-addr-flags darwin OPTIONS : x86 : 64 : -arch x86_64 ;
+
+arch-addr-flags darwin OPTIONS : power : 32 : -arch ppc : default ;
+arch-addr-flags darwin OPTIONS : power : 64 : -arch ppc64 ;
+
+
+flags darwin.link OPTIONS <runtime-link>static
+ : -nodefaultlibs -shared-libgcc -lstdc++-static -lgcc_eh -lgcc -lSystem ;
+
+flags darwin.link OPTIONS <variant>release : -Wl,-dead_strip -no_dead_strip_inits_and_terms ;
+
flags darwin.compile OPTIONS <link>shared : -dynamic ;
-flags darwin.compile OPTIONS : -Wno-long-double -no-cpp-precomp ;
+flags darwin.compile OPTIONS : -Wno-long-double -no-cpp-precomp -gdwarf-2 ;
flags darwin.link FRAMEWORK <framework> ;
@@ -63,9 +175,17 @@
# set up the -F option to include the paths to any frameworks used.
local rule prepare-framework-path ( target + )
{
+ # The -framework option only takes the basename of the framework.
+ # The -F option specifies the directories where a framework
+ # is searched for. So, if we find a <framework> feature
+ # with some path, we need to generate the proper -F option.
local framework-path = [ on $(target) return $(FRAMEWORK:D) ] ;
-
- FRAMEWORK_PATH on $(target) += -F$(framework-path) ;
+
+ # Be sure to generate no -F if there's no path.
+ if $(framework-path) != ""
+ {
+ FRAMEWORK_PATH on $(target) += -F$(framework-path) ;
+ }
}
rule link
@@ -86,11 +206,12 @@
actions link.dll bind LIBRARIES
{
- $(CONFIG_COMMAND) -dynamiclib -L"$(LINKPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) $(FRAMEWORK_PATH) -framework$(_)$(FRAMEWORK:D=:S=) $(OPTIONS) $(USER_OPTIONS)
+ $(CONFIG_COMMAND) -dynamiclib -install_name "$(<:B)$(<:S)" -L"$(LINKPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) $(FRAMEWORK_PATH) -framework$(_)$(FRAMEWORK:D=:S=) $(OPTIONS) $(USER_OPTIONS)
}
+# We use libtool instead of ar to support universal binary linking
+# TODO: Find a way to use the underlying tools, i.e. lipo, to do this.
actions piecemeal archive
{
- ar -c -r -s $(ARFLAGS) "$(<:T)" "$(>:T)"
+ libtool -static -o "$(<:T)" $(ARFLAGS) "$(>:T)"
}
-
Modified: branches/release/tools/build/v2/tools/docutils.jam
==============================================================================
--- branches/release/tools/build/v2/tools/docutils.jam (original)
+++ branches/release/tools/build/v2/tools/docutils.jam 2008-03-15 14:55:28 EDT (Sat, 15 Mar 2008)
@@ -12,6 +12,7 @@
import toolset ;
import path ;
import feature : feature ;
+import property ;
.initialized = ;
@@ -58,10 +59,10 @@
rule html ( target : source : properties * )
{
- local command-prefix = "python "$(.docutils-dir)/tools/ ;
if ! [ on $(target) return $(RST2XXX) ]
{
- RST2XXX on $(target) = $(command-prefix:E="")rst2html.py ;
+ local python-cmd = [ property.select <python.interpreter> : $(properties) ] ;
+ RST2XXX on $(target) = $(python-cmd:G=:E="python") $(.docutils-dir)/tools/rst2html.py ;
}
}
@@ -76,6 +77,6 @@
actions html
{
$(.setup)
- $(RST2XXX) $(COMMON-FLAGS) $(HTML-FLAGS) $(>) $(<)
+ "$(RST2XXX)" $(COMMON-FLAGS) $(HTML-FLAGS) $(>) $(<)
}
Modified: branches/release/tools/build/v2/tools/doxproc.py
==============================================================================
--- branches/release/tools/build/v2/tools/doxproc.py (original)
+++ branches/release/tools/build/v2/tools/doxproc.py 2008-03-15 14:55:28 EDT (Sat, 15 Mar 2008)
@@ -530,6 +530,12 @@
def _translate_sectiondef_private_func( self, sectiondef, target=None, **kwargs ):
return self._translate_sectiondef_func_(sectiondef,
name='private member functions',target=target,**kwargs)
+
+ #~ Translate:
+ #~ <sectiondef kind="user-defined"><header>...</header>...</sectiondef>
+ def _translate_sectiondef_user_defined( self, sectiondef, target=None, **kwargs ):
+ return self._translate_sectiondef_func_(sectiondef,
+ name=self._getChildData('header', root=sectiondef),target=target,**kwargs)
#~ Translate:
#~ <memberdef kind="typedef" id="?">
@@ -829,11 +835,11 @@
#~ and definitions so that lookup is unambiguous when reading in the definitions.
namespace_files = filter(
lambda x:
- os.path.basename(x).startswith('namespace_'),
+ os.path.basename(x).startswith('namespace'),
input)
decl_files = filter(
lambda x:
- not os.path.basename(x).startswith('namespace_') and not os.path.basename(x).startswith('_'),
+ not os.path.basename(x).startswith('namespace') and not os.path.basename(x).startswith('_'),
input)
for dox in namespace_files:
#~ print '--|',os.path.basename(dox)
Modified: branches/release/tools/build/v2/tools/doxygen.jam
==============================================================================
--- branches/release/tools/build/v2/tools/doxygen.jam (original)
+++ branches/release/tools/build/v2/tools/doxygen.jam 2008-03-15 14:55:28 EDT (Sat, 15 Mar 2008)
@@ -352,7 +352,7 @@
# Build an HTML directory from the sources.
local html-location = [ feature.get-values <location> : $(requirements) ] ;
local output-dir = [ path.root
- [ path.join $(html-location:E=html) [ $(project).get build-dir ] ]
+ [ path.join [ $(project).get build-dir ] $(html-location:E=html) ]
[ path.pwd ]
] ;
local output-dir-native = [ path.native $(output-dir) ] ;
Modified: branches/release/tools/build/v2/tools/gcc.jam
==============================================================================
--- branches/release/tools/build/v2/tools/gcc.jam (original)
+++ branches/release/tools/build/v2/tools/gcc.jam 2008-03-15 14:55:28 EDT (Sat, 15 Mar 2008)
@@ -56,7 +56,7 @@
# libxxx.a static library
# xxx.dll DLL
# libxxx.dll.a import library
-#
+#
# Note: user can always override by using the <tag>@rule
# This settings have been choosen, so that mingw
# is in line with msvc naming conventions. For
@@ -72,13 +72,12 @@
import rc ;
-# Initializes the gcc toolset for the given version.
-# If necessary, command may be used to specify where the compiler
-# is located.
-# The parameter 'options' is a space-delimited list of options, each
-# one being specified as <option-name>option-value. Valid option names
-# are: cxxflags, linkflags and linker-type. Accepted values for linker-type
-# are gnu and sun, gnu being the default.
+# Initializes the gcc toolset for the given version. If necessary, command may
+# be used to specify where the compiler is located. The parameter 'options' is a
+# space-delimited list of options, each one specified as
+# <option-name>option-value. Valid option names are: cxxflags, linkflags and
+# linker-type. Accepted linker-type values are gnu, darwin, osf, hpux or sun
+# and the default value will be selected based on the current OS.
# Example:
# using gcc : 3.4 : : <cxxflags>foo <linkflags>bar <linker-type>sun ;
rule init ( version ? : command * : options * )
@@ -128,12 +127,12 @@
: version $(version)
] ;
}
-
+
common.handle-options gcc : $(condition) : $(command) : $(options) ;
-
+
local linker = [ feature.get-values <linker-type> : $(options) ] ;
- if ! $(linker) {
-
+ if ! $(linker)
+ {
if [ os.name ] = OSF
{
linker = osf ;
@@ -143,36 +142,35 @@
linker = hpux ;
}
else
- {
+ {
linker = gnu ;
- }
+ }
}
init-link-flags gcc $(linker) $(condition) ;
-
-
- # If gcc is installed in non-standard location, we'd need to
- # add LD_LIBRARY_PATH when running programs created with it
- # (for unit-test/run rules).
+
+
+ # If gcc is installed in non-standard location, we'd need to add
+ # LD_LIBRARY_PATH when running programs created with it (for unit-test/run
+ # rules).
if $(command)
{
- # On multilib 64-bit boxes, there are both 32-bit and 64-bit
- # libraries and all must be added to LD_LIBRARY_PATH. The linker
- # will pick the right onces.
- # Note that we don't provide a clean way to build 32-bit binary
- # with 64-bit compiler, but user can always pass -m32 manually.
+ # On multilib 64-bit boxes, there are both 32-bit and 64-bit libraries
+ # and all must be added to LD_LIBRARY_PATH. The linker will pick the
+        # right ones. Note that we don't provide a clean way to build a 32-bit
+        # binary with a 64-bit compiler, but the user can always pass -m32 manually.
local lib_path = $(root)/bin $(root)/lib $(root)/lib32 $(root)/lib64 ;
if $(.debug-configuration)
{
ECHO notice: using gcc libraries :: $(condition) :: $(lib_path) ;
}
flags gcc.link RUN_PATH $(condition) : $(lib_path) ;
- }
+ }
+
+ # If it's not a system gcc install we should adjust the various programs as
+    # needed to prefer using the install-specific versions. This is essential
+ # for correct use of MinGW and for cross-compiling.
- #~ If it's not a system gcc install we should adjust the various
- #~ programs as needed to prefer using the install specific versions.
- #~ This is essential for correct use of MinGW and for cross-compiling.
-
- #~ - The archive builder.
+ # - The archive builder.
local archiver =
[ common.get-invocation-command gcc
: ar : [ feature.get-values <archiver> : $(options) ] : $(bin) : search-path ] ;
@@ -181,23 +179,21 @@
{
ECHO notice: using gcc archiver :: $(condition) :: $(archiver[1]) ;
}
-
- #~ - The resource compiler.
+
+ # - The resource compiler.
local rc =
- [ common.get-invocation-command gcc
+ [ common.get-invocation-command-nodefault gcc
: windres : [ feature.get-values <rc> : $(options) ] : $(bin) : search-path ] ;
local rc-type =
[ feature.get-values <rc-type> : $(options) ] ;
rc-type ?= windres ;
if ! $(rc)
{
- #~ If we can't find an RC compiler we fallback to a null RC compiler
- #~ that creates empty object files. This allows the same Jamfiles
- #~ to work across the board. The null RC uses the assembler to create
- #~ the empty objects, so configure that.
- rc =
- [ common.get-invocation-command gcc
- : as : : $(bin) : search-path ] ;
+        # If we can't find an RC compiler we fall back to a null RC compiler that
+ # creates empty object files. This allows the same Jamfiles to work
+ # across the board. The null RC uses the assembler to create the empty
+ # objects, so configure that.
+ rc = [ common.get-invocation-command gcc : as : : $(bin) : search-path ] ;
rc-type = null ;
}
rc.configure $(rc) : $(condition) : <rc-type>$(rc-type) ;
@@ -205,23 +201,22 @@
if [ os.name ] = NT
{
- # This causes single-line command invocation to not go through
- # .bat files, thus avoiding command-line length limitations
- JAMSHELL = % ;
+ # This causes single-line command invocation to not go through .bat files,
+ # thus avoiding command-line length limitations.
+ JAMSHELL = % ;
}
generators.register-c-compiler gcc.compile.c++ : CPP : OBJ : <toolset>gcc ;
-generators.register-c-compiler gcc.compile.c : C : OBJ : <toolset>gcc ;
+generators.register-c-compiler gcc.compile.c : C : OBJ : <toolset>gcc ;
generators.register-c-compiler gcc.compile.asm : ASM : OBJ : <toolset>gcc ;
# pch support
-# The compiler looks for a precompiled header in each directory just
-# before it looks for the include file in that directory.
-# The name searched for is the name specified in the #include directive
-# with ".gch" suffix appended.
-# The logic in gcc-pch-generator will make sure that BASE_PCH suffix is
-# appended to full name of the header.
+# The compiler looks for a precompiled header in each directory just before it
+# looks for the include file in that directory. The name searched for is the
+# name specified in the #include directive with ".gch" suffix appended. The
+# logic in gcc-pch-generator will make sure that the BASE_PCH suffix is appended
+# to the full name of the header.
type.set-generated-target-suffix PCH : <toolset>gcc : gch ;
@@ -238,15 +233,14 @@
local header ;
for local s in $(sources)
{
- if [ type.is-derived [ $(s).type ] H ]
+ if [ type.is-derived [ $(s).type ] H ]
{
header = $(s) ;
- }
+ }
}
-
- # error handling
- # base name of header file should be the same as the base name
- # of precompiled header.
+
+ # Error handling: Base header file name should be the same as the base
+ # precompiled header name.
local header-name = [ $(header).name ] ;
local header-basename = $(header-name:B) ;
if $(header-basename) != $(name)
@@ -255,13 +249,8 @@
errors.user-error "in" $(location)": pch target name `"$(name)"' should be the same as the base name of header file `"$(header-name)"'" ;
}
- local pch-file =
- [
- generator.run $(project) $(name)
- : $(property-set)
- : $(header)
- ]
- ;
+ local pch-file = [ generator.run $(project) $(name) : $(property-set)
+ : $(header) ] ;
# return result of base class and pch-file property as usage-requirements
return
@@ -269,21 +258,21 @@
$(pch-file)
;
}
-
- # Calls the base version specifying source's name as the
- # name of the created target. As result, the PCH will be named
- # whatever.hpp.gch, and not whatever.gch.
+
+    # Calls the base version specifying the source's name as the name of the
+    # created target. As a result, the PCH will be named whatever.hpp.gch, and
+    # not whatever.gch.
rule generated-targets ( sources + : property-set : project name ? )
{
name = [ $(sources[1]).name ] ;
- return [ generator.generated-targets $(sources)
+ return [ generator.generated-targets $(sources)
: $(property-set) : $(project) $(name) ] ;
- }
+ }
}
-# Note: the 'H' source type will catch both '.h' header and '.hpp' header. The latter
-# have HPP type, but HPP type is derived from H. The type of compilation is determined
-# entirely by the destination type.
+# Note: the 'H' source type will catch both '.h' and '.hpp' headers. The latter
+# has the HPP type, but HPP type is derived from H. The type of compilation
+# is determined entirely by the destination type.
generators.register [ new gcc-pch-generator gcc.compile.c.pch : H : C_PCH : <pch>on <toolset>gcc ] ;
generators.register [ new gcc-pch-generator gcc.compile.c++.pch : H : CPP_PCH : <pch>on <toolset>gcc ] ;
@@ -294,60 +283,59 @@
flags gcc.compile PCH_FILE <pch>on : <pch-file> ;
# Declare flags and action for compilation
-flags gcc.compile OPTIONS <optimization>off : -O0 ;
+flags gcc.compile OPTIONS <optimization>off : -O0 ;
flags gcc.compile OPTIONS <optimization>speed : -O3 ;
flags gcc.compile OPTIONS <optimization>space : -Os ;
-flags gcc.compile OPTIONS <inlining>off : -fno-inline ;
-flags gcc.compile OPTIONS <inlining>on : -Wno-inline ;
+flags gcc.compile OPTIONS <inlining>off : -fno-inline ;
+flags gcc.compile OPTIONS <inlining>on : -Wno-inline ;
flags gcc.compile OPTIONS <inlining>full : -finline-functions -Wno-inline ;
flags gcc.compile OPTIONS <warnings>off : -w ;
-flags gcc.compile OPTIONS <warnings>on : -Wall ;
+flags gcc.compile OPTIONS <warnings>on : -Wall ;
flags gcc.compile OPTIONS <warnings>all : -Wall -pedantic ;
flags gcc.compile OPTIONS <warnings-as-errors>on : -Werror ;
flags gcc.compile OPTIONS <debug-symbols>on : -g ;
flags gcc.compile OPTIONS <profiling>on : -pg ;
-# On cygwin and mingw, gcc generates position independent code by default,
-# and warns if -fPIC is specified. This might not be the right way
-# of checking if we're using cygwin. For example, it's possible
-# to run cygwin gcc from NT shell, or using crosscompiling.
-# But we'll solve that problem when it's time. In that case
-# we'll just add another parameter to 'init' and move this login
+flags gcc.compile OPTIONS <rtti>off : -fno-rtti ;
+
+# On cygwin and mingw, gcc generates position independent code by default, and
+# warns if -fPIC is specified. This might not be the right way of checking if
+# we're using cygwin. For example, it's possible to run cygwin gcc from NT
+# shell, or when cross-compiling. But we'll solve that problem when it's time.
+# In that case we'll just add another parameter to 'init' and move this logic
# inside 'init'.
if [ os.name ] != CYGWIN && [ os.name ] != NT
-{
+{
# This logic will add -fPIC for all compilations:
#
# lib a : a.cpp b ;
# obj b : b.cpp ;
# exe c : c.cpp a d ;
# obj d : d.cpp ;
- #
- # This all is fine, except that 'd' will be compiled with
- # -fPIC even though it's not needed, as 'd' is used only in
- # exe. However, it's hard to detect where a target is going to
- # be used. Alternative, we can set -fPIC only when main target type
- # is LIB but than 'b' will be compiled without -fPIC. In x86-64 that
- # will lead to link errors. So, compile everything with -fPIC.
#
- # Yet another alternative would be to create propagated <sharedable>
- # feature, and set it when building shared libraries, but that's hard
- # to implement and will increase target path length even more.
+ # This all is fine, except that 'd' will be compiled with -fPIC even though
+ # it's not needed, as 'd' is used only in exe. However, it's hard to detect
+    # where a target is going to be used. Alternatively, we could set -fPIC only
+    # when the main target type is LIB, but then 'b' will be compiled without
+    # -fPIC. On x86-64 that will lead to link errors. So, compile everything with
+ # -fPIC.
+ #
+    # Yet another alternative would be to create a propagated <sharedable>
+ # feature, and set it when building shared libraries, but that's hard to
+ # implement and will increase target path length even more.
flags gcc.compile OPTIONS <link>shared : -fPIC ;
-}
+}
if [ os.name ] != NT && [ os.name ] != OSF && [ os.name ] != HPUX
{
# OSF does have an option called -soname but it doesn't seem to work as
# expected, therefore it has been disabled.
-
HAVE_SONAME = "" ;
SONAME_OPTION = -h ;
}
-
flags gcc.compile USER_OPTIONS <cflags> ;
flags gcc.compile.c++ USER_OPTIONS <cxxflags> ;
flags gcc.compile DEFINES <define> ;
@@ -355,35 +343,34 @@
actions compile.c++.pch
{
- "$(CONFIG_COMMAND)" -x c++-header $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+ "$(CONFIG_COMMAND)" -x c++-header $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
}
actions compile.c.pch
{
- "$(CONFIG_COMMAND)" -x c-header $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+ "$(CONFIG_COMMAND)" -x c-header $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
}
rule compile.c++
{
- # Some extensions are compiled as C++ by default. For others, we need
- # to pass -x c++.
- # We could always pass -x c++ but distcc does not work with it.
+ # Some extensions are compiled as C++ by default. For others, we need to
+ # pass -x c++. We could always pass -x c++ but distcc does not work with it.
if ! $(>:S) in .cc .cp .cxx .cpp .c++ .C
{
LANG on $(<) = "-x c++" ;
- }
+ }
DEPENDS $(<) : [ on $(<) return $(PCH_FILE) ] ;
}
rule compile.c
{
- # If we use the name g++ then default file suffix -> language mapping
- # does not work. So have to pass -x option. Maybe, we can work around this
- # by allowing the user to specify both C and C++ compiler names.
+ # If we use the name g++ then default file suffix -> language mapping does
+    # not work. So we have to pass the -x option. Maybe we can work around this by
+ # allowing the user to specify both C and C++ compiler names.
#if $(>:S) != .c
#{
LANG on $(<) = "-x c" ;
- #}
+ #}
DEPENDS $(<) : [ on $(<) return $(PCH_FILE) ] ;
}
@@ -394,7 +381,7 @@
actions compile.c bind PCH_FILE
{
- "$(CONFIG_COMMAND)" $(LANG) $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(PCH_FILE:D)" -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+ "$(CONFIG_COMMAND)" $(LANG) $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(PCH_FILE:D)" -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
}
rule compile.asm
@@ -404,17 +391,17 @@
actions compile.asm
{
- "$(CONFIG_COMMAND)" $(LANG) $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+ "$(CONFIG_COMMAND)" $(LANG) $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
}
-# The class which check that we don't try to use
-# the <runtime-link>static property while creating or using shared library,
-# since it's not supported by gcc/libc.
+# The class which checks that we don't try to use the <runtime-link>static
+# property while creating or using a shared library, since it's not supported by
+# gcc/libc.
class gcc-linking-generator : unix-linking-generator
{
rule run ( project name ? : property-set : sources + )
{
- #~ TODO: Replace this with the use of a target-os property.
+ # TODO: Replace this with the use of a target-os property.
local no-static-link = ;
if [ modules.peek : UNIX ]
{
@@ -452,15 +439,14 @@
ECHO warning:
$(reason) ;
ECHO warning:
- "It's suggested to use '<runtime-link>static' together"
- "with the '<link>static'." ;
+ "It is suggested to use '<runtime-link>static' together"
+ "with '<link>static'." ;
return ;
}
else
{
- local generated-targets = [ unix-linking-generator.run $(project) $(name)
- : $(property-set) : $(sources) ] ;
-
+ local generated-targets = [ unix-linking-generator.run $(project)
+ $(name) : $(property-set) : $(sources) ] ;
return $(generated-targets) ;
}
}
@@ -495,8 +481,8 @@
: <toolset>gcc ] ;
}
-# Declare flags for linking
-# First, the common flags
+# Declare flags for linking.
+# First, the common flags.
flags gcc.link OPTIONS <debug-symbols>on : -g ;
flags gcc.link OPTIONS <profiling>on : -pg ;
flags gcc.link USER_OPTIONS <linkflags> ;
@@ -505,39 +491,37 @@
flags gcc.link FINDLIBS-SA <find-shared-library> ;
flags gcc.link LIBRARIES <library-file> ;
-# For <runtime-link>static we made sure there are no dynamic libraries
-# in the link.
-# On HP-UX not all system libraries exist as archived libraries (for example,
-# there is no libunwind.a), so, on this platform, the -static option cannot
-# be specified.
-
+# For <runtime-link>static we made sure there are no dynamic libraries in the
+# link. On HP-UX not all system libraries exist as archived libraries (for
+# example, there is no libunwind.a), so, on this platform, the -static option
+# cannot be specified.
if [ os.name ] != HPUX
{
flags gcc.link OPTIONS <runtime-link>static : -static ;
}
-# Now, the vendor specific flags
-# The parameter linker can be either gnu or sun
+# Now, the vendor specific flags.
+# The parameter linker can be either gnu, darwin, osf, hpux or sun.
rule init-link-flags ( toolset linker condition )
{
switch $(linker)
{
case gnu :
{
- # Strip the binary when no debugging is needed.
- # We use --strip-all flag as opposed to -s since icc
- # (intel's compiler) is generally option-compatible with
- # and inherits from gcc toolset, but does not support -s
- flags $(toolset).link OPTIONS $(condition)/<debug-symbols>off : -Wl,--strip-all
- : unchecked ;
- flags $(toolset).link RPATH $(condition) : <dll-path> : unchecked ;
- flags $(toolset).link RPATH_LINK $(condition) : <xdll-path> : unchecked ;
- flags $(toolset).link START-GROUP $(condition) : -Wl,--start-group : unchecked ;
- flags $(toolset).link END-GROUP $(condition) : -Wl,--end-group : unchecked ;
-
- # gnu ld has the ability to change the search behaviour for libraries referenced
- # by -l switch. These modifiers are -Bstatic and -Bdynamic and change search
- # for -l switches that follow them. The following list shows the tried variants.
+ # Strip the binary when no debugging is needed. We use --strip-all flag
+ # as opposed to -s since icc (intel's compiler) is generally
+ # option-compatible with and inherits from the gcc toolset, but does not
+ # support -s.
+ flags $(toolset).link OPTIONS $(condition)/<debug-symbols>off : -Wl,--strip-all : unchecked ;
+ flags $(toolset).link RPATH $(condition) : <dll-path> : unchecked ;
+ flags $(toolset).link RPATH_LINK $(condition) : <xdll-path> : unchecked ;
+ flags $(toolset).link START-GROUP $(condition) : -Wl,--start-group : unchecked ;
+ flags $(toolset).link END-GROUP $(condition) : -Wl,--end-group : unchecked ;
+
+ # gnu ld has the ability to change the search behaviour for libraries
+ # referenced by -l switch. These modifiers are -Bstatic and -Bdynamic
+ # and change search for -l switches that follow them. The following list
+ # shows the tried variants.
# The search stops at the first variant that has a match.
# *nix: -Bstatic -lxxx
# libxxx.a
@@ -561,72 +545,71 @@
# libxxx.a
#
# (*) This is for cygwin
- # Please note that -Bstatic and -Bdynamic are not a guarantee that a static
- # or dynamic lib indeed gets linked in. The switches only change search
- # patterns!
-
+ # Please note that -Bstatic and -Bdynamic are not a guarantee that a
+ # static or dynamic lib indeed gets linked in. The switches only change
+ # search patterns!
+
# On *nix mixing shared libs with static runtime is not a good idea.
- flags $(toolset).link FINDLIBS-ST-PFX $(condition)/<runtime-link>shared
+ flags $(toolset).link FINDLIBS-ST-PFX $(condition)/<runtime-link>shared
: -Wl,-Bstatic : unchecked ;
- flags $(toolset).link FINDLIBS-SA-PFX $(condition)/<runtime-link>shared
+ flags $(toolset).link FINDLIBS-SA-PFX $(condition)/<runtime-link>shared
: -Wl,-Bdynamic : unchecked ;
-
- # On windows allow mixing of static and dynamic libs with static runtime
+
+ # On windows allow mixing of static and dynamic libs with static
+ # runtime.
flags $(toolset).link FINDLIBS-ST-PFX $(condition)/<runtime-link>static/<target-os>windows
: -Wl,-Bstatic : unchecked ;
- flags $(toolset).link FINDLIBS-SA-PFX $(condition)/<runtime-link>static/<target-os>windows
+ flags $(toolset).link FINDLIBS-SA-PFX $(condition)/<runtime-link>static/<target-os>windows
: -Wl,-Bdynamic : unchecked ;
- flags $(toolset).link OPTIONS $(condition)/<runtime-link>static/<target-os>windows
+ flags $(toolset).link OPTIONS $(condition)/<runtime-link>static/<target-os>windows
: -Wl,-Bstatic : unchecked ;
}
+
case darwin :
{
- # On Darwin, the -s option to ld does not work unless we pass
- # -static, and passing -static unconditionally is a bad idea.
- # So, don't pass -s at all, darwin.jam will use separate 'strip'
- # invocation.
+        # On Darwin, the -s option to ld does not work unless we pass -static,
+        # and passing -static unconditionally is a bad idea. So, don't pass -s
+        # at all; darwin.jam will use a separate 'strip' invocation.
flags $(toolset).link RPATH $(condition) : <dll-path> : unchecked ;
flags $(toolset).link RPATH_LINK $(condition) : <xdll-path> : unchecked ;
}
-
+
case osf :
{
- # No --strip-all, just -s
- flags $(toolset).link OPTIONS $(condition)/<debug-symbols>off : -Wl,-s
+ # No --strip-all, just -s.
+ flags $(toolset).link OPTIONS $(condition)/<debug-symbols>off : -Wl,-s
: unchecked ;
-
flags $(toolset).link RPATH $(condition) : <dll-path> : unchecked ;
- # This does not supports -R
+        # This does not support -R.
flags $(toolset).link RPATH_OPTION $(condition) : -rpath : unchecked ;
# -rpath-link is not supported at all.
}
-
case sun :
{
- flags $(toolset).link OPTIONS $(condition)/<debug-symbols>off : -Wl,-s
+ flags $(toolset).link OPTIONS $(condition)/<debug-symbols>off : -Wl,-s
: unchecked ;
flags $(toolset).link RPATH $(condition) : <dll-path> : unchecked ;
- # Solaris linker does not have a separate -rpath-link, but
- # allows to use -L for the same purpose.
+        # The Solaris linker does not have a separate -rpath-link, but allows
+        # using -L for the same purpose.
flags $(toolset).link LINKPATH $(condition) : <xdll-path> : unchecked ;
- # This permits shared libraries with non-PIC code on Solaris
- # VP, 2004/09/07: Now that we have -fPIC hardcode in link.dll,
- # the following is not needed. Whether -fPIC should be hardcoded,
- # is a separate question.
- # AH, 2004/10/16: it is still necessary because some tests link
- # against static libraries that were compiled without PIC.
- flags $(toolset).link OPTIONS $(condition)/<link>shared : -mimpure-text
- : unchecked ;
+ # This permits shared libraries with non-PIC code on Solaris.
+        # VP, 2004/09/07: Now that we have -fPIC hardcoded in link.dll, the
+        # following is not needed. Whether -fPIC should be hardcoded is a
+ # separate question.
+ # AH, 2004/10/16: it is still necessary because some tests link against
+ # static libraries that were compiled without PIC.
+ flags $(toolset).link OPTIONS $(condition)/<link>shared : -mimpure-text
+ : unchecked ;
}
case hpux :
{
- flags $(toolset).link OPTIONS $(condition)/<debug-symbols>off : -Wl,-s
- : unchecked ;
- flags $(toolset).link OPTIONS $(condition)/<link>shared : -fPIC
- : unchecked ;
+ flags $(toolset).link OPTIONS $(condition)/<debug-symbols>off
+ : -Wl,-s : unchecked ;
+ flags $(toolset).link OPTIONS $(condition)/<link>shared
+ : -fPIC : unchecked ;
}
case * :
@@ -634,19 +617,18 @@
errors.user-error
"$(toolset) initialization: invalid linker '$(linker)'" :
"The value '$(linker)' specified for <linker> is not recognized." :
- "Possible values are 'sun', 'gnu'" ;
+ "Possible values are 'gnu', 'darwin', 'osf', 'hpux' or 'sun'" ;
}
}
}
-# Declare actions for linking
+# Declare actions for linking.
rule link ( targets * : sources * : properties * )
{
- SPACE on $(targets) = " " ;
- # Serialize execution of the 'link' action, since
- # running N links in parallel is just slower.
- # For now, serialize only gcc links, it might be a good
- # idea to serialize all links.
+ SPACE on $(targets) = " " ;
+ # Serialize execution of the 'link' action, since running N links in
+    # parallel is just slower. For now, serialize only gcc links; it might be a
+ # good idea to serialize all links.
JAM_SEMAPHORE on $(targets) = <s>gcc-link-semaphore ;
}
@@ -656,12 +638,11 @@
}
-# Default value. Mostly for the sake of intel-linux
-# that inherits from gcc, but does not has the same
-# logic to set the .AR variable. We can put the same
-# logic in intel-linux, but that's hardly worth the trouble
-# as on Linux, 'ar' is always available.
-.AR = ar ;
+# Default value. Mostly for the sake of intel-linux that inherits from gcc, but
+# does not have the same logic to set the .AR variable. We can put the same
+# logic in intel-linux, but that's hardly worth the trouble as on Linux, 'ar' is
+# always available.
+.AR = ar ;
flags gcc.archive AROPTIONS <archiveflags> ;
@@ -671,20 +652,18 @@
#
# Andre Hentz:
#
- # I had a file, say a1.c, that was included into liba.a.
- # I moved a1.c to a2.c, updated my Jamfiles and rebuilt.
- # My program was crashing with absurd errors.
- # After some debugging I traced it back to the fact that a1.o was *still*
- # in liba.a
+ # I had a file, say a1.c, that was included into liba.a. I moved a1.c to
+ # a2.c, updated my Jamfiles and rebuilt. My program was crashing with absurd
+ # errors. After some debugging I traced it back to the fact that a1.o was
+ # *still* in liba.a
#
# Rene Rivera:
#
- # Originally removing the archive was done by splicing an RM
- # onto the archive action. That makes archives fail to build on NT
- # when they have many files because it will no longer execute the
- # action directly and blow the line length limit. Instead we
- # remove the file in a different action, just before the building
- # of the archive.
+ # Originally removing the archive was done by splicing an RM onto the
+ # archive action. That makes archives fail to build on NT when they have
+ # many files because it will no longer execute the action directly and blow
+ # the line length limit. Instead we remove the file in a different action,
+ # just before building the archive.
#
local clean.a = $(targets[1])(clean) ;
TEMPORARY $(clean.a) ;
@@ -695,14 +674,13 @@
common.RmTemps $(clean.a) : $(targets) ;
}
-# Declare action for creating static libraries
-# The 'r' letter means to add files to the archive with replacement
-# Since we remove archive, we don't care about replacement, but
-# there's no option "add without replacement".
-# The 'c' letter means suppresses warning in case the archive
-# does not exists yet. That warning is produced only on
-# some platforms, for whatever reasons.
-actions piecemeal archive
+# Declare action for creating static libraries.
+# The letter 'r' means to add files to the archive with replacement. Since we
+# remove the archive, we don't care about replacement, but there's no option
+# "add without replacement".
+# The letter 'c' suppresses the warning in case the archive does not exist yet.
+# That warning is produced only on some platforms, for whatever reason.
+actions piecemeal archive
{
"$(.AR)" $(AROPTIONS) rc "$(<)" "$(>)"
}
@@ -710,7 +688,7 @@
rule link.dll ( targets * : sources * : properties * )
{
- SPACE on $(targets) = " " ;
+ SPACE on $(targets) = " " ;
JAM_SEMAPHORE on $(targets) = <s>gcc-link-semaphore ;
}
@@ -723,11 +701,11 @@
# Set up threading support. It's somewhat contrived, so perform it at the end,
# to avoid cluttering other code.
-if [ os.on-windows ]
+if [ os.on-windows ]
{
flags gcc OPTIONS <threading>multi : -mthreads ;
}
-else if [ modules.peek : UNIX ]
+else if [ modules.peek : UNIX ]
{
switch [ modules.peek : JAMUNAME ]
{
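
The reworked init comment above lists the options the gcc toolset reads from its third parameter (cxxflags, linkflags, linker-type), and the code now also honours <archiver>, <rc> and <rc-type> so that install-specific tools are preferred for MinGW and cross builds. A user-config.jam sketch along those lines; the versions, paths and tool names below are hypothetical:

    # Pick the linker flavour explicitly instead of relying on OS detection.
    using gcc : 4.2 : /opt/gcc-4.2/bin/g++ : <linkflags>-Wl,--as-needed <linker-type>gnu ;

    # A MinGW cross compiler, pointing the toolset at the matching ar and windres.
    using gcc : 3.4.5 : i386-mingw32-g++ : <archiver>i386-mingw32-ar <rc>i386-mingw32-windres ;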
Modified: branches/release/tools/build/v2/tools/generate.jam
==============================================================================
--- branches/release/tools/build/v2/tools/generate.jam (original)
+++ branches/release/tools/build/v2/tools/generate.jam 2008-03-15 14:55:28 EDT (Sat, 15 Mar 2008)
@@ -1,6 +1,6 @@
-# Copyright 2006 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+# Copyright 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
# Declares main target 'generate' that can be used to produce targets
# by calling a user-provides rule, that takes virtual target and produces
@@ -9,13 +9,13 @@
import targets ;
import "class" : new ;
import property ;
-import errors : error ;
-import type : type ;
+import errors ;
import regex ;
import property-set ;
import project ;
import feature ;
+
feature.feature generating-rule : : free ;
@@ -24,28 +24,28 @@
import errors ;
import indirect ;
import virtual-target ;
-
+
rule __init__ ( name : project : sources * : requirements *
: default-build * : usage-requirements * )
- {
- basic-target.__init__ $(name) : $(project) : $(sources)
- : $(requirements) : $(default-build) : $(usage-requirements) ;
-
+ {
+ basic-target.__init__ $(name) : $(project) : $(sources)
+ : $(requirements) : $(default-build) : $(usage-requirements) ;
+
local r = [ $(self.requirements).get <generating-rule> ] ;
if ! $(r)
{
- errors.user-error
- "The generate rule requires <generating-rule> property to be set" ;
- }
+ errors.user-error
+ "The generate rule requires <generating-rule> property to be set" ;
+ }
}
-
+
rule construct ( name : sources * : property-set )
{
local result ;
local gr = [ $(property-set).get <generating-rule> ] ;
-
+
# FIXME: this is copy-paste from virtual-target.jam. Must
- # have an utilty rule to call a rule like this.
+        # have a utility rule to call a rule like this.
local rule-name = [ MATCH ^@(.*) : $(gr) ] ;
if $(rule-name)
{
@@ -53,22 +53,21 @@
{
errors.error "<tag>@rulename is present but is not the only <tag> feature" ;
}
-
-
- result = [ indirect.call $(rule-name) $(self.project) $(name)
- : $(property-set) : $(sources) ] ;
-
+
+ result = [ indirect.call $(rule-name) $(self.project) $(name)
+ : $(property-set) : $(sources) ] ;
+
if ! $(result)
{
ECHO "warning: Unable to construct" [ full-name ] ;
- }
- }
-
+ }
+ }
+
local ur ;
local targets ;
if $(result)
- {
+ {
if [ class.is-a $(result[1]) : property-set ]
{
ur = $(result[1]) ;
@@ -78,31 +77,30 @@
{
ur = [ property-set.empty ] ;
targets = $(result) ;
- }
- }
+ }
+ }
local rt ;
for t in $(targets)
{
rt += [ virtual-target.register $(t) ] ;
}
return $(ur) $(rt) ;
- }
+ }
}
-rule generate ( name : sources * : requirements * : default-build *
+
+rule generate ( name : sources * : requirements * : default-build *
: usage-requirements * )
{
local project = [ project.current ] ;
-
+
targets.main-target-alternative
- [ new generated-target-class $(name) : $(project)
- : [ targets.main-target-sources $(sources) : $(name) ]
- : [ targets.main-target-requirements $(requirements) : $(project) ]
- : [ targets.main-target-default-build $(default-build) : $(project) ]
- : [ targets.main-target-usage-requirements $(usage-requirements) : $(project) ]
- ] ;
+ [ new generated-target-class $(name) : $(project)
+ : [ targets.main-target-sources $(sources) : $(name) ]
+ : [ targets.main-target-requirements $(requirements) : $(project) ]
+ : [ targets.main-target-default-build $(default-build) : $(project) ]
+ : [ targets.main-target-usage-requirements $(usage-requirements) : $(project) ]
+ ] ;
}
IMPORT $(__name__) : generate : : generate ;
-
-
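
The construct rule above dispatches via indirect.call to the rule named in <generating-rule>@..., passing the project, the target name, the property-set object and the source virtual targets, and treats a leading property-set in the returned list as usage requirements. A hedged sketch of the Jamfile side (the rule and file names are made up; a real rule would build and return virtual targets):

    import generate ;

    # The signature matches the indirect.call above: the first argument group is
    # "project name", then the property-set, then the sources.
    rule sketch-generator ( project name : property-set : sources * )
    {
        # Inspect $(property-set) and $(sources) and construct virtual targets here.
        # Returning nothing makes 'generate' print its "Unable to construct" warning.
    }

    generate out : in.txt : <generating-rule>@sketch-generator ;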
Modified: branches/release/tools/build/v2/tools/gettext.jam
==============================================================================
--- branches/release/tools/build/v2/tools/gettext.jam (original)
+++ branches/release/tools/build/v2/tools/gettext.jam 2008-03-15 14:55:28 EDT (Sat, 15 Mar 2008)
@@ -1,47 +1,47 @@
-# Copyright 2003, 2004, 2005, 2006 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+# Copyright 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
# This module support GNU gettext internationalization utilities.
-#
+#
# It provides two main target rules: 'gettext.catalog', used for
# creating machine-readable catalogs from translations files, and
# 'gettext.update', used for update translation files from modified
# sources.
-#
+#
# To add i18n support to your application you should follow these
# steps.
-#
+#
# - Decide on a file name which will contain translations and
# what main target name will be used to update it. For example::
-#
+#
# gettext.update update-russian : russian.po a.cpp my_app ;
-#
+#
# - Create the initial translation file by running::
#
# bjam update-russian
#
# - Edit russian.po. For example, you might change fields like LastTranslator.
-#
+#
# - Create a main target for final message catalog::
#
# gettext.catalog russian : russian.po ;
#
-# The machine-readable catalog will be updated whenever you update
+# The machine-readable catalog will be updated whenever you update
# "russian.po". The "russian.po" file will be updated only on explicit
# request. When you're ready to update translations, you should
-#
+#
# - Run::
-#
+#
# bjam update-russian
#
# - Edit "russian.po" in appropriate editor.
-#
+#
# The next bjam run will convert "russian.po" into machine-readable form.
#
# By default, translations are marked by 'i18n' call. The 'gettext.keyword'
# feature can be used to alter this.
-
+
import targets ;
import property-set ;
@@ -59,7 +59,7 @@
# Initializes the gettext module.
rule init ( path ? # Path where all tools are located. If not specified,
- # they should be in PATH.
+ # they should be in PATH.
)
{
if $(.initialized) && $(.path) != $(path)
@@ -68,9 +68,9 @@
}
.initialized = true ;
if $(path)
- {
+ {
.path = $(path)/ ;
- }
+ }
}
# Creates a main target 'name', which, when updated, will cause
@@ -80,36 +80,36 @@
# of those main targets will be scanned, provided they are of
# appropricate type. The 'gettext.types' feature can be used to
# control the types.
-#
+#
# The target will be updated only if explicitly requested on the
# command line.
rule update ( name : existing-translation sources + : requirements * )
{
local project = [ project.current ] ;
-
+
targets.main-target-alternative
[ new typed-target $(name) : $(project) : gettext.UPDATE :
$(existing-translation) $(sources)
- : [ targets.main-target-requirements $(requirements) : $(project) ]
+ : [ targets.main-target-requirements $(requirements) : $(project) ]
] ;
$(project).mark-target-as-explicit $(name) ;
}
# The human editable source, containing translation.
-type.register gettext.PO : po ;
+type.register gettext.PO : po ;
# The machine readable message catalog.
type.register gettext.catalog : mo ;
# Intermediate type produce by extracting translations from
# sources.
-type.register gettext.POT : pot ;
+type.register gettext.POT : pot ;
# Pseudo type used to invoke update-translations generator
type.register gettext.UPDATE ;
# Identifies the keyword that should be used when scanning sources.
# Default: i18n
feature gettext.keyword : : free ;
-# Contains space-separated list of sources types which should be scanned.
+# Contains a space-separated list of source types which should be scanned.
# Default: "C CPP"
feature gettext.types : : free ;
@@ -119,7 +119,7 @@
{
import regex : split ;
import property-set ;
-
+
rule __init__ ( * : * )
{
generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
@@ -135,11 +135,11 @@
local types = [ $(property-set).get <gettext.types> ] ;
types ?= "C CPP" ;
types = [ regex.split $(types) " " ] ;
-
+
local keywords = [ $(property-set).get <gettext.keyword> ] ;
property-set = [ property-set.create $(keywords:G=<gettext.keyword>) ] ;
- # First deterime the list of sources that must be scanned for
+        # First determine the list of sources that must be scanned for
# messages.
local all-sources ;
# CONSIDER: I'm not sure if the logic should be the same as for 'stage':
@@ -154,22 +154,22 @@
if [ $(s).type ] in $(types)
{
right-sources += $(s) ;
- }
+ }
}
-
+
local .constructed ;
if $(right-sources)
- {
+ {
# Create the POT file, which will contain list of messages extracted
# from the sources.
- local extract =
+ local extract =
[ new action $(right-sources) : gettext.extract : $(property-set) ] ;
- local new-messages = [ new file-target $(name) : gettext.POT
+ local new-messages = [ new file-target $(name) : gettext.POT
: $(project) : $(extract) ] ;
-
+
# Create a notfile target which will update the existing translation file
- # with new messages.
- local a = [ new action $(sources[1]) $(new-messages)
+ # with new messages.
+ local a = [ new action $(sources[1]) $(new-messages)
: gettext.update-po-dispatch ] ;
local r = [ new notfile-target $(name) : $(project) : $(a) ] ;
.constructed = [ virtual-target.register $(r) ] ;
@@ -177,9 +177,9 @@
else
{
errors.error "No source could be scanned by gettext tools" ;
- }
- return $(.constructed) ;
- }
+ }
+ return $(.constructed) ;
+ }
}
generators.register [ new update-translations-generator gettext.update : : gettext.UPDATE ] ;
@@ -199,23 +199,22 @@
# first run), we need to copy the file created from sources.
# In all other cases, we need to update the file.
rule update-po-dispatch
-{
+{
NOCARE $(>[1]) ;
gettext.create-po $(<) : $(>) ;
gettext.update-po $(<) : $(>) ;
- _ on $(<) = " " ;
+ _ on $(<) = " " ;
ok on $(<) = "" ;
EXISTING_PO on $(<) = $(>[1]) ;
}
-# Due to fancy interaction of existing and updated, this rule
-# can be called with with one source, in which case we copy
-# the lonely source into EXISTING_PO, or with two sources,
-# in which case the action body expands to nothing.
-# I'd really like to have "missing" action modifier.
+# Due to fancy interaction of existing and updated, this rule can be called with
+# one source, in which case we copy the lonely source into EXISTING_PO, or with
+# two sources, in which case the action body expands to nothing. I'd really like
+# to have a "missing" action modifier.
actions quietly existing updated create-po bind EXISTING_PO
{
- cp$(_)"$(>[1])"$(_)"$(EXISTING_PO)"$($(>[2]:E=ok))
+ cp$(_)"$(>[1])"$(_)"$(EXISTING_PO)"$($(>[2]:E=ok))
}
actions updated update-po bind EXISTING_PO
@@ -223,12 +222,9 @@
$(.path)msgmerge$(_)-U$(_)"$(EXISTING_PO)"$(_)"$(>[1])"
}
-actions gettext.compile
+actions gettext.compile
{
- $(.path)msgfmt -o $(<) $(>)
+ $(.path)msgfmt -o $(<) $(>)
}
IMPORT $(__name__) : update : : gettext.update ;
-
-
-
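
Pulling the workflow described in the module comment together, a Jamfile using it would look roughly like this (russian.po, a.cpp and my_app are the illustrative names used in that comment):

    import gettext ;

    # Updated only when "bjam update-russian" is requested explicitly.
    gettext.update update-russian : russian.po a.cpp my_app ;

    # Machine-readable catalog, rebuilt whenever russian.po changes.
    gettext.catalog russian : russian.po ;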
Modified: branches/release/tools/build/v2/tools/intel.jam
==============================================================================
--- branches/release/tools/build/v2/tools/intel.jam (original)
+++ branches/release/tools/build/v2/tools/intel.jam 2008-03-15 14:55:28 EDT (Sat, 15 Mar 2008)
@@ -16,15 +16,19 @@
rule init ( * : * )
{
- if [ os.name ] = LINUX || [ os.name ] = MACOSX
+ if [ os.name ] = LINUX
{
toolset.using intel-linux :
$(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
}
+ else if [ os.name ] = MACOSX
+ {
+ toolset.using intel-darwin :
+ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+ }
else
{
toolset.using intel-win :
$(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
-
}
}
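
With this dispatch in place a single generic declaration keeps working across platforms; the version number below is only an illustration:

    # user-config.jam sketch: "intel" forwards to intel-linux, intel-darwin or
    # intel-win depending on [ os.name ].
    using intel ;
    # using intel : 10.1 ;   # or pin a particular version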
Modified: branches/release/tools/build/v2/tools/mpi.jam
==============================================================================
--- branches/release/tools/build/v2/tools/mpi.jam (original)
+++ branches/release/tools/build/v2/tools/mpi.jam 2008-03-15 14:55:28 EDT (Sat, 15 Mar 2008)
@@ -271,6 +271,12 @@
command = [ common.get-invocation-command mpi : mpiCC ] ;
}
+ if ! $(mpicxx) && ! $(command)
+ {
+ # Try "mpicxx", which is used by OpenMPI and MPICH2
+ command = [ common.get-invocation-command mpi : mpicxx ] ;
+ }
+
local result ;
local compile_flags ;
local link_flags ;
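
The extra probe above means that configuring MPI support without naming a wrapper will now also look for mpicxx (the name used by Open MPI and MPICH2) after mpiCC. A sketch of the two usual configurations; the explicit path is hypothetical:

    # user-config.jam sketch: let mpi.jam probe for a wrapper compiler ...
    using mpi ;
    # ... or name one explicitly.
    # using mpi : /opt/openmpi/bin/mpicxx ;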
Modified: branches/release/tools/build/v2/tools/msvc.jam
==============================================================================
--- branches/release/tools/build/v2/tools/msvc.jam (original)
+++ branches/release/tools/build/v2/tools/msvc.jam 2008-03-15 14:55:28 EDT (Sat, 15 Mar 2008)
@@ -9,17 +9,16 @@
# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
# http://www.boost.org/LICENSE_1_0.txt)
+import "class" : new ;
import property ;
import generators ;
import os ;
import type ;
import toolset : flags ;
-import errors : error ;
-import feature : feature get-values ;
+import errors ;
+import feature ;
import path ;
-import sequence : unique ;
import common ;
-import "class" : new ;
import rc ;
import midl ;
import mc ;
@@ -51,15 +50,14 @@
# Dynamic runtime comes only in MT flavour.
toolset.add-requirements <toolset>msvc,<runtime-link>shared:<threading>multi ;
-
-RM = [ common.rm-command ] ;
+RM = [ common.rm-command ] ;
nl = "
" ;
-# Initialize the toolset for a specific version. As the result, path to
-# compiler and, possible, program names are set up, and will be used when
-# that version of compiler is requested. For example, you might have:
+# Initialize the toolset for a specific version. As a result, the path to the
+# compiler and, possibly, program names are set up and will be used when that
+# version of the compiler is requested. For example, you might have:
#
# using msvc : 6.5 : cl.exe ;
# using msvc : 7.0 : Y:/foo/bar/cl.exe ;
@@ -72,36 +70,36 @@
# - all - all detected versions will be registered;
# - default - this is an equivalent to an empty version.
#
-# Depending on a supplied version, detected configurations and presence
-# 'cl.exe' in the path different results may be achieved. The following
-# table describes all possible cases:
+# Depending on the supplied version, the detected configurations and the
+# presence of 'cl.exe' in the path, different results may be achieved. The
+# following table describes all possible cases:
#
# Nothing "x.y"
# Passed Nothing "x.y" detected, detected,
# version detected detected cl.exe in path cl.exe in path
-#
+#
# default Error Use "x.y" Create "default" Use "x.y"
# all None Use all None Use all
# x.y - Use "x.y" - Use "x.y"
# a.b Error Error Create "a.b" Create "a.b"
-#
+#
# "x.y" - refers to a detected version;
# "a.b" - refers to an undetected version.
#
# Note: for free VC7.1 tools, we don't correctly find vcvars32.bar when user
# explicitly provides a path.
-rule init (
+rule init (
version ? # the msvc version which is being configured. When omitted
# the tools invoked when no explicit version is given will be configured.
- : command *
+ : command *
# the command to invoke the compiler. If not specified:
# - if version is given, default location for that version will be searched
- #
- # - if version is not given, default locations for 7.1, 7.0 and 6.* will
- # be searched
- #
- # - if compiler is not found in default locations, PATH will be searched.
- : options *
+ #
+ # - if version is not given, default locations for 7.1, 7.0 and 6.* will
+ # be searched
+ #
+ # - if compiler is not found in default locations, PATH will be searched.
+ : options *
# options can include <setup>, <compiler>, <assembler>, <linker> and <resource-compiler>
#
# <compiler-filter>
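
Given the version values documented above, the two most common user-config.jam forms are an explicit version (as in the examples earlier in this comment) and the catch-all registration; a hedged sketch, with a purely illustrative 8.0 path:

    # Register every auto-detected installation.
    using msvc : all ;

    # Or configure one version with an explicit compiler path.
    # using msvc : 8.0 : "C:/Program Files/Microsoft Visual Studio 8/VC/bin/cl.exe" ;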
@@ -118,22 +116,19 @@
}
-# 'configure' is a newer version of 'init'. The parameter 'command' is passed as
+# 'configure' is a newer version of 'init'. The parameter 'command' is passed as
# a part of the 'options' list.
-rule configure (
- version ? :
- options *
- )
+rule configure ( version ? : options * )
{
switch $(version)
{
case all :
if $(options)
{
- error "msvc: options should be empty when 'all' is specified" ;
+ errors.error "msvc: options should be empty when 'all' is specified" ;
}
- # use all detected versions
+ # Use all detected versions.
for local v in [ $(.versions).all ]
{
configure-really $(v) ;
@@ -149,27 +144,24 @@
# Supported CPU architectures
-cpu-arch-i386 =
+cpu-arch-i386 =
<architecture>/<address-model>
<architecture>/<address-model>32
- <architecture>x86/<address-model>
+ <architecture>x86/<address-model>
<architecture>x86/<address-model>32 ;
-cpu-arch-amd64 =
- <architecture>/<address-model>64
+cpu-arch-amd64 =
+ <architecture>/<address-model>64
<architecture>x86/<address-model>64 ;
cpu-arch-ia64 =
- <architecture>ia64/<address-model>
+ <architecture>ia64/<address-model>
<architecture>ia64/<address-model>64 ;
-local rule configure-really (
- version ? :
- options *
- )
+local rule configure-really ( version ? : options * )
{
- # If no version supplied use the default configuration. Note that condition
+ # If no version supplied use the default configuration. Note that condition
# remains versionless.
local v = $(version) ;
if ! $(v)
@@ -179,7 +171,7 @@
version = $(version[1]) ;
v = $(version) ;
- # Note: 'version' can still be empty at this point if no versions were
+ # Note: 'version' can still be empty at this point if no versions were
# detected.
version ?= "default" ;
}
@@ -194,15 +186,15 @@
if $(version) in [ $(.versions).used ]
{
# Allow multiple 'toolset.usage' calls for the same configuration
- # if the identical sets of options are used
+ # if the identical sets of options are used
if $(options) && ( $(options) != [ $(.versions).get $(version) : options ] )
{
- error "msvc: the toolset version '$(version)' is configured already" ;
+ errors.error "msvc: the toolset version '$(version)' is configured already" ;
}
}
else
{
- # Register a new configuration
+ # Register a new configuration
$(.versions).register $(version) ;
# Add user-supplied to auto-detected options
@@ -212,26 +204,23 @@
$(.versions).use $(version) ;
# Generate condition and save it
- local condition = [ common.check-init-parameters msvc :
- version $(v) ] ;
+ local condition = [ common.check-init-parameters msvc : version $(v) ] ;
$(.versions).set $(version) : condition : $(condition) ;
+ local command = [ feature.get-values <command> : $(options) ] ;
- local command = [ get-values <command> : $(options) ] ;
-
- # If version is specified, we try to search first in default paths,
- # and only then in PATH.
+ # If version is specified, we try to search first in default paths, and
+ # only then in PATH.
command = [ common.get-invocation-command msvc : cl.exe : $(command)
: [ default-paths $(version) ] : $(version) ] ;
common.handle-options msvc : $(condition) : $(command) : $(options) ;
-
- if ! $(version)
+ if ! $(version)
{
- # Even if version is not explicitly specified, try to detect the version
- # from the path.
+ # Even if version is not explicitly specified, try to detect the
+ # version from the path.
if [ MATCH "(Microsoft Visual Studio 9)" : $(command) ]
{
version = 9.0 ;
@@ -239,7 +228,7 @@
if [ MATCH "(Microsoft Visual Studio 8)" : $(command) ]
{
version = 8.0 ;
- }
+ }
else if [ MATCH "(NET 2003[\/\\]VC7)" : $(command) ]
{
version = 7.1 ;
@@ -255,11 +244,10 @@
else
{
version = 6.0 ;
- }
+ }
}
-
- # Generate and register setup command
+ # Generate and register setup command.
local below-8.0 = [ MATCH ^([67]\\.) : $(version) ] ;
@@ -276,9 +264,9 @@
parent = [ path.parent $(parent) ] ;
parent = [ path.native $(parent) ] ;
- # setup will be used if the script name has been specified.
- # If setup is not specified, a default script will be used instead.
- setup = [ get-values <setup> : $(options) ] ;
+ # Setup will be used if the script name has been specified. If setup
+ # is not specified, a default script will be used instead.
+ setup = [ feature.get-values <setup> : $(options) ] ;
if ! $(setup)
{
@@ -291,14 +279,14 @@
setup ?= vcvarsall.bat ;
}
- # The vccars32.bat is actually in "bin" directory.
- # (except for free VC7.1 tools)
+            # The vcvars32.bat is actually in the "bin" directory, except for
+            # the free VC7.1 tools.
setup = [ GLOB $(command) $(parent) : $(setup) ] ;
}
if $(setup)
{
- # Note Cygwin to Windows translation
+ # Note Cygwin to Windows translation.
setup = "\""$(setup[1]:W)"\"" ;
if ! $(below-8.0)
@@ -309,9 +297,15 @@
# say about x86_IPF, that seem to be doc bug,
# and x86_ia64 is right one.
setup-option = x86 x86_amd64 x86_ia64 ;
-
- # Use a native x64 compiler if possible
- if [ MATCH ^(AMD64) : [ os.environ PROCESSOR_IDENTIFIER ] ]
+
+ # When using 64-bit Windows, and targeting 64-bit, it's
+ # possible to use native 64-bit compiler, which is selected
+ # by the "amd64" parameter to vcvarsall.bat. There are two
+ # variables we can use -- PROCESSOR_ARCHITECTURE and
+ # PROCESSOR_IDENTIFIER. The first is 'x86' when running
+                    # 32-bit Windows, no matter what the processor is, and
+                    # 'AMD64' on 64-bit Windows on x86 (either AMD64 or EM64T).
+ if [ MATCH ^(AMD64) : [ os.environ PROCESSOR_ARCHITECTURE ] ]
{
setup-option = x86 amd64 x86_ia64 ;
}
@@ -330,32 +324,32 @@
command = $(prefix)$(setup)" "$(setup-option:E="")$(suffix) ;
- # Setup script is not required in some configurations
+ # Setup script is not required in some configurations.
command ?= "" ;
- # Get tool names (if any) and finish setup
+ # Get tool names (if any) and finish setup.
- compiler = [ get-values <compiler> : $(options) ] ;
+ compiler = [ feature.get-values <compiler> : $(options) ] ;
compiler ?= cl ;
- linker = [ get-values <linker> : $(options) ] ;
+ linker = [ feature.get-values <linker> : $(options) ] ;
linker ?= link ;
- resource-compiler = [ get-values <resource-compiler> : $(options) ] ;
+ resource-compiler = [ feature.get-values <resource-compiler> : $(options) ] ;
resource-compiler ?= rc ;
- assembler = [ get-values <assembler> : $(options) ] ;
+ assembler = [ feature.get-values <assembler> : $(options) ] ;
assembler ?= ml ;
- idl-compiler = [ get-values <idl-compiler> : $(options) ] ;
+ idl-compiler = [ feature.get-values <idl-compiler> : $(options) ] ;
idl-compiler ?= midl ;
- mc-compiler = [ get-values <mc-compiler> : $(options) ] ;
+ mc-compiler = [ feature.get-values <mc-compiler> : $(options) ] ;
mc-compiler ?= mc ;
manifest-tool = mt ;
-
- local cc-filter = [ get-values <compiler-filter> : $(options) ] ;
+
+ local cc-filter = [ feature.get-values <compiler-filter> : $(options) ] ;
for local i in 1 2 3
{
@@ -370,31 +364,31 @@
ECHO "msvc: condition: '$(cond)', "
"command: '$(command[$(i)])'" ;
}
-
- flags msvc.compile .CC $(cond) : $(command[$(i)])$(compiler) /Zm800 -nologo ;
- flags msvc.compile .RC $(cond) : $(command[$(i)])$(resource-compiler) ;
- flags msvc.compile .ASM $(cond) : $(command[$(i)])$(assembler) ;
- flags msvc.link .LD $(cond) : $(command[$(i)])$(linker) /NOLOGO /INCREMENTAL:NO ;
- flags msvc.archive .LD $(cond) : $(command[$(i)])$(linker) /lib /NOLOGO ;
- flags msvc.compile .IDL $(cond) : $(command[$(i)])$(idl-compiler) ;
- flags msvc.compile .MC $(cond) : $(command[$(i)])$(mc-compiler) ;
+
+ toolset.flags msvc.compile .CC $(cond) : $(command[$(i)])$(compiler) /Zm800 -nologo ;
+ toolset.flags msvc.compile .RC $(cond) : $(command[$(i)])$(resource-compiler) ;
+ toolset.flags msvc.compile .ASM $(cond) : $(command[$(i)])$(assembler) ;
+ toolset.flags msvc.link .LD $(cond) : $(command[$(i)])$(linker) /NOLOGO /INCREMENTAL:NO ;
+ toolset.flags msvc.archive .LD $(cond) : $(command[$(i)])$(linker) /lib /NOLOGO ;
+ toolset.flags msvc.compile .IDL $(cond) : $(command[$(i)])$(idl-compiler) ;
+ toolset.flags msvc.compile .MC $(cond) : $(command[$(i)])$(mc-compiler) ;
if ! [ os.name ] in NT
{
- flags msvc.link .MT $(cond) : $(command[$(i)])$(manifest-tool) -nologo ;
+ toolset.flags msvc.link .MT $(cond) : $(command[$(i)])$(manifest-tool) -nologo ;
}
else
{
- flags msvc.link .MT $(cond) : $(manifest-tool) -nologo ;
+ toolset.flags msvc.link .MT $(cond) : $(manifest-tool) -nologo ;
}
-
+
if $(cc-filter)
{
- flags msvc .CC.FILTER $(cond) : "|" $(cc-filter) ;
+ toolset.flags msvc .CC.FILTER $(cond) : "|" $(cc-filter) ;
}
}
}
- # Set version-specific flags
+ # Set version-specific flags.
configure-version-specific msvc : $(version) : $(condition) ;
}
}
@@ -403,95 +397,94 @@
# Supported CPU types (only Itanium optimization options are supported from
# VC++ 2005 on). See http://msdn2.microsoft.com/en-us/library/h66s5s0e(vs.90).aspx
cpu-type-g5 = i586 pentium pentium-mmx ;
-cpu-type-g6 =
- i686 pentiumpro pentium2 pentium3 pentium3m pentium-m k6 k6-2 k6-3
+cpu-type-g6 =
+ i686 pentiumpro pentium2 pentium3 pentium3m pentium-m k6 k6-2 k6-3
winchip-c6 winchip2 c3 c3-2 ;
-cpu-type-em64t = prescott nocona
+cpu-type-em64t = prescott nocona
conroe conroe-xe conroe-l allendale mermon mermon-xe kentsfield kentsfield-xe
penryn wolfdale yorksfield nehalem ;
cpu-type-amd64 = k8 opteron athlon64 athlon-fx ;
-cpu-type-g7 =
- pentium4 pentium4m athlon athlon-tbird athlon-4 athlon-xp athlon-mp
+cpu-type-g7 =
+ pentium4 pentium4m athlon athlon-tbird athlon-4 athlon-xp athlon-mp
$(cpu-type-em64t) $(cpu-type-amd64) ;
cpu-type-itanium = itanium itanium1 merced ;
cpu-type-itanium2 = itanium2 mckinley ;
-# Sets up flag definitions that are dependent on the version ot
-# compiler.
+
+# Sets up flag definitions dependent on the compiler version used.
# - 'version' is the version of compiler in N.M format.
-# - 'condition' is the property set to be used as condition for flag
-# - 'toolset' is the toolset for which flag settings are to be defined
+# - 'condition' is the property set to be used as the condition for the flags.
+# - 'toolset' is the toolset for which flag settings are to be defined.
# This makes the rule reusable for other msvc-option-compatible compilers.
rule configure-version-specific ( toolset : version : condition )
{
toolset.push-checking-for-flags-module unchecked ;
- # Starting with versions 7.0, the msvc compiler have the /Zc:forScope
- # and /Zc:wchar_t options that improve C++ standard conformance, but
- # those options are off by default.
- # If we're sure that msvc version is at 7.*, add those options explicitly.
- # We can be sure either if user specified version 7.* explicitly,
- # or if the installation path contain 7.* (this is checked above).
+    # Starting with version 7.0, the msvc compiler has the /Zc:forScope and
+    # /Zc:wchar_t options that improve C++ standard conformance, but those
+    # options are off by default. If we're sure that the msvc version is at
+    # 7.*, add those options explicitly. We can be sure either if the user
+    # specified version 7.* explicitly, or if the installation path contains
+    # 7.* (checked above).
if ! [ MATCH ^(6\\.) : $(version) ]
{
- flags $(toolset).compile CFLAGS $(condition) : /Zc:forScope /Zc:wchar_t ;
- flags $(toolset).compile.c++ C++FLAGS $(condition) : /wd4675 ;
+ toolset.flags $(toolset).compile CFLAGS $(condition) : /Zc:forScope /Zc:wchar_t ;
+ toolset.flags $(toolset).compile.c++ C++FLAGS $(condition) : /wd4675 ;
# disable the function is deprecated warning
# Some version of msvc have a bug, that cause deprecation
# warning to be emitted even with /W0
- flags $(toolset).compile CFLAGS $(condition)/<warnings>off : /wd4996 ;
+ toolset.flags $(toolset).compile CFLAGS $(condition)/<warnings>off : /wd4996 ;
# 64-bit compatibility warning
- flags $(toolset).compile CFLAGS $(condition)/<warnings>all : /Wp64 ;
+ toolset.flags $(toolset).compile CFLAGS $(condition)/<warnings>all : /Wp64 ;
}
-
+
#
# Processor-specific optimization
#
if [ MATCH ^([67]) : $(version) ]
{
- # 8.0 deprecates some of the options
- flags $(toolset).compile CFLAGS $(condition)/<optimization>speed $(condition)/<optimization>space : /Ogiy /Gs ;
- flags $(toolset).compile CFLAGS $(condition)/<optimization>speed : /Ot ;
- flags $(toolset).compile CFLAGS $(condition)/<optimization>space : /Os ;
-
- flags $(toolset).compile CFLAGS $(condition)/$(cpu-arch-i386)/<instruction-set> : /GB ;
- flags $(toolset).compile CFLAGS $(condition)/$(cpu-arch-i386)/<instruction-set>i386 : /G3 ;
- flags $(toolset).compile CFLAGS $(condition)/$(cpu-arch-i386)/<instruction-set>i486 : /G4 ;
- flags $(toolset).compile CFLAGS $(condition)/$(cpu-arch-i386)/<instruction-set>$(cpu-type-g5) : /G5 ;
- flags $(toolset).compile CFLAGS $(condition)/$(cpu-arch-i386)/<instruction-set>$(cpu-type-g6) : /G6 ;
- flags $(toolset).compile CFLAGS $(condition)/$(cpu-arch-i386)/<instruction-set>$(cpu-type-g7) : /G7 ;
-
- # Improve floating-point accuracy. Otherwise, some of C++ Boost's
- # "math" tests will fail.
- flags $(toolset).compile CFLAGS $(condition) : /Op ;
-
- # 7.1 and below have single-threaded static RTL
- flags $(toolset).compile CFLAGS $(condition)/<runtime-debugging>off/<runtime-link>static/<threading>single : /ML ;
- flags $(toolset).compile CFLAGS $(condition)/<runtime-debugging>on/<runtime-link>static/<threading>single : /MLd ;
+ # 8.0 deprecates some of the options.
+ toolset.flags $(toolset).compile CFLAGS $(condition)/<optimization>speed $(condition)/<optimization>space : /Ogiy /Gs ;
+ toolset.flags $(toolset).compile CFLAGS $(condition)/<optimization>speed : /Ot ;
+ toolset.flags $(toolset).compile CFLAGS $(condition)/<optimization>space : /Os ;
+
+ toolset.flags $(toolset).compile CFLAGS $(condition)/$(cpu-arch-i386)/<instruction-set> : /GB ;
+ toolset.flags $(toolset).compile CFLAGS $(condition)/$(cpu-arch-i386)/<instruction-set>i386 : /G3 ;
+ toolset.flags $(toolset).compile CFLAGS $(condition)/$(cpu-arch-i386)/<instruction-set>i486 : /G4 ;
+ toolset.flags $(toolset).compile CFLAGS $(condition)/$(cpu-arch-i386)/<instruction-set>$(cpu-type-g5) : /G5 ;
+ toolset.flags $(toolset).compile CFLAGS $(condition)/$(cpu-arch-i386)/<instruction-set>$(cpu-type-g6) : /G6 ;
+ toolset.flags $(toolset).compile CFLAGS $(condition)/$(cpu-arch-i386)/<instruction-set>$(cpu-type-g7) : /G7 ;
+
+ # Improve floating-point accuracy. Otherwise, some of C++ Boost's "math"
+ # tests will fail.
+ toolset.flags $(toolset).compile CFLAGS $(condition) : /Op ;
+
+ # 7.1 and below have single-threaded static RTL.
+ toolset.flags $(toolset).compile CFLAGS $(condition)/<runtime-debugging>off/<runtime-link>static/<threading>single : /ML ;
+ toolset.flags $(toolset).compile CFLAGS $(condition)/<runtime-debugging>on/<runtime-link>static/<threading>single : /MLd ;
}
else
{
- # 8.0 and above adds some more options
- flags $(toolset).compile CFLAGS $(condition)/$(cpu-arch-amd64)/<instruction-set> : /favor:blend ;
- flags $(toolset).compile CFLAGS $(condition)/$(cpu-arch-amd64)/<instruction-set>$(cpu-type-em64t) : /favor:EM64T ;
- flags $(toolset).compile CFLAGS $(condition)/$(cpu-arch-amd64)/<instruction-set>$(cpu-type-amd64) : /favor:AMD64 ;
-
- # 8.0 and above only has multi-threaded static RTL
- flags $(toolset).compile CFLAGS $(condition)/<runtime-debugging>off/<runtime-link>static/<threading>single : /MT ;
- flags $(toolset).compile CFLAGS $(condition)/<runtime-debugging>on/<runtime-link>static/<threading>single : /MTd ;
+ # 8.0 and above adds some more options.
+ toolset.flags $(toolset).compile CFLAGS $(condition)/$(cpu-arch-amd64)/<instruction-set> : /favor:blend ;
+ toolset.flags $(toolset).compile CFLAGS $(condition)/$(cpu-arch-amd64)/<instruction-set>$(cpu-type-em64t) : /favor:EM64T ;
+ toolset.flags $(toolset).compile CFLAGS $(condition)/$(cpu-arch-amd64)/<instruction-set>$(cpu-type-amd64) : /favor:AMD64 ;
+
+ # 8.0 and above only has multi-threaded static RTL.
+ toolset.flags $(toolset).compile CFLAGS $(condition)/<runtime-debugging>off/<runtime-link>static/<threading>single : /MT ;
+ toolset.flags $(toolset).compile CFLAGS $(condition)/<runtime-debugging>on/<runtime-link>static/<threading>single : /MTd ;
}
toolset.pop-checking-for-flags-module ;
}
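
(As a quick, hedged illustration of the toolset.flags mechanism used throughout
this hunk -- not part of the changeset, and the feature value is only an
example: a value is appended to the named action variable solely for targets
whose build properties match the given condition.)

    import toolset ;
    # /wd4996 ends up in CFLAGS only when the build request contains <warnings>off.
    toolset.flags msvc.compile CFLAGS <warnings>off : /wd4996 ;
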
-# Returns the default installation path for the given version.
+# Returns the default installation path for the given version.
local rule default-path ( version )
{
- # Use auto-detected path if possible
- local path = [ get-values <command> :
+ # Use auto-detected path if possible
+ local path = [ feature.get-values <command> :
[ $(.versions).get $(version) : options ] ] ;
if $(path)
@@ -524,12 +517,13 @@
return $(path) ;
}
-# Returns either the default installation path (if 'version' is not empty) or list of all
-# known default paths (if no version is given)
+
+# Returns either the default installation path (if 'version' is not empty) or
+# a list of all known default paths (if no version is given).
rule default-paths ( version ? )
{
local possible-paths ;
-
+
if $(version)
{
possible-paths += [ default-path $(version) ] ;
@@ -546,14 +540,13 @@
}
-# Declare generators
+# Declare generators.
-# is it possible to combine these?
-# make the generators non-composing, so that they don't convert each source
-# into separate rsp file.
+# Is it possible to combine these? Make the generators non-composing so that
+# they don't convert each source into a separate rsp file.
generators.register-linker msvc.link : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB : EXE : <toolset>msvc ;
generators.register-linker msvc.link.dll : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB : SHARED_LIB IMPORT_LIB : <toolset>msvc ;
-
+
generators.register-archiver msvc.archive : OBJ : STATIC_LIB : <toolset>msvc ;
generators.register-c-compiler msvc.compile.c++ : CPP : OBJ : <toolset>msvc ;
generators.register-c-compiler msvc.compile.c : C : OBJ : <toolset>msvc ;
@@ -571,14 +564,14 @@
# pch support
-feature pch-source : : free dependency ;
+feature.feature pch-source : : free dependency ;
class msvc-pch-generator : pch-generator
{
import property-set ;
rule run-pch ( project name ? : property-set : sources * )
- {
+ {
# searching header and source file in the sources
local pch-header ;
local pch-source ;
@@ -595,30 +588,24 @@
pch-source = $(s) ;
}
}
-
+
if ! $(pch-header)
{
errors.user-error "can't build pch without pch-header" ;
}
- # If we don't have PCH source, it's fine, we'll
- # create temporary .cpp file in the action.
+ # If we don't have PCH source - that's fine. We'll just create a
+ # temporary .cpp file in the action.
- local generated =
- [
- # Passing of <pch-source> is a dirty trick,
- # needed because non-composing generators
- # with multiple inputs are subtly broken:
- # https://zigzag.cs.msu.su:7813/boost.build/ticket/111
- generator.run $(project) $(name)
- : [
- property-set.create
- <pch-source>$(pch-source)
- [ $(property-set).raw ]
- ]
- : $(pch-header)
- ]
- ;
+ local generated = [ generator.run $(project) $(name)
+ : [ property-set.create
+ # Passing of <pch-source> is a dirty trick, needed because
+ # non-composing generators with multiple inputs are subtly
+ # broken. For more detailed information see:
+ # https://zigzag.cs.msu.su:7813/boost.build/ticket/111
+ <pch-source>$(pch-source)
+ [ $(property-set).raw ] ]
+ : $(pch-header) ] ;
local pch-file ;
for local g in $(generated)
@@ -629,29 +616,24 @@
}
}
- return
- [
- property-set.create
- <pch-header>$(pch-header)
- <pch-file>$(pch-file)
- ]
- $(generated)
- ;
+ return [ property-set.create <pch-header>$(pch-header)
+ <pch-file>$(pch-file) ] $(generated) ;
}
}
-# Note: the 'H' source type will catch both '.h' header and '.hpp' header. The latter
-# have HPP type, but HPP type is derived from H. The type of compilation is determined
-# entirely by the destination type.
+
+# Note: the 'H' source type will catch both '.h' and '.hpp' headers. The latter
+# has the HPP type, but HPP is derived from H. The type of compilation is
+# determined entirely by the destination type.
generators.register [ new msvc-pch-generator msvc.compile.c.pch : H : C_PCH OBJ : <pch>on <toolset>msvc ] ;
generators.register [ new msvc-pch-generator msvc.compile.c++.pch : H : CPP_PCH OBJ : <pch>on <toolset>msvc ] ;
generators.override msvc.compile.c.pch : pch.default-c-pch-generator ;
generators.override msvc.compile.c++.pch : pch.default-cpp-pch-generator ;
-flags msvc.compile PCH_FILE <pch>on : <pch-file> ;
-flags msvc.compile PCH_SOURCE <pch>on : <pch-source> ;
-flags msvc.compile PCH_HEADER <pch>on : <pch-header> ;
+flags msvc.compile PCH_FILE <pch>on : <pch-file> ;
+flags msvc.compile PCH_SOURCE <pch>on : <pch-source> ;
+flags msvc.compile PCH_HEADER <pch>on : <pch-header> ;
#
# Declare flags and action for compilation
@@ -681,7 +663,11 @@
flags msvc.compile C++FLAGS <exception-handling>on/<asynch-exceptions>on/<extern-c-nothrow>off : /EHa ;
flags msvc.compile C++FLAGS <exception-handling>on/<asynch-exceptions>on/<extern-c-nothrow>on : /EHac ;
+# By default 8.0 enables rtti support while prior versions disabled it. We
+# simply enable or disable it explicitly so we do not have to depend on this
+# default behaviour.
flags msvc.compile CFLAGS <rtti>on : /GR ;
+flags msvc.compile CFLAGS <rtti>off : /GR- ;
flags msvc.compile CFLAGS <runtime-debugging>off/<runtime-link>shared : /MD ;
flags msvc.compile CFLAGS <runtime-debugging>on/<runtime-link>shared : /MDd ;
@@ -697,22 +683,26 @@
flags msvc.compile UNDEFS <undef> ;
flags msvc.compile INCLUDES <include> ;
+
rule get-rspline ( target : lang-opt )
{
CC_RSPLINE on $(target) = [ on $(target) return $(lang-opt) -U$(UNDEFS) $(CFLAGS) $(C++FLAGS) $(OPTIONS) -c $(nl)-D$(DEFINES) $(nl)\"-I$(INCLUDES)\" ] ;
}
+
rule compile-c-c++ ( targets + : sources * )
{
DEPENDS $(<[1]) : [ on $(<[1]) return $(PCH_HEADER) ] ;
DEPENDS $(<[1]) : [ on $(<[1]) return $(PCH_FILE) ] ;
}
+
actions compile-c-c++
{
$(.CC) @"@($(<[1]:W).rsp:E="$(>[1]:W)" -Fo"$(<[1]:W)" -Yu"$(>[3]:D=)" -Fp"$(>[2]:W)" $(CC_RSPLINE))" $(.CC.FILTER)
}
+
rule compile.c ( targets + : sources * : properties * )
{
C++FLAGS on $(targets[1]) = ;
@@ -720,19 +710,22 @@
compile-c-c++ $(<) : $(>) [ on $(<) return $(PCH_FILE) ] [ on $(<) return $(PCH_HEADER) ] ;
}
+
rule compile.c++ ( targets + : sources * : properties * )
{
get-rspline $(targets) : -TP ;
compile-c-c++ $(<) : $(>) [ on $(<) return $(PCH_FILE) ] [ on $(<) return $(PCH_HEADER) ] ;
}
+
actions compile-c-c++-pch-s
{
$(.CC) @"@($(<[1]:W).rsp:E="$(>[2]:W)" -Fo"$(<[2]:W)" -Yc"$(>[1]:D=)" -Yl"__bjam_pch_symbol_$(>[1]:D=)" -Fp"$(<[1]:W)" $(CC_RSPLINE))" $(.CC.FILTER)
}
-# Needed only to avoid messing up Emacs syntax highlighting in
-# the messing N-quoted code below.
+
+# Needed only to avoid messing up Emacs syntax highlighting in the messy
+# N-quoted code below.
quote = "\"" ;
actions compile-c-c++-pch
@@ -740,6 +733,7 @@
$(.CC) @"@($(<[1]:W).rsp:E="$(>[2]:W)" -Fo"$(<[2]:W)" -Yc"$(>[1]:D=)" -Yl"__bjam_pch_symbol_$(>[1]:D=)" -Fp"$(<[1]:W)" $(CC_RSPLINE))" "@($(<[1]:W).cpp:E=#include $(quote)$(>[1]:D=)$(quote))" $(.CC.FILTER)
}
+
rule compile.c.pch ( targets + : sources * : properties * )
{
C++FLAGS on $(targets[1]) = ;
@@ -747,47 +741,52 @@
get-rspline $(targets[2]) : -TC ;
local pch-source = [ on $(<) return $(PCH_SOURCE) ] ;
if $(pch-source)
- {
+ {
DEPENDS $(<) : $(pch-source) ;
compile-c-c++-pch-s $(targets) : $(sources) $(pch-source) ;
}
else
{
compile-c-c++-pch $(targets) : $(sources) ;
- }
+ }
}
+
rule compile.c++.pch ( targets + : sources * : properties * )
{
get-rspline $(targets[1]) : -TP ;
get-rspline $(targets[2]) : -TP ;
local pch-source = [ on $(<) return $(PCH_SOURCE) ] ;
if $(pch-source)
- {
+ {
DEPENDS $(<) : $(pch-source) ;
compile-c-c++-pch-s $(targets) : $(sources) $(pch-source) ;
}
else
{
compile-c-c++-pch $(targets) : $(sources) ;
- }
+ }
}
+
actions compile.rc
{
$(.RC) -l 0x409 -U$(UNDEFS) -D$(DEFINES) -I"$(INCLUDES)" -fo "$(<:W)" "$(>:W)"
}
+
# See midl.jam for details
TOUCH_FILE = [ common.file-touch-command ] ;
+
actions compile.idl
{
$(.IDL) /nologo @"@($(<[1]:W).rsp:E=$(nl)"$(>:W)" $(nl)-D$(DEFINES) $(nl)"-I$(INCLUDES)" $(nl)-U$(UNDEFS) $(nl)$(MIDLFLAGS) $(nl)/tlb "$(<[1]:W)" $(nl)/h "$(<[2]:W)" $(nl)/iid "$(<[3]:W)" $(nl)/proxy "$(<[4]:W)" $(nl)/dlldata "$(<[5]:W)")"
- $(TOUCH_FILE) "$(<[4]:W)"
- $(TOUCH_FILE) "$(<[5]:W)"
+ $(TOUCH_FILE) "$(<[4]:W)"
+ $(TOUCH_FILE) "$(<[5]:W)"
}
+
# Declare flags and action for the assembler
flags msvc.compile.asm USER_ASMFLAGS <asmflags> : ;
@@ -805,12 +804,13 @@
$(.ASM) -nologo -c -coff -Zp4 -Cp -Cx $(USER_ASMFLAGS) -Fo "$(<:W)" "$(>:W)"
}
+
# Declare flags and action for linking
-flags msvc.link PDB_LINKFLAG <debug-symbols>on/<debug-store>database : /PDB: ; # not used yet
+flags msvc.link PDB_LINKFLAG <debug-symbols>on/<debug-store>database : /PDB: ; # not used yet
flags msvc.link LINKFLAGS <debug-symbols>on : /DEBUG ;
flags msvc.link DEF_FILE <def-file> ;
-# The linker disables the default optimizations when using /DEBUG. Whe have
-# to enable them manually for release builds with debug symbols.
+# The linker disables the default optimizations when using /DEBUG. We have to
+# enable them manually for release builds with debug symbols.
flags msvc LINKFLAGS <debug-symbols>on/<runtime-debugging>off : /OPT:REF,ICF ;
flags msvc LINKFLAGS <user-interface>console : /subsystem:console ;
@@ -822,7 +822,6 @@
flags msvc.link OPTIONS <linkflags> ;
flags msvc.link LINKPATH <library-path> ;
-
flags msvc.link FINDLIBS_ST <find-static-library> ;
flags msvc.link FINDLIBS_SA <find-shared-library> ;
flags msvc.link LIBRARY_OPTION <toolset>msvc : "" : unchecked ;
@@ -836,44 +835,42 @@
DEPENDS $(<) : [ on $(<) return $(DEF_FILE) ] ;
}
-# Declare action for creating static libraries
-# If library exists, remove it before adding files. See
-# http://article.gmane.org/gmane.comp.lib.boost.build/4241
-# for rationale.
+# Declare action for creating static libraries. If the library already exists,
+# remove it before adding files. See
+# http://article.gmane.org/gmane.comp.lib.boost.build/4241 for rationale.
if [ os.name ] in NT
-{
- # The 'DEL' command would issue a message to stdout
- # if the file does not exist, so need a check.
+{
+    # The 'DEL' command would issue a message to stdout if the file does not
+    # exist, so we need to check for the file first.
actions archive
- {
- if exist "$(<[1])" DEL "$(<[1])"
+ {
+ if exist "$(<[1])" DEL "$(<[1])"
$(.LD) $(AROPTIONS) /out:"$(<[1])" @"@($(<[1]:W).rsp:E=$(nl)"$(>)" $(nl)$(LIBRARIES_MENTIONED_BY_FILE) $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib" $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")"
}
}
else
{
actions archive
- {
+ {
$(RM) "$(<[1])"
$(.LD) $(AROPTIONS) /out:"$(<[1])" @"@($(<[1]:W).rsp:E=$(nl)"$(>)" $(nl)$(LIBRARIES_MENTIONED_BY_FILE) $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib" $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")"
}
}
-
-# incremental linking a DLL causes no end of problems: if the
-# actual exports don't change, the import .lib file is never
-# updated. Therefore, the .lib is always out-of-date and gets
-# rebuilt every time. I'm not sure that incremental linking is
-# such a great idea in general, but in this case I'm sure we
-# don't want it.
-
-# Windows Manifests is a new way to specify dependencies
-# on managed DotNet assemblies and Windows native DLLs. The
-# manifests are embedded as resources and are useful in
-# any PE targets (both DLL and EXE)
+
+
+# Incremental linking a DLL causes no end of problems: if the actual exports
+# don't change, the import .lib file is never updated. Therefore, the .lib is
+# always out-of-date and gets rebuilt every time. I'm not sure that incremental
+# linking is such a great idea in general, but in this case I'm sure we don't
+# want it.
+
+# Windows manifests are a new way to specify dependencies on managed DotNet
+# assemblies and Windows native DLLs. The manifests are embedded as resources
+# and are useful in any PE target (both DLL and EXE).
if [ os.name ] in NT
{
- actions link bind DEF_FILE
+ actions link bind DEF_FILE LIBRARIES_MENTIONED_BY_FILE
{
$(.LD) $(LINKFLAGS) /out:"$(<[1]:W)" /LIBPATH:"$(LINKPATH:W)" $(OPTIONS) @"@($(<[1]:W).rsp:E=$(nl)"$(>)" $(nl)$(LIBRARIES_MENTIONED_BY_FILE) $(nl)$(LIBRARIES) $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib" $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")"
if %ERRORLEVEL% NEQ 0 EXIT %ERRORLEVEL%
@@ -882,7 +879,7 @@
)
}
- actions link.dll bind DEF_FILE
+ actions link.dll bind DEF_FILE LIBRARIES_MENTIONED_BY_FILE
{
$(.LD) /DLL $(LINKFLAGS) /out:"$(<[1]:W)" /IMPLIB:"$(<[2]:W)" /LIBPATH:"$(LINKPATH:W)" /def:"$(DEF_FILE)" $(OPTIONS) @"@($(<[1]:W).rsp:E=$(nl)"$(>)" $(nl)$(LIBRARIES_MENTIONED_BY_FILE) $(nl)$(LIBRARIES) $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib" $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")"
if %ERRORLEVEL% NEQ 0 EXIT %ERRORLEVEL%
@@ -893,7 +890,7 @@
}
else
{
- actions link bind DEF_FILE
+ actions link bind DEF_FILE LIBRARIES_MENTIONED_BY_FILE
{
$(.LD) $(LINKFLAGS) /out:"$(<[1]:W)" /LIBPATH:"$(LINKPATH:W)" $(OPTIONS) @"@($(<[1]:W).rsp:E=$(nl)"$(>)" $(nl)$(LIBRARIES_MENTIONED_BY_FILE) $(nl)$(LIBRARIES) $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib" $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")"
if test -e "$(<[1]).manifest"; then
@@ -901,7 +898,7 @@
fi
}
- actions link.dll bind DEF_FILE
+ actions link.dll bind DEF_FILE LIBRARIES_MENTIONED_BY_FILE
{
$(.LD) /DLL $(LINKFLAGS) /out:"$(<[1]:W)" /IMPLIB:"$(<[2]:W)" /LIBPATH:"$(LINKPATH:W)" /def:"$(DEF_FILE)" $(OPTIONS) @"@($(<[1]:W).rsp:E=$(nl)"$(>)" $(nl)$(LIBRARIES_MENTIONED_BY_FILE) $(nl)$(LIBRARIES) $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib" $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")"
if test -e "$(<[1]).manifest"; then
@@ -919,7 +916,7 @@
#
# Autodetection code
-# detects versions listed as '.known-versions' using registry, environment
+# detects versions listed as '.known-versions' using registry, environment
# and checking default paths. Supports both native Windows and Cygwin.
#
@@ -933,7 +930,7 @@
.version-alias-7 = 7.0 ;
.version-alias-8 = 8.0 ;
.version-alias-9 = 9.0 ;
-
+
# Name of the registry key that contains Visual C++ installation path
# (relative to "HKEY_LOCAL_MACHINE\SOFTWARE\\Microsoft"
.version-6.0-reg = "VisualStudio\\6.0\\Setup\\Microsoft Visual C++" ;
@@ -945,18 +942,18 @@
.version-9.0express-reg = "VCExpress\\9.0\\Setup\\VC" ;
# Visual C++ Toolkit 2003 do not store its installation path in the registry.
-# The environment variable 'VCToolkitInstallDir' and the default installation
+# The environment variable 'VCToolkitInstallDir' and the default installation
# path will be checked instead.
.version-7.1toolkit-path = "Microsoft Visual C++ Toolkit 2003" "bin" ;
.version-7.1toolkit-env = VCToolkitInstallDir ;
-# Path to the folder containing "cl.exe" relative to the value of the corresponding
-# environment variable
+# Path to the folder containing "cl.exe" relative to the value of the
+# corresponding environment variable.
.version-7.1toolkit-envpath = "bin" ;
-# Validates given path, registers found configuration and prints debug information
-# about it.
+# Validates the given path, registers the found configuration and prints debug
+# information about it.
local rule register-configuration ( version : path ? )
{
if $(path)
@@ -978,8 +975,7 @@
if [ os.name ] in NT CYGWIN
{
- # Get installation paths from the registry
-
+ # Get installation paths from the registry.
for local i in $(.known-versions)
{
if $(.version-$(i)-reg)
@@ -988,11 +984,10 @@
for local x in "" "Wow6432Node\\"
{
vc-path += [ W32_GETREG
- "HKEY_LOCAL_MACHINE\\SOFTWARE\\Microsoft\\"$(x)$(.version-$(i)-reg)
+ "HKEY_LOCAL_MACHINE\\SOFTWARE\\"$(x)"\\Microsoft\\"$(.version-$(i)-reg)
: "ProductDir" ] ;
}
-
-
+
if $(vc-path)
{
vc-path = [ path.native [ path.join [ path.make-NT $(vc-path[1]) ] "bin" ] ] ;
@@ -1003,7 +998,7 @@
}
-# Check environment and default installation paths
+# Check environment and default installation paths.
for local i in $(.known-versions)
{
@@ -1012,4 +1007,3 @@
register-configuration $(i) : [ default-path $(i) ] ;
}
}
-
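
(If auto-detection finds nothing suitable, a version and compiler command can
still be configured explicitly. A hedged sketch for user-config.jam; the
installation path below is an assumption:)

    using msvc ;    # pick whatever was auto-detected
    using msvc : 7.1 : "C:/Program Files/Microsoft Visual Studio .NET 2003/VC7/bin/cl.exe" ;
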
Modified: branches/release/tools/build/v2/tools/notfile.jam
==============================================================================
--- branches/release/tools/build/v2/tools/notfile.jam (original)
+++ branches/release/tools/build/v2/tools/notfile.jam 2008-03-15 14:55:28 EDT (Sat, 15 Mar 2008)
@@ -4,47 +4,52 @@
# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
# http://www.boost.org/LICENSE_1_0.txt)
+import "class" : new ;
import generators ;
-import type ;
-import feature ;
import project ;
import targets ;
import toolset ;
-import "class" : new ;
+import type ;
+
type.register NOTFILE_MAIN ;
+
class notfile-generator : generator
{
rule __init__ ( * : * )
{
generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
}
-
+
rule run ( project name ? : property-set : sources * : multiple ? )
{
local action ;
local action-name = [ $(property-set).get <action> ] ;
-
+
local m = [ MATCH ^@(.*) : $(action-name) ] ;
-
- if $(m)
+
+ if $(m)
{
- action = [ new action $(sources) : $(m[1])
- : $(property-set) ] ;
+ action = [ new action $(sources) : $(m[1])
+ : $(property-set) ] ;
}
else
{
- action = [ new action $(sources) : notfile.run
- : $(property-set) ] ;
- }
+ action = [ new action $(sources) : notfile.run
+ : $(property-set) ] ;
+ }
return [ new notfile-target $(name) : $(project) : $(action) ] ;
- }
+ }
}
+
generators.register [ new notfile-generator notfile.main : : NOTFILE_MAIN ] ;
+
toolset.flags notfile.run ACTION : <action> ;
+
+
actions run
{
$(ACTION)
@@ -52,20 +57,17 @@
rule notfile ( target-name : action + : sources * : requirements * : default-build * )
-{
+{
local project = [ project.current ] ;
-
+
requirements += <action>$(action) ;
-
- targets.main-target-alternative
+ targets.main-target-alternative
[ new typed-target $(target-name) : $(project) : NOTFILE_MAIN
- : [ targets.main-target-sources $(sources) : $(target-name) ]
- : [ targets.main-target-requirements $(requirements) : $(project) ]
- : [ targets.main-target-default-build $(default-build) : $(project) ]
+ : [ targets.main-target-sources $(sources) : $(target-name) ]
+ : [ targets.main-target-requirements $(requirements) : $(project) ]
+ : [ targets.main-target-default-build $(default-build) : $(project) ]
] ;
}
IMPORT $(__name__) : notfile : : notfile ;
-
-
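
(For context, a minimal hedged usage sketch of the rule exported above; the
target name, action name and command are illustrative only:)

    notfile say-hello : @echo-hello ;
    actions echo-hello
    {
        echo "hello from a notfile target"
    }
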
Modified: branches/release/tools/build/v2/tools/package.jam
==============================================================================
--- branches/release/tools/build/v2/tools/package.jam (original)
+++ branches/release/tools/build/v2/tools/package.jam 2008-03-15 14:55:28 EDT (Sat, 15 Mar 2008)
@@ -34,7 +34,6 @@
import targets ;
import "class" : new ;
import option ;
-import errors ;
import stage ;
import property ;
@@ -77,7 +76,6 @@
# source header files
local include-locate = [ option.get includedir : $(prefix)/include ] ;
-
stage.install $(name)-bin : $(binaries) : $(requirements) <location>$(bin-locate) ;
stage.install $(name)-lib :
$(binaries) $(libraries)
Modified: branches/release/tools/build/v2/tools/pathscale.jam
==============================================================================
--- branches/release/tools/build/v2/tools/pathscale.jam (original)
+++ branches/release/tools/build/v2/tools/pathscale.jam 2008-03-15 14:55:28 EDT (Sat, 15 Mar 2008)
@@ -129,7 +129,7 @@
actions link bind LIBRARIES
{
- "$(CONFIG_COMMAND)" $(OPTIONS) -L"$(LINKPATH)" -Wl,-rpath,"$(RPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST)
+ "$(CONFIG_COMMAND)" $(OPTIONS) -L"$(LINKPATH)" -Wl,$(RPATH_OPTION:E=-R)$(SPACE)-Wl,"$(RPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST)
}
# Slight mods for dlls
@@ -140,7 +140,7 @@
actions link.dll bind LIBRARIES
{
- "$(CONFIG_COMMAND)" $(OPTIONS) -L"$(LINKPATH)" -Wl,-rpath,"$(RPATH)" -o "$(<)" -Wl,-soname$(SPACE)-Wl,$(<[1]:D=) -shared "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST)
+ "$(CONFIG_COMMAND)" $(OPTIONS) -L"$(LINKPATH)" -Wl,$(RPATH_OPTION:E=-R)$(SPACE)-Wl,"$(RPATH)" -o "$(<)" -Wl,-soname$(SPACE)-Wl,$(<[1]:D=) -shared "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST)
}
# Declare action for creating static libraries
Modified: branches/release/tools/build/v2/tools/pch.jam
==============================================================================
--- branches/release/tools/build/v2/tools/pch.jam (original)
+++ branches/release/tools/build/v2/tools/pch.jam 2008-03-15 14:55:28 EDT (Sat, 15 Mar 2008)
@@ -34,19 +34,18 @@
type.register C_PCH : : PCH ;
type.register CPP_PCH : : PCH ;
-# control precompiled header (PCH) generation
+# Control precompiled header (PCH) generation.
feature.feature pch :
- on
- off
- : propagated
- ;
+ on
+ off
+ : propagated ;
+
feature.feature pch-header : : free dependency ;
feature.feature pch-file : : free dependency ;
-# Base PCH generator. The 'run' method has the logic to
-# prevent this generator from being run unless it's used
-# in top-level PCH target.
+# Base PCH generator. The 'run' method has the logic to prevent this generator
+# from being run unless it's being used for a top-level PCH target.
class pch-generator : generator
{
import property-set ;
@@ -60,38 +59,37 @@
{
if ! $(name)
{
- # Unless this generator is invoked as the top-most
- # generator for a main target, fail. This allows using
- # 'H' type as input type for this generator, while
- # preventing Boost.Build to try this generator when not
- # explicitly asked for.
+ # Unless this generator is invoked as the top-most generator for a
+ # main target, fail. This allows using 'H' type as input type for
+            # this generator, while preventing Boost.Build from trying this
+            # generator when not explicitly asked for.
#
- # One bad example is msvc, where pch generator produces
- # both PCH target and OBJ target, so if there's any
- # header generated (like by bison, or by msidl), we'd
- # try to use pch generator to get OBJ from that H, which
- # is completely wrong. By restricting this generator
+ # One bad example is msvc, where pch generator produces both PCH
+ # target and OBJ target, so if there's any header generated (like by
+ # bison, or by msidl), we'd try to use pch generator to get OBJ from
+ # that H, which is completely wrong. By restricting this generator
# only to pch main target, such problem is solved.
}
else
{
- local r = [ run-pch $(project) $(name)
+ local r = [ run-pch $(project) $(name)
: [ $(property-set).add-raw <define>BOOST_BUILD_PCH_ENABLED ]
: $(sources) ] ;
return [ generators.add-usage-requirements $(r)
: <define>BOOST_BUILD_PCH_ENABLED ] ;
- }
+ }
}
-
+
# This rule must be overridden by the derived classes.
rule run-pch ( project name ? : property-set : sources + )
- {
- }
+ {
+ }
}
-# NOTE: requiremetns are empty,
-# default pch generator can be applied when pch=off
-generators.register [
- new dummy-generator pch.default-c-pch-generator : : C_PCH ] ;
-generators.register [
- new dummy-generator pch.default-cpp-pch-generator : : CPP_PCH ] ;
+
+# NOTE: requirements are empty, default pch generator can be applied when
+# pch=off.
+generators.register
+ [ new dummy-generator pch.default-c-pch-generator : : C_PCH ] ;
+generators.register
+ [ new dummy-generator pch.default-cpp-pch-generator : : CPP_PCH ] ;
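
(A hedged usage sketch, with assumed file and target names: a project declares
a top-level PCH target and lists it as a source of the targets that should use
it. With <pch>off the dummy generators registered above make the PCH target a
no-op:)

    cpp-pch mypch : pch.hpp ;
    exe hello : mypch hello.cpp ;
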
Modified: branches/release/tools/build/v2/tools/pgi.jam
==============================================================================
--- branches/release/tools/build/v2/tools/pgi.jam (original)
+++ branches/release/tools/build/v2/tools/pgi.jam 2008-03-15 14:55:28 EDT (Sat, 15 Mar 2008)
@@ -11,6 +11,7 @@
import fortran ;
import type ;
import common ;
+import gcc ;
feature.extend toolset : pgi ;
toolset.inherit pgi : unix ;
@@ -41,6 +42,8 @@
flags pgi.link FINDLIBS-SA : [
feature.get-values <find-shared-library> : $(options) ] : unchecked ;
+
+ gcc.init-link-flags pgi gnu $(condition) ;
}
# Declare generators
@@ -49,7 +52,8 @@
generators.register-fortran-compiler pgi.compile.fortran : FORTRAN : OBJ : <toolset>pgi ;
# Declare flags and actions for compilation
-flags pgi.compile OPTIONS <link>shared : -shared -fpic ;
+flags pgi.compile OPTIONS : -Kieee ;
+flags pgi.compile OPTIONS <link>shared : -fpic ;
flags pgi.compile OPTIONS <debug-symbols>on : -g ;
flags pgi.compile OPTIONS <profiling>on : -xprofile=tcov ;
flags pgi.compile OPTIONS <optimization>speed : -fast -Mx,8,0x10000000 ;
@@ -93,7 +97,7 @@
flags pgi.link LINKPATH <library-path> ;
flags pgi.link FINDLIBS-ST <find-static-library> ;
flags pgi.link FINDLIBS-SA <find-shared-library> ;
-flags pgi.link FINDLIBS-SA <threading>multi : pthread ;
+flags pgi.link FINDLIBS-SA <threading>multi : pthread rt ;
flags pgi.link LIBRARIES <library-file> ;
flags pgi.link LINK-RUNTIME <runtime-link>static : static ;
flags pgi.link LINK-RUNTIME <runtime-link>shared : dynamic ;
@@ -129,7 +133,7 @@
actions link.dll bind LIBRARIES
{
- "$(CONFIG_COMMAND)" -shared -fpic $(OPTIONS) -L"$(LINKPATH)" -R"$(RPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -Bdynamic -l$(FINDLIBS-SA) -Bstatic -l$(FINDLIBS-ST) -B$(LINK-RUNTIME)
+ "$(CONFIG_COMMAND)" $(OPTIONS) -shared -L"$(LINKPATH)" -R"$(RPATH)" -o "$(<)" "$(>)" -Wl,-h -Wl,$(<[1]:D=) "$(LIBRARIES)" -Bdynamic -l$(FINDLIBS-SA) -Bstatic -l$(FINDLIBS-ST) -B$(LINK-RUNTIME)
}
actions updated together piecemeal pgi.archive
Modified: branches/release/tools/build/v2/tools/python.jam
==============================================================================
--- branches/release/tools/build/v2/tools/python.jam (original)
+++ branches/release/tools/build/v2/tools/python.jam 2008-03-15 14:55:28 EDT (Sat, 15 Mar 2008)
@@ -55,6 +55,11 @@
# Extra library needed by pthread on some platforms.
lib rt ;
+# The pythonpath feature specifies additional elements for the PYTHONPATH
+# environment variable, set by run-pyd. For example, pythonpath can be used
+# to access Python modules that are part of the product being built, but
+# are not installed in the development system's default paths.
+feature.feature pythonpath : : free optional path ;
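
(A hedged usage sketch for the new feature; the target names and the extra
module directory are assumptions, and the bpl-test style of declaring the test
is assumed. Being a free feature, it can simply be added to a test target's
requirements and run-pyd will append it to PYTHONPATH:)

    python-extension cool : cool.cpp ;
    bpl-test cool-test : test_cool.py cool : <pythonpath>python/support-modules ;
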
# Initializes the Python toolset. Note that all parameters are
# optional.
@@ -94,8 +99,8 @@
#
# using python 2.3 ;
# using python 2.3 : /usr/local/bin/python ;
-#
-rule init ( version ? : cmd-or-prefix ? : includes * : libraries ?
+#
+rule init ( version ? : cmd-or-prefix ? : includes * : libraries ?
: condition * : extension-suffix ? )
{
project.push-current $(.project) ;
@@ -108,7 +113,7 @@
debug-message " user-specified "$(v): \"$($(v))\" ;
}
}
-
+
configure $(version) : $(cmd-or-prefix) : $(includes) : $(libraries) : $(condition) : $(extension-suffix) ;
project.pop-current ;
@@ -610,13 +615,16 @@
# appears to duplicate the logic already in gcc.jam, it
# doesn't as long as we're not forcing <threading>multi.
- # Caleb Epstein reports that his python's
+ # On solaris 10,
# distutils.sysconfig.get_config_var('LIBS') yields
- # -lresolv -lsocket -lnsl -lrt -ldl. However, we're not
- # yet sure that is the right list for extension modules.
- # Being conservative, we add rt and remove pthread, which
- # was causing errors.
- return <library>dl <toolset>gcc:<library>rt ;
+ # '-lresolv -lsocket -lnsl -lrt -ldl'. However, that
+ # doesn't seem to be the right list for extension modules.
+ # For example, on my installation, adding -ldl causes at
+ # least one test to fail because the library can't be
+ # found and removing it causes no failures.
+
+ # Apparently, though, we need to add -lrt for gcc.
+ return <toolset>gcc:<library>rt ;
case osf : return <library>pthread <toolset>gcc:<library>rt ;
@@ -661,7 +669,7 @@
}
# implementation of init
-local rule configure (
+local rule configure (
version ? : cmd-or-prefix ? : includes * : libraries ? : condition * : extension-suffix ? )
{
local prefix ;
@@ -672,13 +680,13 @@
local target-os = [ feature.get-values target-os : $(condition) ] ;
target-os ?= [ feature.defaults target-os ] ;
target-os = $(target-os:G=) ;
-
- if $(target-os) = windows && <python-debugging>on in $(condition)
+
+ if $(target-os) = windows && <python-debugging>on in $(condition)
{
extension-suffix ?= _d ;
}
extension-suffix ?= "" ;
-
+
# Normalize and dissect any version number
local major-minor ;
if $(version)
@@ -712,7 +720,7 @@
# Values to use in case we can't really find anything in the system.
local fallback-cmd = $(cmds-to-try[1]) ;
local fallback-version ;
-
+
# Anything left to find or check?
if ! ( $(interpreter-cmd) && $(includes) && $(libraries) )
{
@@ -731,7 +739,7 @@
case windows : platform = win32 ;
case cygwin : platform = cygwin ;
}
-
+
while $(cmds-to-try)
{
# pop top command
@@ -777,7 +785,7 @@
}
}
}
-
+
# Anything left to compute?
if $(includes) && $(libraries)
{
@@ -855,13 +863,13 @@
}
local dll-path = $(libraries) ;
-
+
# Make sure that we can find the Python DLL on windows
if $(target-os) = windows && $(exec-prefix)
{
dll-path += $(exec-prefix) ;
}
-
+
#
# prepare usage requirements
#
@@ -880,10 +888,15 @@
usage-requirements += <define>Py_DEBUG ;
}
}
+
+ # Global, but conditional, requirements to give access to the interpreter
+ # for general utilities, like other toolsets, that run Python scripts.
+ toolset.add-requirements
+ $(target-requirements:J=,):<python.interpreter>$(interpreter-cmd) ;
- # Register the right suffix for extensions
+ # Register the right suffix for extensions.
register-extension-suffix $(extension-suffix) : $(target-requirements) ;
-
+
#
# Declare the "python" target. This should really be called
# python_for_embedding
@@ -908,7 +921,7 @@
# (http://article.gmane.org/gmane.comp.python.general/544986). The
# evil here, aside from the workaround necessitated by
# Python's bug, is that:
- #
+ #
# a. we're guessing the location of the python standard
# library from the location of pythonXX.lib
#
@@ -921,7 +934,7 @@
set-PYTHONPATH =
[ common.prepend-path-variable-command PYTHONPATH : $(libraries:D)/Lib ] ;
}
-
+
alias python
:
: $(target-requirements)
@@ -930,8 +943,8 @@
# the system libs is a mystery, but if we don't do it, on
# cygwin, -lpythonX.Y never appears in the command line
# (although it does on linux).
- : $(usage-requirements)
- <testing.launcher>$(set-PYTHONPATH)
+ : $(usage-requirements)
+ <testing.launcher>$(set-PYTHONPATH)
<library-path>$(libraries) <dll-path>$(dll-path) <library>python.lib
;
}
@@ -984,12 +997,12 @@
local rule register-extension-suffix ( root : condition * )
{
local suffix ;
-
+
switch [ feature.get-values target-os : $(condition) ]
{
case windows : suffix = pyd ;
case cygwin : suffix = dll ;
- case hpux :
+ case hpux :
{
if [ feature.get-values python : $(condition) ] in 1.5 1.6 2.0 2.1 2.2 2.3 2.4
{
@@ -1002,7 +1015,7 @@
}
case * : suffix = so ;
}
-
+
type.set-generated-target-suffix PYTHON_EXTENSION : $(condition) : <$(root).$(suffix)> ;
}
@@ -1020,23 +1033,23 @@
local project = [ project.current ] ;
-
targets.main-target-alternative
- [ new typed-target $(name) : $(project) : PYTHON_EXTENSION
- : [ targets.main-target-sources $(sources) : $(name) ]
- : [ targets.main-target-requirements $(requirements) : $(project) ]
- : [ targets.main-target-default-build $(default-build) : $(project) ]
- ] ;
+ [ new typed-target $(name) : $(project) : PYTHON_EXTENSION
+ : [ targets.main-target-sources $(sources) : $(name) ]
+ : [ targets.main-target-requirements $(requirements) : $(project) ]
+ : [ targets.main-target-default-build $(default-build) : $(project) ]
+ ] ;
}
IMPORT python : python-extension : : python-extension ;
+
# Support for testing
type.register PY : py ;
type.register RUN_PYD_OUTPUT ;
-#type.set-generated-target-suffix RUN_PYD : : run ;
type.register RUN_PYD : : TEST ;
+
class python-test-generator : generator
{
import set ;
@@ -1140,6 +1153,13 @@
return $(x[1]) ;
}
+# Extract the path to a single ".pyd" source. This is used to build the
+# PYTHONPATH for running bpl tests.
+local rule pyd-pythonpath ( source )
+{
+ return [ on $(source) return $(LOCATE) $(SEARCH) ] ;
+}
+
# The flag settings on testing.capture-output do not
# apply to python.capture output at the moment.
# Redo this explicitly.
@@ -1153,8 +1173,9 @@
# over explicitly.
RUN_PATH on $(sources[1]) = [ on $(sources[2-]) return $(RUN_PATH) ] ;
- PYTHONPATH = [ on $(sources[2-]) return $(LOCATE) $(SEARCH) ] ;
-
+ PYTHONPATH = [ sequence.transform pyd-pythonpath : $(sources[2-]) ] ;
+ PYTHONPATH += [ feature.get-values pythonpath : $(properties) ] ;
+
# After test is run, we remove the Python module, but not the Python
# script.
testing.capture-output $(target) : $(sources[1]) : $(properties)
Modified: branches/release/tools/build/v2/tools/qt4.jam
==============================================================================
--- branches/release/tools/build/v2/tools/qt4.jam (original)
+++ branches/release/tools/build/v2/tools/qt4.jam 2008-03-15 14:55:28 EDT (Sat, 15 Mar 2008)
@@ -492,17 +492,18 @@
local r = [ virtual-target.register $(target) ] ;
- # Since this generator will return H target, the linking generator
- # won't use it at all, and won't set any dependency on it.
- # However, we need to target to be seen by bjam, so that dependency
- # from sources to this generated header is detected -- if jam does
- # not know about this target, it won't do anything.
+ # Since this generator will return a H target, the linking generator
+ # won't use it at all, and won't set any dependency on it. However, we
+ # need the target to be seen by bjam, so that dependency from sources to
+ # this generated header is detected -- if jam does not know about this
+ # target, it won't do anything.
DEPENDS all : [ $(r).actualize ] ;
return $(r) ;
}
}
+
class moc-h-generator : generator
{
rule __init__ ( * : * )
@@ -525,17 +526,19 @@
local r = [ virtual-target.register $(target) ] ;
- # Since this generator will return H target, the linking generator
- # won't use it at all, and won't set any dependency on it.
- # However, we need to target to be seen by bjam, so that dependency
- # from sources to this generated header is detected -- if jam does
- # not know about this target, it won't do anything.
+ # Since this generator will return a H target, the linking generator
+ # won't use it at all, and won't set any dependency on it. However,
+ # we need the target to be seen by bjam, so that dependency from
+ # sources to this generated header is detected -- if jam does not
+ # know about this target, it won't do anything.
DEPENDS all : [ $(r).actualize ] ;
return $(r) ;
}
}
}
+
+
class moc-inc-generator : generator
{
rule __init__ ( * : * )
@@ -556,11 +559,11 @@
local target = [
new file-target moc_$(name) : CPP : $(project) : $(a) ] ;
- # Since this generator will return H target, the linking generator
- # won't use it at all, and won't set any dependency on it.
- # However, we need to target to be seen by bjam, so that dependency
- # from sources to this generated header is detected -- if jam does
- # not know about this target, it won't do anything.
+ # Since this generator will return a H target, the linking generator
+ # won't use it at all, and won't set any dependency on it. However,
+ # we need the target to be seen by bjam, so that dependency from
+ # sources to this generated header is detected -- if jam does not
+ # know about this target, it won't do anything.
DEPENDS all : [ $(target).actualize ] ;
return [ virtual-target.register $(target) ] ;
@@ -568,18 +571,20 @@
}
}
-# Query the installation directory
-# This is needed in at least two scenarios
-# First, when re-using sources from the Qt-Tree.
-# Second, to "install" custom Qt plugins to the Qt-Tree.
+
+# Query the installation directory. This is needed in at least two scenarios.
+# First, when re-using sources from the Qt-Tree. Second, to "install" custom Qt
+# plugins to the Qt-Tree.
rule directory
{
return $(.prefix) ;
}
+
# Get <include> and <defines> from current toolset
flags qt4.moc INCLUDES <include> ;
-flags qt4.moc DEFINES <define> ;
+flags qt4.moc DEFINES <define> ;
+
# Processes headers to create Qt MetaObject information
# Qt4-moc has its c++-parser, so pass INCLUDES and DEFINES.
@@ -588,6 +593,7 @@
$(.binprefix)/moc -I$(INCLUDES) -D$(DEFINES) -f $(>) -o $(<)
}
+
# When moccing files for include only, we don't need -f,
# otherwise the generated code will include the .cpp
# and we'll get duplicated symbols.
@@ -603,12 +609,14 @@
$(.binprefix)/rcc $(>) -name $(>:B) -o $(<)
}
+
# Generates user-interface source from .ui files
actions uic-h
{
$(.binprefix)/uic $(>) -o $(<)
}
+
# Scanner for .qrc files.
# Look for the CDATA section of the <file> tag.
# Ignore the "alias" attribute.
Modified: branches/release/tools/build/v2/tools/quickbook.jam
==============================================================================
--- branches/release/tools/build/v2/tools/quickbook.jam (original)
+++ branches/release/tools/build/v2/tools/quickbook.jam 2008-03-15 14:55:28 EDT (Sat, 15 Mar 2008)
@@ -253,7 +253,8 @@
rule pattern ( )
{
return "\\[[ \t]*include[ \t]+([^]]+)\\]"
- "\\[[ \t]*include:[a-zA-Z0-9_]+[ \t]+([^]]+)\\]" ;
+ "\\[[ \t]*include:[a-zA-Z0-9_]+[ \t]+([^]]+)\\]"
+ "\\[[ \t]*import[ \t]+([^]]+)\\]" ;
}
}
Modified: branches/release/tools/build/v2/tools/stage.jam
==============================================================================
--- branches/release/tools/build/v2/tools/stage.jam (original)
+++ branches/release/tools/build/v2/tools/stage.jam 2008-03-15 14:55:28 EDT (Sat, 15 Mar 2008)
@@ -1,42 +1,50 @@
-# Copyright 2003 Dave Abrahams
-# Copyright 2005, 2006 Rene Rivera
-# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+# Copyright 2003 Dave Abrahams
+# Copyright 2005, 2006 Rene Rivera
+# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-# This module defines the 'install' rule, used to copy a set of targets to
-# a single location
+# This module defines the 'install' rule, used to copy a set of targets to a
+# single location
import targets ;
import "class" : new ;
-import property ;
-import errors : error ;
-import type : type ;
+import errors ;
import type ;
-import regex ;
import generators ;
import feature ;
import project ;
-import property-set ;
import virtual-target ;
import path ;
+import types/register ;
-feature.feature <install-dependencies> : off on : incidental ;
-feature.feature <install-type> : : free incidental ;
-feature.feature <install-source-root> : : free path ;
-# If 'on', version symblinks for shared libraries won't be created
-# This feature has effect only on Unix.
+
+feature.feature <install-dependencies> : off on : incidental ;
+feature.feature <install-type> : : free incidental ;
+feature.feature <install-source-root> : : free path ;
+feature.feature <so-version> : : free incidental ;
+
+# If 'on', version symlinks for shared libraries won't be created. Affects Unix
+# builds only.
feature.feature <install-no-version-symlinks> : on : optional incidental ;
-feature.feature <so-version> : : free incidental ;
+
class install-target-class : basic-target
{
- import feature project type errors generators path stage ;
+ import feature ;
+ import project ;
+ import type ;
+ import errors ;
+ import generators ;
+ import path ;
+ import stage ;
import "class" : new ;
-
+ import property ;
+ import property-set ;
+
rule __init__ ( name-and-dir : project : sources * : requirements * : default-build * )
{
- basic-target.__init__ $(name-and-dir) : $(project) : $(sources) : $(requirements)
+ basic-target.__init__ $(name-and-dir) : $(project) : $(sources) : $(requirements)
: $(default-build) ;
}
@@ -50,195 +58,192 @@
property-set = [ $(property-set).add-raw $(loc:G=<location>) ] ;
}
-
+
return $(property-set) ;
}
-
+
# Takes a target that is installed and property set which is
# used when installing.
rule adjust-properties ( target : build-property-set )
{
local ps-raw ;
- local a = [ $(target).action ] ;
+ local a = [ $(target).action ] ;
if $(a)
{
local ps = [ $(a).properties ] ;
ps-raw = [ $(ps).raw ] ;
-
+
# Unless <hardcode-dll-paths>true is in properties, which can
# happen only if the user has explicitly requested it, nuke all
- # <dll-path> properties
+ # <dll-path> properties
if [ $(property-set).get <hardcode-dll-paths> ] != true
{
ps-raw = [ property.change $(ps-raw) : <dll-path> ] ;
}
-
+
# If any <dll-path> properties were specified for installing,
# add them.
local l = [ $(build-property-set).get <dll-path> ] ;
ps-raw += $(l:G=<dll-path>) ;
-
+
# Also copy <linkflags> feature from current build
# set, to be used for relinking.
local l = [ $(build-property-set).get <linkflags> ] ;
- ps-raw += $(l:G=<linkflags>) ;
+ ps-raw += $(l:G=<linkflags>) ;
}
-
+
# Remove the <tag> feature on original targets.
ps-raw = [ property.change $(ps-raw) : <tag> ] ;
# And <location>. If stage target has another stage target
# in sources, then we'll get virtual targets with <location>
# property set.
ps-raw = [ property.change $(ps-raw) : <location> ] ;
-
-
+
+
local d = [ $(build-property-set).get <dependency> ] ;
ps-raw += $(d:G=<dependency>) ;
-
+
local d = [ $(build-property-set).get <location> ] ;
ps-raw += $(d:G=<location>) ;
-
+
local ns = [ $(build-property-set).get <install-no-version-symlinks> ] ;
ps-raw += $(ns:G=<install-no-version-symlinks>) ;
-
+
local d = [ $(build-property-set).get <install-source-root> ] ;
# Make the path absolute: we'll use it to compute relative
# paths and making the path absolute will help.
if $(d)
- {
+ {
d = [ path.root $(d) [ path.pwd ] ] ;
ps-raw += $(d:G=<install-source-root>) ;
- }
-
+ }
+
if $(ps-raw)
{
return [ property-set.create $(ps-raw) ] ;
- }
+ }
else
{
return [ property-set.empty ] ;
- }
+ }
}
-
-
+
rule construct ( name : source-targets * : property-set )
- {
- source-targets = [
+ {
+ source-targets = [
targets-to-stage $(source-targets) : $(property-set) ] ;
-
- property-set = [ update-location $(property-set) ] ;
-
- local ename = [ $(property-set).get <name> ] ;
-
+
+ property-set = [ update-location $(property-set) ] ;
+
+ local ename = [ $(property-set).get <name> ] ;
+
if $(ename) && $(source-targets[2])
{
- errors.error
+ errors.error
"When <name> property is used in 'install', only one source is allowed" ;
}
-
-
+
local result ;
for local i in $(source-targets)
- {
+ {
local staged-targets ;
-
- local new-properties =
+
+ local new-properties =
[ adjust-properties $(i) : $(property-set) ] ;
-
+
# See if something special should be done when staging this
- # type. It is indicated by presense of special "staged" type
- local t = [ $(i).type ] ;
+            # type. It is indicated by the presence of a special "staged" type.
+ local t = [ $(i).type ] ;
if $(t) && [ type.registered INSTALLED_$(t) ]
{
if $(ename)
{
- error.error "In 'install': <name> property specified with target that requires relinking" ;
+ errors.error "In 'install': <name> property specified with target that requires relinking" ;
}
else
{
- local targets = [ generators.construct $(self.project) $(name) :
+ local targets = [ generators.construct $(self.project) $(name) :
INSTALLED_$(t) : $(new-properties) : $(i) ] ;
- staged-targets += $(targets[2-]) ;
- }
+ staged-targets += $(targets[2-]) ;
+ }
}
- else
- {
+ else
+ {
staged-targets = [ stage.copy-file $(self.project) $(ename)
: $(i) : $(new-properties) ] ;
}
-
+
if ! $(staged-targets)
- {
+ {
errors.error "Unable to generate staged version of " [ $(source).str ] ;
- }
-
- for t in $(staged-targets)
+ }
+
+ for t in $(staged-targets)
{
- result += [ virtual-target.register $(t) ] ;
- }
+ result += [ virtual-target.register $(t) ] ;
+ }
}
-
+
return [ property-set.empty ] $(result) ;
- }
-
+ }
# Given the list of source targets explicitly passed to 'stage',
# returns the list of targets which must be staged.
rule targets-to-stage ( source-targets * : property-set )
- {
+ {
local result ;
-
+
# Traverse the dependencies, if needed.
if [ $(property-set).get <install-dependencies> ] = "on"
{
source-targets = [ collect-targets $(source-targets) ] ;
}
-
+
# Filter the target types, if needed
- local included-types = [ $(property-set).get <install-type> ] ;
+ local included-types = [ $(property-set).get <install-type> ] ;
for local r in $(source-targets)
{
- local ty = [ $(r).type ] ;
+ local ty = [ $(r).type ] ;
if $(ty)
{
# Don't stage searched libs.
if $(ty) != SEARCHED_LIB
{
if $(included-types)
- {
+ {
if [ include-type $(ty) : $(included-types) ]
{
result += $(r) ;
- }
- }
+ }
+ }
else
{
- result += $(r) ;
- }
- }
+ result += $(r) ;
+ }
+ }
}
else if ! $(included-types)
{
# Don't install typeless target if there's
# explicit list of allowed types.
result += $(r) ;
- }
+ }
}
-
+
return $(result) ;
}
-
+
# CONSIDER: figure out why we can't use virtual-target.traverse here.
rule collect-targets ( targets * )
{
# Find subvariants
- local s ;
+ local s ;
for local t in $(targets)
{
s += [ $(t).creating-subvariant ] ;
}
s = [ sequence.unique $(s) ] ;
-
+
local result = $(targets) ;
for local i in $(s)
{
@@ -250,11 +255,11 @@
if $(r:G) != <use>
{
result2 += $(r:G=) ;
- }
- }
- result = [ sequence.unique $(result2) ] ;
+ }
+ }
+ result = [ sequence.unique $(result2) ] ;
}
-
+
# Returns true iff 'type' is subtype of some element of 'types-to-include'.
local rule include-type ( type : types-to-include * )
{
@@ -264,27 +269,27 @@
if [ type.is-subtype $(type) $(types-to-include[1]) ]
{
found = true ;
- }
+ }
types-to-include = $(types-to-include[2-]) ;
}
-
+
return $(found) ;
- }
+ }
}
-# Creates a copy of target 'source'. The 'properties' object should
-# have a <location> property which specifies where the target must
-# be placed.
+
+# Creates a copy of target 'source'. The 'properties' object should have a
+# <location> property which specifies where the target must be placed.
rule copy-file ( project name ? : source : properties )
-{
+{
local targets ;
name ?= [ $(source).name ] ;
- new-a = [
- new non-scanning-action $(source) : common.copy : $(properties) ] ;
+ new-a = [
+ new non-scanning-action $(source) : common.copy : $(properties) ] ;
local source-root = [ $(properties).get <install-source-root> ] ;
- if $(source-root)
- {
+ if $(source-root)
+ {
# Get the real path of the target. We probably need to strip
# relative path from the target name at construction...
local path = [ $(source).path ] ;
@@ -293,8 +298,8 @@
# path. The 'source-root' is already absolute, see the
# 'adjust-properties' method above.
path = [ path.root $(path) [ path.pwd ] ] ;
-
- relative = [ path.relative-to $(source-root) $(path) ] ;
+
+ relative = [ path.relative-to $(source-root) $(path) ] ;
# Note: using $(name:D=$(relative)) might be faster
# here, but then we need to explicitly check that
# relative is not ".", otherwise we might get paths like
@@ -302,190 +307,200 @@
# <prefix>/boost/.
#
# try to create it, and mkdir will obviously fail.
- name = [ path.root $(name:D=) $(relative) ] ;
- targets = [ new file-target $(name) exact : [ $(source).type ]
+ name = [ path.root $(name:D=) $(relative) ] ;
+ targets = [ new file-target $(name) exact : [ $(source).type ]
: $(project) : $(new-a) ] ;
-
- }
+ }
else
{
- targets = [ new file-target $(name:D=) exact : [ $(source).type ]
+ targets = [ new file-target $(name:D=) exact : [ $(source).type ]
: $(project) : $(new-a) ] ;
}
-
+
return $(targets) ;
}
+
rule symlink ( name : project : source : properties )
{
local a = [ new action $(source) : symlink.ln :
$(properties) ] ;
- local targets = [
+ local targets = [
new file-target $(name) exact : [ $(source).type ] : $(project) : $(a) ] ;
-
+
return $(targets) ;
}
-rule relink-file ( project : source : property-set )
-{
+
+rule relink-file ( project : source : property-set )
+{
local action = [ $(source).action ] ;
local cloned-action = [ virtual-target.clone-action $(action) : $(project) :
"" : $(property-set) ] ;
- local result = [ $(cloned-action).targets ] ;
-
+ local result = [ $(cloned-action).targets ] ;
+
return $(result) ;
}
-# Declare installed version of the EXE type. Generator for this type will
-# cause relinking to the new location.
+
+# Declare installed version of the EXE type. Generator for this type will cause
+# relinking to the new location.
type.register INSTALLED_EXE : : EXE ;
class installed-exe-generator : generator
{
- import type property-set modules stage ;
-
+ import type ;
+ import property-set ;
+ import modules ;
+ import stage ;
+
rule __init__ ( )
{
generator.__init__ install-exe : EXE : INSTALLED_EXE ;
}
-
+
rule run ( project name ? : property-set : source : multiple ? )
{
- if [ $(property-set).get <os> ] in NT CYGWIN
+ if [ $(property-set).get <os> ] in NT CYGWIN ||
+ [ $(property-set).get <target-os> ] in windows cygwin
{
# Relinking is never needed on NT
- return [ stage.copy-file $(project)
- : $(source) : $(property-set) ] ;
+ return [ stage.copy-file $(project)
+ : $(source) : $(property-set) ] ;
}
- else
+ else
{
- return [ stage.relink-file $(project)
- : $(source) : $(property-set) ] ;
- }
- }
+ return [ stage.relink-file $(project)
+ : $(source) : $(property-set) ] ;
+ }
+ }
}
generators.register [ new installed-exe-generator ] ;
-# Installing shared link on Unix might cause a creation of
-# versioned symbolic links.
+# Installing a shared library on Unix might cause the creation of versioned
+# symbolic links.
type.register INSTALLED_SHARED_LIB : : SHARED_LIB ;
+
class installed-shared-lib-generator : generator
{
- import type property-set modules stage ;
-
+ import type ;
+ import property-set ;
+ import modules ;
+ import stage ;
+
rule __init__ ( )
{
generator.__init__ install-shared-lib : SHARED_LIB
: INSTALLED_SHARED_LIB ;
}
-
+
rule run ( project name ? : property-set : source : multiple ? )
- {
- if [ $(property-set).get <os> ] = NT
+ {
+ if [ $(property-set).get <os> ] in NT CYGWIN ||
+ [ $(property-set).get <target-os> ] in windows cygwin
{
- local copied = [ stage.copy-file $(project)
- : $(source) : $(property-set) ] ;
-
+ local copied = [ stage.copy-file $(project)
+ : $(source) : $(property-set) ] ;
+
copied = [ virtual-target.register $(copied) ] ;
-
+
return $(copied) ;
}
- else
+ else
{
- local a = [ $(source).action ] ;
+ local a = [ $(source).action ] ;
local copied ;
if ! $(a)
{
# Non-derived file, just copy.
- copied = [ stage.copy-file $(project)
- : $(source) : $(property-set) ] ;
+ copied = [ stage.copy-file $(project)
+ : $(source) : $(property-set) ] ;
}
else
- {
+ {
local cp = [ $(a).properties ] ;
local current-dll-path = [ $(cp).get <dll-path> ] ;
local new-dll-path = [ $(property-set).get <dll-path> ] ;
-
+
if $(current-dll-path) != $(new-dll-path)
{
# Rpath changed, need to relink.
- copied = [ stage.relink-file
+ copied = [ stage.relink-file
$(project) : $(source) : $(property-set) ] ;
}
else
- {
- copied = [ stage.copy-file $(project)
- : $(source) : $(property-set) ] ;
+ {
+ copied = [ stage.copy-file $(project)
+ : $(source) : $(property-set) ] ;
}
}
-
+
copied = [ virtual-target.register $(copied) ] ;
-
+
local result = $(copied) ;
# If the name is in the form NNN.XXX.YYY.ZZZ, where all
# 'X', 'Y' and 'Z' are numbers, we need to create
# NNN.XXX and NNN.XXX.YYY symbolic links.
- local m = [ MATCH (.*)\\.([0123456789]+)\\.([0123456789]+)\\.([0123456789]+)$
+ local m = [ MATCH (.*)\\.([0123456789]+)\\.([0123456789]+)\\.([0123456789]+)$
: [ $(copied).name ] ] ;
if $(m)
{
# Symlink without version at all is used to make
# -lsome_library work.
result += [ stage.symlink $(m[1]) : $(project)
- : $(copied) : $(property-set) ] ;
-
+ : $(copied) : $(property-set) ] ;
+
# Symlinks of some libfoo.N and libfoo.N.M are used
                # so that the library can be found at runtime, if libfoo.N.M.X
# has soname of libfoo.N. That happens when the library
# makes some binary compatibility guarantees. If not,
# it's possible to skip those symlinks.
- local suppress =
+ local suppress =
[ $(property-set).get <install-no-version-symlinks> ] ;
-
+
if $(suppress) != "on"
- {
+ {
result += [ stage.symlink $(m[1]).$(m[2]) : $(project)
: $(copied) : $(property-set) ] ;
result += [ stage.symlink $(m[1]).$(m[2]).$(m[3]) : $(project)
: $(copied) : $(property-set) ] ;
- }
+ }
}
-
+
return $(result) ;
- }
- }
+ }
+ }
}
generators.register [ new installed-shared-lib-generator ] ;
-
-# Main target rule for 'install'
+# Main target rule for 'install'.
rule install ( name : sources * : requirements * : default-build * )
{
local project = [ project.current ] ;
-
+
# Unless the user has explicitly asked us to hardcode dll paths, add
# <hardcode-dll-paths>false in requirements, to override default
# value.
if ! <hardcode-dll-paths>true in $(requirements)
{
requirements += <hardcode-dll-paths>false ;
- }
-
+ }
+
if <tag> in $(requirements:G)
{
- errors.user-error
+ errors.user-error
"The <tag> property is not allowed for the 'install' rule" ;
}
-
+
targets.main-target-alternative
- [ new install-target-class $(name) : $(project)
+ [ new install-target-class $(name) : $(project)
: [ targets.main-target-sources $(sources) : $(name) ]
- : [ targets.main-target-requirements $(requirements) : $(project) ]
- : [ targets.main-target-default-build $(default-build) : $(project) ]
+ : [ targets.main-target-requirements $(requirements) : $(project) ]
+ : [ targets.main-target-default-build $(default-build) : $(project) ]
] ;
}
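As a hedged usage sketch of the 'install' rule defined above (target names, sources, and the 'dist' location are invented for illustration):

    # Jamfile fragment: copy the 'hello' executable and the 'foo' shared library
    # into ./dist, suppressing the intermediate versioned symlinks for 'foo'.
    install dist
        : hello foo
        : <location>dist <install-no-version-symlinks>on
        ;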
Modified: branches/release/tools/build/v2/tools/testing.jam
==============================================================================
--- branches/release/tools/build/v2/tools/testing.jam (original)
+++ branches/release/tools/build/v2/tools/testing.jam 2008-03-15 14:55:28 EDT (Sat, 15 Mar 2008)
@@ -1,36 +1,36 @@
-# Copyright 2005 Dave Abrahams
-# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+# Copyright 2005 Dave Abrahams
+# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-# This module implements regression testing framework. It declares a number of
-# main target rules, which perform some action, and if the results are ok,
+# This module implements a regression testing framework. It declares a number of
+# main target rules, each of which performs some action and, if the results are ok,
# creates an output file.
-#
+#
# The exact list of rules is:
-# 'compile' -- creates .test file if compilation of sources was successfull
-# 'compile-fail' -- creates .test file if compilation of sources failed
+# 'compile' -- creates .test file if compilation of sources was
+# successful.
+# 'compile-fail' -- creates .test file if compilation of sources failed.
# 'run'           -- creates .test file if running of executable produced from
-# sources was successfull. Also leaves behing .output file
+# sources was successful. Also leaves behind .output file
# with the output from program run.
# 'run-fail' -- same as above, but .test file is created if running fails.
#
-# In all cases, presense of .test file is an incication that
-# the test passed. For more convenient reporting, you might want to use C++ Boost
-# regression testing utilities, see
-# http://www.boost.org/more/regression.html
+# In all cases, presence of .test file is an indication that the test passed.
+# For more convenient reporting, you might want to use C++ Boost regression
+# testing utilities, see http://www.boost.org/more/regression.html
#
-# For historical reason, a 'unit-test' rule is available which
-# has the same syntax as 'exe' and behaves just like 'run'.
+# For historical reasons, a 'unit-test' rule is available which has the same
+# syntax as 'exe' and behaves just like 'run'.
# Things to do:
# - Teach compiler_status handle Jamfile.v2.
# Notes:
-# - <no-warn> is not implemented, since in Como-specific, and it's not clear how
-# to implement it
-# - std::locale-support is not impelemted (it's used in one test).
+# - <no-warn> is not implemented, since it is Como-specific, and it's not clear
+#   how to implement it.
+# - std::locale-support is not implemented (it's used in one test).
+
-
import targets ;
import "class" : new ;
import property ;
@@ -48,77 +48,83 @@
import sequence ;
import errors ;
-rule init ( ) { }
-# The feature which controls the name of program used to
-# lanch test programs.
-feature.feature testing.launcher : : optional free ;
-feature.feature test-info : : free incidental ;
-feature.feature testing.arg : : free incidental ;
+rule init ( )
+{
+}
+
+
+# Feature controlling the command used to launch test programs.
+feature.feature testing.launcher : : free optional ;
+
+feature.feature test-info : : free incidental ;
+feature.feature testing.arg : : free incidental ;
feature.feature testing.input-file : : free dependency ;
+
# Register target types.
-type.register TEST : test ;
-type.register COMPILE : : TEST ;
-type.register COMPILE_FAIL : : TEST ;
-type.register RUN_OUTPUT : run ;
-type.register RUN : : TEST ;
-type.register RUN_FAIL : : TEST ;
-type.register LINK_FAIL : : TEST ;
-type.register LINK : : TEST ;
-type.register UNIT_TEST : passed : TEST ;
-
-# Declare the rules which create main targets.
-# While the 'type' module already creates rules with the same names for us,
-# we need extra convenience: default name of main target, so write
-# our own versions.
-
-# Helper rule. Create a test target, using basename of first source if no
-# target name is explicitly passed. Remembers the created target in
-# a global variable.
+type.register TEST : test ;
+type.register COMPILE : : TEST ;
+type.register COMPILE_FAIL : : TEST ;
+type.register RUN_OUTPUT : run ;
+type.register RUN : : TEST ;
+type.register RUN_FAIL : : TEST ;
+type.register LINK_FAIL : : TEST ;
+type.register LINK : : TEST ;
+type.register UNIT_TEST : passed : TEST ;
+
+
+# Declare the rules which create main targets. While the 'type' module already
+# creates rules with the same names for us, we need extra convenience: default
+# name of main target, so write our own versions.
+
+# Helper rule. Create a test target, using basename of first source if no target
+# name is explicitly passed. Remembers the created target in a global variable.
rule make-test ( target-type : sources + : requirements * : target-name ? )
{
target-name ?= $(sources[1]:D=:S=) ;
local project = [ project.current ] ;
- # The <location-prefix> forces the build system for generate paths in the form
- # $build_dir/array1.test/gcc/debug
- # This is necessary to allow post-processing tools to work.
- local t =
- [ targets.create-typed-target
+    # The <location-prefix> forces the build system to generate paths in the
+ # form '$build_dir/array1.test/gcc/debug'. This is necessary to allow
+ # post-processing tools to work.
+ local t =
+ [ targets.create-typed-target
[ type.type-from-rule-name $(target-type) ] : $(project)
- : $(target-name) : $(sources)
+ : $(target-name) : $(sources)
: $(requirements) <location-prefix>$(target-name).test ] ;
-
- # Remember the test (for --dump-test).
- # A good way would be to collect all given a project.
- # This has some technical problems: e.g. we can't call this dump from
- # Jamfile since projects referred by 'build-project' are not available until
- # whole Jamfile is loaded.
+
+    # Remember the test (for --dump-tests). A good way would be to collect all
+    # the tests for a given project. This has some technical problems: e.g. we
+    # can't call this dump from a Jamfile since projects referred to by
+    # 'build-project' are not available until the whole Jamfile is loaded.
.all-tests += $(t) ;
- return $(t) ;
+ return $(t) ;
}
-# Note: passing more that one cpp file here is know to
-# fail. Passing a cpp file and a library target works.
+
+# Note: passing more than one cpp file here is known to fail. Passing a cpp file
+# and a library target works.
rule compile ( sources + : requirements * : target-name ? )
-{
+{
return [ make-test compile : $(sources) : $(requirements) : $(target-name) ] ;
}
+
rule compile-fail ( sources + : requirements * : target-name ? )
-{
+{
return [ make-test compile-fail : $(sources) : $(requirements) : $(target-name) ] ;
}
+
rule link ( sources + : requirements * : target-name ? )
-{
+{
return [ make-test link : $(sources) : $(requirements) : $(target-name) ] ;
}
rule link-fail ( sources + : requirements * : target-name ? )
-{
+{
return [ make-test link-fail : $(sources) : $(requirements) : $(target-name) ] ;
}
@@ -127,56 +133,55 @@
{
if $(input-files[2])
{
- # Check that sorting made when creating property-set instance
- # won't change the ordering.
+        # Check that the sorting done when creating the property-set instance
+        # won't change the ordering.
if [ sequence.insertion-sort $(input-files) ] != $(input-files)
{
errors.user-error "Names of input files must be sorted alphabetically"
- : "due to internal limitations" ;
- }
+ : "due to internal limitations" ;
+ }
}
return <testing.input-file>$(input-files) ;
}
-rule run ( sources + : args * : input-files * : requirements * : target-name ?
+rule run ( sources + : args * : input-files * : requirements * : target-name ?
: default-build * )
-{
+{
requirements += <testing.arg>$(args:J=" ") ;
requirements += [ handle-input-files $(input-files) ] ;
return [ make-test run : $(sources) : $(requirements) : $(target-name) ] ;
}
-rule run-fail ( sources + : args * : input-files * : requirements * : target-name ?
- : default-build * )
-{
- requirements += <testing.arg>$(args:J=" ") ;
+
+rule run-fail ( sources + : args * : input-files * : requirements *
+ : target-name ? : default-build * )
+{
+ requirements += <testing.arg>$(args:J=" ") ;
requirements += [ handle-input-files $(input-files) ] ;
return [ make-test run-fail : $(sources) : $(requirements) : $(target-name) ] ;
}
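A hypothetical Jamfile fragment exercising the rules declared above (source and input file names are made up; note that the input files are listed in sorted order, as handle-input-files requires):

    compile      config_check.cpp ;
    compile-fail must_not_compile.cpp ;
    link         linker_smoke.cpp helpers.cpp ;
    link-fail    undefined_symbol.cpp ;
    run          runner.cpp : --verbose : data1.txt data2.txt : <define>RUNNER_TEST ;
    run-fail     crasher.cpp ;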
-
-# Use 'test-suite' as synonym for 'alias', for backward compatibility.
+# Use 'test-suite' as a synonym for 'alias', for backward compatibility.
IMPORT : alias : : test-suite ;
-
-# For all main target in 'project-module',
-# which are typed target with type derived from 'TEST',
-# produce some interesting information.
+# For all main targets in 'project-module', which are typed targets with type
+# derived from 'TEST', produce some interesting information.
rule dump-tests # ( project-module )
{
for local t in $(.all-tests)
{
dump-test $(t) ;
- }
+ }
}
-# Given a project location, compute the name of Boost library
+
+# Given a project location in normalized form (slashes are forward), compute the
+# name of the Boost library.
local rule get-library-name ( path )
{
# Path is in normalized form, so all slashes are forward.
-
local match1 = [ MATCH /libs/(.*)/(test|example) : $(path) ] ;
local match2 = [ MATCH /libs/(.*)$ : $(path) ] ;
local match3 = [ MATCH (/status$) : $(path) ] ;
@@ -184,19 +189,20 @@
if $(match1) { return $(match1[0]) ; }
else if $(match2) { return $(match2[0]) ; }
else if $(match3) { return "" ; }
- else if --dump-tests in [ modules.peek : ARGV ]
- {
- # The 'run' rule and others might be used outside
- # boost. In that case, just return the path,
- # since the 'library name' makes no sense.
+ else if --dump-tests in [ modules.peek : ARGV ]
+ {
+ # The 'run' rule and others might be used outside boost. In that case,
+ # just return the path, since the 'library name' makes no sense.
return $(path) ;
}
}
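For illustration (hypothetical locations), the three MATCH patterns above classify project paths as follows:

    # /home/ghost/boost/libs/regex/test  =>  regex
    # /home/ghost/boost/libs/regex       =>  regex
    # /home/ghost/boost/status           =>  ""   (empty library name)
    # elsewhere, with --dump-tests       =>  the path itself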
+
# Was an XML dump requested?
.out-xml = [ MATCH --out-xml=(.*) : [ modules.peek : ARGV ] ] ;
-# Take a target (instance of 'basic-target') and prints
+
+# Takes a target (instance of 'basic-target') and prints
# - its type
# - its name
# - comments specified via the <test-info> property
@@ -206,44 +212,44 @@
local type = [ $(target).type ] ;
local name = [ $(target).name ] ;
local project = [ $(target).project ] ;
-
+
local project-root = [ $(project).get project-root ] ;
- local library = [ get-library-name
+ local library = [ get-library-name
[ path.root [ $(project).get location ] [ path.pwd ] ] ] ;
if $(library)
{
name = $(library)/$(name) ;
}
-
+
local sources = [ $(target).sources ] ;
local source-files ;
for local s in $(sources)
{
- if [ class.is-a $(s) : file-reference ]
+ if [ class.is-a $(s) : file-reference ]
{
- local location =
- [ path.root
+ local location =
+ [ path.root
[ path.root [ $(s).name ] [ $(s).location ] ]
- [ path.pwd ] ] ;
-
- source-files +=
- [ path.relative
+ [ path.pwd ] ] ;
+
+ source-files +=
+ [ path.relative
$(location)
[ path.root $(project-root) [ path.pwd ] ] ] ;
- }
+ }
}
-
+
local r = [ $(target).requirements ] ;
- # Extract values of the <test-info> feature
+ # Extract values of the <test-info> feature.
local test-info = [ $(r).get <test-info> ] ;
-
- # If the user requested XML output on the command-line, add the
- # test info to that XML file rather than dumping them to stdout.
+
+ # If the user requested XML output on the command-line, add the test info to
+    # that XML file rather than dumping it to stdout.
if $(.out-xml)
{
local nl = "
" ;
- .contents on $(.out-xml) +=
+ .contents on $(.out-xml) +=
"$(nl) <test type=\"$(type)\" name=\"$(name)\">"
"$(nl) <info><![CDATA[$(test-info)]]></info>"
"$(nl) <source><![CDATA[$(source-files)]]></source>"
@@ -252,123 +258,123 @@
}
else
{
- # Format them into a single string of quoted strings
+ # Format them into a single string of quoted strings.
test-info = \"$(test-info:J=\"\ \")\" ;
-
- ECHO boost-test($(type)) \"$(name)\"
- [$(test-info)]
- ":" \"$(source-files)\"
- ;
+
+ ECHO boost-test($(type)) \"$(name)\"
+ [$(test-info)]
+ ":" \"$(source-files)\" ;
}
}
-# Register generators. Depending on target type, either
-# 'expect-success' or 'expect-failure' rule will be used.
-generators.register-standard testing.expect-success : OBJ : COMPILE ;
-generators.register-standard testing.expect-failure : OBJ : COMPILE_FAIL ;
-generators.register-standard testing.expect-success : RUN_OUTPUT : RUN ;
-generators.register-standard testing.expect-failure : RUN_OUTPUT : RUN_FAIL ;
-generators.register-standard testing.expect-failure : EXE : LINK_FAIL ;
-generators.register-standard testing.expect-success : EXE : LINK ;
+
+# Register generators. Depending on target type, either 'expect-success' or
+# 'expect-failure' rule will be used.
+generators.register-standard testing.expect-success : OBJ : COMPILE ;
+generators.register-standard testing.expect-failure : OBJ : COMPILE_FAIL ;
+generators.register-standard testing.expect-success : RUN_OUTPUT : RUN ;
+generators.register-standard testing.expect-failure : RUN_OUTPUT : RUN_FAIL ;
+generators.register-standard testing.expect-failure : EXE : LINK_FAIL ;
+generators.register-standard testing.expect-success : EXE : LINK ;
# Generator which runs an EXE and captures output.
generators.register-standard testing.capture-output : EXE : RUN_OUTPUT ;
-# Generator which creates target if sources runs successfully.
-# Differers from RUN in that run output is not captured.
-# The reason why it exists is that the 'run' rule is much better for
-# automated testing, but is not user-friendly. See
+# Generator which creates a target if its sources run successfully. Differs from
+# RUN in that run output is not captured. The reason why it exists is that the
+# 'run' rule is much better for automated testing, but is not user-friendly. See
# http://article.gmane.org/gmane.comp.lib.boost.build/6353/
generators.register-standard testing.unit-test : EXE : UNIT_TEST ;
# The action rules called by generators.
-# Causes the 'target' to exist after bjam invocation if and only if all the
+# Causes the 'target' to exist after bjam invocation if and only if all the
# dependencies were successfully built.
rule expect-success ( target : dependency + : requirements * )
{
- **passed** $(target) : $(sources) ;
+ **passed** $(target) : $(sources) ;
}
-# Causes the 'target' to exist after bjam invocation if and only if all some
-# of the dependencies were not successfully built.
+# Causes the 'target' to exist after bjam invocation if and only if some of
+# the dependencies were not successfully built.
rule expect-failure ( target : dependency + : properties * )
{
local grist = [ MATCH ^<(.*)> : $(dependency:G) ] ;
local marker = $(dependency:G=$(grist)*fail) ;
(failed-as-expected) $(marker) ;
- FAIL_EXPECTED $(dependency) ;
+ FAIL_EXPECTED $(dependency) ;
LOCATE on $(marker) = [ on $(dependency) return $(LOCATE) ] ;
RMOLD $(marker) ;
DEPENDS $(marker) : $(dependency) ;
-
DEPENDS $(target) : $(marker) ;
**passed** $(target) : $(marker) ;
}
-# The rule/action combination used to report successfull passing
-# of a test.
+
+# The rule/action combination used to report successful passing of a test.
rule **passed**
{
- # Dump all the tests, if needed.
- # We do it here, since dump should happen after all Jamfiles are read,
- # and there's no such place currently defined (but should).
- if ! $(.dumped-tests) && --dump-tests in [ modules.peek : ARGV ]
+ # Dump all the tests, if needed. We do it here, since dump should happen
+ # only after all Jamfiles have been read, and there's no such place
+ # currently defined (but there should be).
+ if ! $(.dumped-tests) && --dump-tests in [ modules.peek : ARGV ]
{
.dumped-tests = true ;
dump-tests ;
}
-
- # Force deletion of the target, in case any dependencies failed
- # to build.
+
+ # Force deletion of the target, in case any dependencies failed to build.
RMOLD $(<) ;
}
+
actions **passed**
{
echo passed > $(<)
}
+
actions (failed-as-expected)
{
echo failed as expected > $(<)
}
+
rule run-path-setup ( target : source : properties * )
{
- # For testing, we need to make sure that all dynamic libraries needed by
- # the test are found. So, we collect all paths from dependency libraries
- # (via xdll-path property) and add whatever explicit dll-path user has
- # specified. The resulting paths are added to environment on each test
- # invocation.
+ # For testing, we need to make sure that all dynamic libraries needed by the
+ # test are found. So, we collect all paths from dependency libraries (via
+ # xdll-path property) and add whatever explicit dll-path user has specified.
+ # The resulting paths are added to the environment on each test invocation.
local dll-paths = [ feature.get-values <dll-path> : $(properties) ] ;
dll-paths += [ feature.get-values <xdll-path> : $(properties) ] ;
dll-paths += [ on $(source) return $(RUN_PATH) ] ;
dll-paths = [ sequence.unique $(dll-paths) ] ;
if $(dll-paths)
- {
+ {
dll-paths = [ sequence.transform path.native : $(dll-paths) ] ;
-
- PATH_SETUP on $(target) =
- [ common.prepend-path-variable-command
- [ os.shared-library-path-variable ] : $(dll-paths) ] ;
- }
+ PATH_SETUP on $(target) = [ common.prepend-path-variable-command
+ [ os.shared-library-path-variable ] : $(dll-paths) ] ;
+ }
}
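A worked example may help (property values are hypothetical): if the dependency libraries contribute <xdll-path>/tmp/l1 and the test itself adds <dll-path>/tmp/l2, then:

    # dll-paths  = /tmp/l2 /tmp/l1              (after sequence.unique)
    # PATH_SETUP = a command prepending both directories to the variable named by
    #              [ os.shared-library-path-variable ], e.g. LD_LIBRARY_PATH on
    #              Linux or PATH on Windows, executed before the test is run.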
+
local argv = [ modules.peek : ARGV ] ;
if --preserve-test-targets in $(argv)
{
preserve-test-targets = true ;
}
+
toolset.flags testing.capture-output ARGS <testing.arg> ;
toolset.flags testing.capture-output INPUT_FILES <testing.input-file> ;
toolset.flags testing.capture-output LAUNCHER <testing.launcher> ;
-# Runs executable 'sources' and stores stdout in file 'target'.
-# If --preserve-test-targets command line option, removes the executable.
-# The 'target-to-remove' parameter controls what should be removed:
+
+# Runs executable 'sources' and stores stdout in file 'target'. Unless the
+# --preserve-test-targets command line option has been specified, removes the
+# executable. The 'targets-to-remove' parameter controls what should be removed:
# - if 'none', does not remove anything, ever
# - if empty, removes 'source'
# - if non-empty and not 'none', contains a list of sources to remove.
@@ -376,19 +382,20 @@
{
output-file on $(target) = $(target:S=.output) ;
LOCATE on $(target:S=.output) = [ on $(target) return $(LOCATE) ] ;
-
+
# The INCLUDES kill a warning about independent target...
INCLUDES $(target) : $(target:S=.output) ;
- # but it also puts .output into dependency graph, so we must tell jam
- # it's OK if it cannot find the target or updating rule.
- NOCARE $(target:S=.output) ;
-
+ # but it also puts .output into dependency graph, so we must tell jam it's
+ # OK if it cannot find the target or updating rule.
+ NOCARE $(target:S=.output) ;
+
     # This has a two-fold effect. First it adds input files to the dependency
# graph, preventing a warning. Second, it causes input files to be bound
- # before target is created. Therefore, they are bound using SEARCH setting
- # on them and not LOCATE setting of $(target), as in other case (due to jam bug).
+ # before target is created. Therefore, they are bound using SEARCH setting
+ # on them and not LOCATE setting of $(target), as in other case (due to jam
+ # bug).
DEPENDS $(target) : [ on $(target) return $(INPUT_FILES) ] ;
-
+
if $(targets-to-remove) = none
{
targets-to-remove = ;
@@ -397,23 +404,19 @@
{
targets-to-remove = $(source) ;
}
-
+
run-path-setup $(target) : $(source) : $(properties) ;
-
+
if ! $(preserve-test-targets)
{
TEMPORARY $(targets-to-remove) ;
- # Set a second action on target that will
- # be executed after capture output action.
- # The 'RmTemps' rule has the 'ignore' modifier
- # so it's always considered succeeded.
- # This is needed for 'run-fail' test. For that
- # test the target will be marked with FAIL_EXPECTED,
- # and without 'ingore' successfull execution
- # will be negated and be reported as failure.
- # With 'ignore' we don't detect a case where
- # removing files, but it's not likely to
- # happen.
+ # Set a second action on target that will be executed after capture
+ # output action. The 'RmTemps' rule has the 'ignore' modifier so it's
+ # always considered succeeded. This is needed for 'run-fail' test. For
+ # that test the target will be marked with FAIL_EXPECTED, and without
+ # 'ignore' successful execution will be negated and be reported as
+ # failure. With 'ignore' we don't detect a case where removing files
+ # fails, but it's not likely to happen.
RmTemps $(target) : $(targets-to-remove) ;
}
}
@@ -421,34 +424,32 @@
if [ os.name ] = NT
{
- STATUS = %status% ;
- SET_STATUS = "set status=%ERRORLEVEL%" ;
+ STATUS = %status% ;
+ SET_STATUS = "set status=%ERRORLEVEL%" ;
RUN_OUTPUT_NL = "echo." ;
- STATUS_0 = "%status% EQU 0 (" ;
- STATUS_NOT_0 = "%status% NEQ 0 (" ;
- VERBOSE = "%verbose% EQU 1 (" ;
- ENDIF = ")" ;
- SHELL_SET = "set " ;
-
- CATENATE = type ;
- CP = copy ;
+ STATUS_0 = "%status% EQU 0 (" ;
+ STATUS_NOT_0 = "%status% NEQ 0 (" ;
+ VERBOSE = "%verbose% EQU 1 (" ;
+ ENDIF = ")" ;
+ SHELL_SET = "set " ;
+ CATENATE = type ;
+ CP = copy ;
}
else
{
- STATUS = "$status" ;
- SET_STATUS = "status=$?" ;
+ STATUS = "$status" ;
+ SET_STATUS = "status=$?" ;
RUN_OUTPUT_NL = "echo" ;
- STATUS_0 = "test $status -eq 0 ; then" ;
- STATUS_NOT_0 = "test $status -ne 0 ; then" ;
- VERBOSE = "test $verbose -eq 1 ; then" ;
- ENDIF = "fi" ;
- SHELL_SET = "" ;
-
- CATENATE = cat ;
- CP = cp ;
+ STATUS_0 = "test $status -eq 0 ; then" ;
+ STATUS_NOT_0 = "test $status -ne 0 ; then" ;
+ VERBOSE = "test $verbose -eq 1 ; then" ;
+ ENDIF = "fi" ;
+ SHELL_SET = "" ;
+ CATENATE = cat ;
+ CP = cp ;
}
-if --verbose-test in [ modules.peek : ARGV ]
+if --verbose-test in [ modules.peek : ARGV ]
{
VERBOSE_TEST = 1 ;
}
@@ -460,10 +461,11 @@
RM = [ common.rm-command ] ;
+
actions capture-output bind INPUT_FILES output-file
{
$(PATH_SETUP)
- $(LAUNCHER) "$(>)" $(ARGS) "$(INPUT_FILES)" > "$(output-file)" 2>&1
+ $(LAUNCHER) "$(>)" $(ARGS) "$(INPUT_FILES)" > "$(output-file)" 2>&1
$(SET_STATUS)
$(RUN_OUTPUT_NL) >> "$(output-file)"
echo EXIT STATUS: $(STATUS) >> "$(output-file)"
@@ -478,37 +480,42 @@
echo ====== BEGIN OUTPUT ======
$(CATENATE) "$(output-file)"
echo ====== END OUTPUT ======
- $(ENDIF)
- exit $(STATUS)
+ $(ENDIF)
+ exit $(STATUS)
}
+
actions quietly updated ignore piecemeal together RmTemps
{
$(RM) "$(>)"
}
+
MAKE_FILE = [ common.file-creation-command ] ;
toolset.flags testing.unit-test LAUNCHER <testing.launcher> ;
+toolset.flags testing.unit-test ARGS <testing.arg> ;
rule unit-test ( target : source : properties * )
{
run-path-setup $(target) : $(source) : $(properties) ;
}
-actions unit-test
+
+actions unit-test
{
$(PATH_SETUP)
- $(LAUNCHER) $(>) && $(MAKE_FILE) $(<)
+ $(LAUNCHER) $(>) $(ARGS) && $(MAKE_FILE) $(<)
}
-IMPORT $(__name__) : compile compile-fail run run-fail link link-fail
- : : compile compile-fail run run-fail link link-fail ;
+IMPORT $(__name__) : compile compile-fail run run-fail link link-fail
+ : : compile compile-fail run run-fail link link-fail ;
type.register TIME : time ;
generators.register-standard testing.time : : TIME ;
-rule record-time ( target source : user : system )
+
+rule record-time ( target : source : start end user system )
{
local src-string = [$(source:G=:J=",")"] " ;
USER_TIME on $(target) += $(src-string)$(user) ;
@@ -516,21 +523,24 @@
}
IMPORT testing : record-time : : testing.record-time ;
+
+
rule time ( target : source : properties * )
{
- # Set up rule for recording timing information
+ # Set up rule for recording timing information.
__TIMING_RULE__ on $(source) = testing.record-time $(target) ;
-
- # Make sure that the source is rebuilt any time we need to
- # retrieve that information
+
+ # Make sure that the source is rebuilt any time we need to retrieve that
+ # information.
REBUILDS $(target) : $(source) ;
}
+
actions time
{
echo user: $(USER_TIME)
echo system: $(SYSTEM_TIME)
-
+
echo user: $(USER_TIME)" seconds" > $(<)
echo system: $(SYSTEM_TIME)" seconds" > $(<)
}
Modified: branches/release/tools/build/v2/util/doc.jam
==============================================================================
--- branches/release/tools/build/v2/util/doc.jam (original)
+++ branches/release/tools/build/v2/util/doc.jam 2008-03-15 14:55:28 EDT (Sat, 15 Mar 2008)
@@ -1,13 +1,13 @@
-# Copyright 2002, 2005 Dave Abrahams
-# Copyright 2002, 2003, 2006 Rene Rivera
-# Copyright 2003 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+# Copyright 2002, 2005 Dave Abrahams
+# Copyright 2002, 2003, 2006 Rene Rivera
+# Copyright 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
# Documentation system, handles --help requests.
# It defines rules that attach documentation to modules, rules, and variables.
-# Collects and generates documentation for the various parts of the build system.
-# The documentation is collected from comments integrated into the code.
+# Collects and generates documentation for the various parts of the build
+# system. The documentation is collected from comments integrated into the code.
import modules ;
import print ;
@@ -235,17 +235,17 @@
# Generates a general description of the documentation and help system.
#
local rule print-help-top ( )
-{
+{
print.section "General command line usage" ;
print.text " bjam [options] [properties] [targets]
-
+
Options, properties and targets can be specified in any order.
" ;
-
- print.section "Important Options" ;
-
- print.list-start ;
+
+ print.section "Important Options" ;
+
+ print.list-start ;
print.list-item "--clean Remove targets instead of building" ;
print.list-item "-a Rebuild everything" ;
print.list-item "-n Don't execute the commands, only print them" ;
@@ -256,11 +256,11 @@
print.list-item "--debug-building Report which targets are built with what properties" ;
print.list-item "--debug-generator Diagnose generator search/execution" ;
print.list-end ;
-
+
print.section "Further Help"
The following options can be used to obtain additional documentation.
;
-
+
print.list-start ;
print.list-item "--help-options Print more obscure command line options." ;
print.list-item "--help-internal Boost.Build implementation details." ;
@@ -279,18 +279,18 @@
print.list-item -a;
Build all targets, even if they are current. ;
print.list-item -fx;
- Read '"x"' as the Jamfile for building instead of searching
- for the Boost.Build system. ;
+ Read '"x"' as the Jamfile for building instead of searching for the
+ Boost.Build system. ;
print.list-item -jx;
Run up to '"x"' commands concurrently. ;
print.list-item -n;
- Do not execute build commands. Instead print out the commands
- as they would be executed if building. ;
+ Do not execute build commands. Instead print out the commands as they
+ would be executed if building. ;
print.list-item -ox;
- Write the build commands to the file '"x"'. ;
+        Write the build commands used to the file '"x"'. ;
print.list-item -q;
- Quit as soon as the build of a target fails. Specifying this prevents the
- attempt of building as many targets as possible regardless of failures. ;
+        Quit as soon as a build failure is encountered. Without this option
+        Boost.Jam will continue building as many targets as it can. ;
print.list-item -sx=y;
Sets a Jam variable '"x"' to the value '"y"', overriding any value that
variable would have from the environment. ;
@@ -299,19 +299,19 @@
print.list-item -v;
Display the version of bjam. ;
print.list-item --x;
- Option '"x"' is ignored but considered and option. The option is then
- available from the '"ARGV"' variable. ;
+ Any option not explicitly handled by Boost.Jam remains available to
+ build scripts using the '"ARGV"' variable. ;
print.list-item -dn;
Enables output of diagnostic messages. The debug level '"n"' and all
below it are enabled by this option. ;
print.list-item -d+n;
- Enables output of diagnostic messages. Only the output for debug level '"n"'
- is enabled. ;
+ Enables output of diagnostic messages. Only the output for debug level
+ '"n"' is enabled. ;
print.list-end ;
print.section "Debug Levels"
- Each debug level shows a different set of information. Usually with the higher
- levels producing more verbose information. The following levels are supported:
- ;
+        Each debug level shows a different set of information, usually with
+        higher levels producing more verbose information. The following levels
+        are supported: ;
print.list-start ;
print.list-item 0;
Turn off all diagnostic output. Only errors are reported. ;
@@ -353,9 +353,9 @@
)
{
print.section "Help Options"
- These are all the options available for enabling or disabling
- to control the help system in various ways. Options can be enabled
- or disabled with '"--help-enable-<option>"', and "'--help-disable-<option>'"
+        These are all the options available for controlling the help system
+        in various ways. Options can be enabled or disabled with
+ '"--help-enable-<option>"', and "'--help-disable-<option>'"
respectively.
;
local options-to-list = [ MATCH ^[.]option[.](.*) : $($(module-name).variables) ] ;
@@ -410,11 +410,11 @@
}
}
-# Generate documentation for possible modules. We attempt to list all known
-# modules, and a brief description of each.
+# Generate documentation for all possible modules. We attempt to list all known
+# modules together with a brief description of each.
#
local rule print-help-all (
- ignored # Usually the module name, but is ignored here.
+ ignored # Usually the module name, but is ignored here.
)
{
print.section "Modules"
@@ -451,19 +451,19 @@
{
# Print the docs.
print.section "Module '$(module-name)'" $($(module-name).docs) ;
-
+
# Print out the documented classes.
print-help-module-section $(module-name) classes : "Module '$(module-name)' classes"
Use --help $(module-name).<class-name> to get more information. ;
-
+
# Print out the documented rules.
print-help-module-section $(module-name) rules : "Module '$(module-name)' rules"
Use --help $(module-name).<rule-name> to get more information. ;
-
+
# Print out the documented variables.
print-help-module-section $(module-name) variables : "Module '$(module-name)' variables"
Use --help $(module-name).<variable-name> to get more information. ;
-
+
     # Print out all the same information but in detailed form.
if $(.option.detailed)
{
@@ -536,11 +536,11 @@
print.list-end ;
}
}
-
+
# Print out the documented rules of the class.
print-help-module-section $(module-name) $(class-name).class-rules : "Class '$(module-name).$(class-name)' rules"
Use --help $(module-name).<rule-name> to get more information. ;
-
+
# Print out all the rules if details are requested.
if $(.option.detailed)
{
@@ -594,7 +594,7 @@
# Print the docs.
print.section "Project-specific help"
Project has jamfile at $(jamfile) ;
-
+
print.lines $(jamfile<$(jamfile)>.docs) "" ;
}
}
@@ -612,7 +612,7 @@
# Print the docs.
print.section "Configuration help"
Configuration file at $(config-file) ;
-
+
print.lines $(jamfile<$(config-file)>.docs) "" ;
}
}
@@ -738,7 +738,7 @@
scope-level = $(scope-level[2-]) ;
}
}
-
+
return true ;
}
}
@@ -847,10 +847,10 @@
}
}
-# Scan a module file for documentation comments. This also
-# invokes any actions assigned to the module. The actions
-# are the rules that do the actual output of the documentation.
-# This rue is invoked as the header scan rule for the module file.
+# Scan a module file for documentation comments. This also invokes any actions
+# assigned to the module. The actions are the rules that do the actual output of
+# the documentation. This rule is invoked as the header scan rule for the module
+# file.
#
rule scan-module (
target # The module file.
Modified: branches/release/tools/build/v2/util/indirect.jam
==============================================================================
--- branches/release/tools/build/v2/util/indirect.jam (original)
+++ branches/release/tools/build/v2/util/indirect.jam 2008-03-15 14:55:28 EDT (Sat, 15 Mar 2008)
@@ -1,14 +1,16 @@
-# Copyright 2003 Dave Abrahams
-# Copyright 2003 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+# Copyright 2003 Dave Abrahams
+# Copyright 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
import modules ;
import numbers ;
-# The pattern that indirect rules must match: module$rule
+
+# The pattern that indirect rules must match: module%rule
.pattern = ^([^%]*)%([^%]+)$ ;
-
+
+
#
# Type checking rules.
#
@@ -16,14 +18,15 @@
{
if ! [ MATCH $(.pattern) : $(x) ]
{
- return "expected a string of the form module$rule, but got \""$(x)"\" for argument" ;
+ return "expected a string of the form module%rule, but got \""$(x)"\" for argument" ;
}
}
-# make an indirect rule which calls the given rule; if context is
-# supplied it is expected to be the module in which to invoke the rule
-# by the 'call' rule below. Otherwise, the rule will be invoked in
-# the module of this rule's caller.
+
+# Make an indirect rule which calls the given rule. If context is supplied it is
+# expected to be the module in which to invoke the rule by the 'call' rule
+# below. Otherwise, the rule will be invoked in the module of this rule's
+# caller.
rule make ( rulename bound-args * : context ? )
{
context ?= [ CALLER_MODULE ] ;
@@ -31,12 +34,12 @@
return $(context)%$(rulename) $(bound-args) ;
}
-# make an indirect rule which calls the given rule. rulename may be a
-# qualified rule; if so it is returned unchanged. Otherwise, if
-# frames is not supplied, the result will be invoked (by 'call',
-# below) in the module of the caller. Otherwise, frames > 1
-# specifies additional call frames to back up in order to find the
-# module context.
+
+# Make an indirect rule which calls the given rule. 'rulename' may be a
+# qualified rule; if so it is returned unchanged. Otherwise, if frames is not
+# supplied, the result will be invoked (by 'call', below) in the module of the
+# caller. Otherwise, frames > 1 specifies additional call frames to back up in
+# order to find the module context.
rule make-qualified ( rulename bound-args * : frames ? )
{
if [ MATCH $(.pattern) : $(rulename) ]
@@ -46,17 +49,16 @@
else
{
frames ?= 1 ;
- # Take the first dot-separated element as module name.
- # This disallows module names with dots, but allows rule names
- # with dots.
+ # Take the first dot-separated element as module name. This disallows
+ # module names with dots, but allows rule names with dots.
local module-context = [ MATCH ^([^.]*)\\..* : $(rulename) ] ;
module-context ?= [ CALLER_MODULE $(frames) ] ;
return [ make $(rulename) $(bound-args) : $(module-context) ] ;
}
}
-# return the module name in which the given indirect rule will be
-# invoked.
+
+# Returns the module name in which the given indirect rule will be invoked.
rule get-module ( [indirect-rule] x )
{
local m = [ MATCH $(.pattern) : $(x) ] ;
@@ -67,37 +69,36 @@
return $(m[1]) ;
}
-# return the rulename that will be called when x is invoked
+
+# Returns the rulename that will be called when x is invoked.
rule get-rule ( [indirect-rule] x )
{
local m = [ MATCH $(.pattern) : $(x) ] ;
return $(m[2]) ;
}
+
# Invoke the given indirect-rule.
rule call ( [indirect-rule] r args * : * )
{
- return [
- modules.call-in [ get-module $(r) ]
- : [ get-rule $(r) ] $(args) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9)
- ] ;
+ return [ modules.call-in [ get-module $(r) ] : [ get-rule $(r) ] $(args)
+ : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ] ;
}
+
rule __test__
{
import assert ;
-
+
rule foo-barr! ( x )
{
assert.equal $(x) : x ;
}
-
+
assert.equal [ get-rule [ make foo-barr! ] ] : foo-barr! ;
assert.equal [ get-module [ make foo-barr! ] ] : [ CALLER_MODULE ] ;
-
+
call [ make foo-barr! ] x ;
call [ make foo-barr! x ] ;
-
-
call [ make foo-barr! : [ CALLER_MODULE ] ] x ;
}
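Beyond the __test__ rule above, a hypothetical caller (rule and argument names invented for illustration) could use this module as follows:

    import indirect ;

    rule greet ( name )
    {
        ECHO Hello $(name) ;
    }

    # Yields a value of the form "<caller-module>%greet".
    local r = [ indirect.make greet ] ;

    # Dispatches back to 'greet' in the module that created it; prints "Hello world".
    indirect.call $(r) world ;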
Modified: branches/release/tools/build/v2/util/option.jam
==============================================================================
--- branches/release/tools/build/v2/util/option.jam (original)
+++ branches/release/tools/build/v2/util/option.jam 2008-03-15 14:55:28 EDT (Sat, 15 Mar 2008)
@@ -25,7 +25,7 @@
# Check command-line args as soon as possible. For each option try
# to load module named after option. Is that succeeds, invoke 'process'
# rule in the module. The rule may return "true" to indicate that the
- # regular built process should not be attempted.
+ # regular build process should not be attempted.
#
# Options take the general form of: --<name>[=<value>] [<value>]
#
Modified: branches/release/tools/build/v2/util/order.jam
==============================================================================
--- branches/release/tools/build/v2/util/order.jam (original)
+++ branches/release/tools/build/v2/util/order.jam 2008-03-15 14:55:28 EDT (Sat, 15 Mar 2008)
@@ -3,70 +3,71 @@
# License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy
# at http://www.boost.org/LICENSE_1_0.txt)
-# This module defines a class which allows to order arbitrary object
-# with regard to arbitrary binary relation.
+# This module defines a class which allows ordering arbitrary objects with
+# regard to an arbitrary binary relation.
#
-# The primary use case is the gcc toolset, which is sensitive to
-# library order: if library 'a' uses symbols from library 'b',
-# then 'a' must be present before 'b' on the linker's command line.
+# The primary use case is the gcc toolset, which is sensitive to library order:
+# if library 'a' uses symbols from library 'b', then 'a' must be present before
+# 'b' on the linker's command line.
#
-# This requirement can be lifted for gcc with GNU ld, but for gcc with
-# Solaris LD (and for Solaris toolset as well), the order always matters.
+# This requirement can be lifted for gcc with GNU ld, but for gcc with Solaris
+# LD (and for Solaris toolset as well), the order always matters.
#
-# So, we need to store order requirements and then order libraries
-# according to them. It it not possible to use dependency graph as
-# order requirements. What we need is "use symbols" relationship
-# while dependency graph provides "needs to be updated" relationship.
+# So, we need to store order requirements and then order libraries according to
+# them. It is not possible to use the dependency graph as order requirements.
+# What we need is a "use symbols" relationship while dependency graph provides
+# the "needs to be updated" relationship.
#
# For example::
# lib a : a.cpp b;
# lib b ;
#
-# For static linking, the 'a' library need not depend on 'b'. However, it
-# still should come before 'b' on the command line.
+# For static linking, library 'a' need not depend on 'b'. However, it should
+# still come before 'b' on the command line.
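Concretely (a minimal sketch, mirroring the __test__ rule at the end of this module):

    import "class" : new ;

    local o = [ new order ] ;
    $(o).add-pair a b ;
    $(o).add-pair b c ;
    # Reorders the input so that both constraints hold; prints "a b c".
    ECHO [ $(o).order c a b ] ;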
-class order
+class order
{
- rule __init__ ( ) {
+ rule __init__ ( )
+ {
}
-
- # Adds the constraint that 'first' should precede 'second'
+
+    # Adds the constraint that 'first' should precede 'second'.
rule add-pair ( first second )
{
.constraits += $(first)--$(second) ;
}
NATIVE_RULE class_at_order : add-pair ;
-
- # Given a list of objects, reorder them so that the constains specified
- # by 'add-pair' are satisfied.
+
+ # Given a list of objects, reorder them so that the constraints specified by
+ # 'add-pair' are satisfied.
#
# The algorithm was adopted from an awk script by Nikita Youshchenko
# (yoush at cs dot msu dot su)
rule order ( objects * )
{
- # The algorithm used is the same is standard transitive closure,
- # except that we're not keeping in-degree for all vertices, but
- # rather removing edges.
+        # The algorithm used is the same as standard transitive closure, except
+ # that we're not keeping in-degree for all vertices, but rather removing
+ # edges.
local result ;
if $(objects)
- {
- local constraints = [ eliminate-unused-constraits $(objects) ] ;
-
- # Find some library that nobody depends upon and add it to
- # the 'result' array.
+ {
+ local constraints = [ eliminate-unused-constraits $(objects) ] ;
+
+ # Find some library that nobody depends upon and add it to the
+ # 'result' array.
local obj ;
while $(objects)
- {
+ {
local new_objects ;
while $(objects)
{
- obj = $(objects[1]) ;
+ obj = $(objects[1]) ;
if [ has-no-dependents $(obj) : $(constraints) ]
{
# Emulate break ;
new_objects += $(objects[2-]) ;
objects = ;
- }
+ }
else
{
new_objects += $(obj) ;
@@ -74,30 +75,30 @@
objects = $(objects[2-]) ;
}
}
-
+
if ! $(obj)
{
errors.error "Circular order dependencies" ;
}
# No problem with placing first.
result += $(obj) ;
- # Remove all containts where 'obj' comes first,
- # since they are already satisfied.
+            # Remove all constraints where 'obj' comes first, since they are
+ # already satisfied.
constraints = [ remove-satisfied $(constraints) : $(obj) ] ;
- # Add the remaining objects for further processing
- # on the next iteration
-
- objects = $(new_objects) ;
- }
-
- }
+
+ # Add the remaining objects for further processing on the next
+ # iteration
+ objects = $(new_objects) ;
+ }
+
+ }
return $(result) ;
- }
+ }
NATIVE_RULE class_at_order : order ;
-
- # Eliminate constains which mentions objects not in 'objects'.
- # In graph-theory terms, this is finding subgraph induced by
- # ordered vertices.
+
+ # Eliminate constraints which mention objects not in 'objects'. In
+ # graph-theory terms, this is finding a subgraph induced by ordered
+ # vertices.
rule eliminate-unused-constraits ( objects * )
{
local result ;
@@ -107,32 +108,32 @@
if $(m[1]) in $(objects) && $(m[2]) in $(objects)
{
result += $(c) ;
- }
- }
+ }
+ }
return $(result) ;
}
-
- # Returns true if there's no constrain in 'constaraint' where
- # 'obj' comes second.
+
+    # Returns true if there's no constraint in 'constraints' where 'obj' comes
+ # second.
rule has-no-dependents ( obj : constraints * )
{
local failed ;
- while $(constraints) && ! $(failed)
+ while $(constraints) && ! $(failed)
{
local c = $(constraints[1]) ;
local m = [ MATCH (.*)--(.*) : $(c) ] ;
if $(m[2]) = $(obj)
{
failed = true ;
- }
+ }
constraints = $(constraints[2-]) ;
}
if ! $(failed)
{
return true ;
- }
+ }
}
-
+
rule remove-satisfied ( constraints * : obj )
{
local result ;
@@ -142,32 +143,27 @@
if $(m[1]) != $(obj)
{
result += $(c) ;
- }
+ }
}
- return $(result) ;
- }
+ return $(result) ;
+ }
}
+
rule __test__ ( )
{
import "class" : new ;
import assert ;
-
+
c1 = [ new order ] ;
$(c1).add-pair l1 l2 ;
-
+
assert.result l1 l2 : $(c1).order l1 l2 ;
assert.result l1 l2 : $(c1).order l2 l1 ;
-
+
$(c1).add-pair l2 l3 ;
assert.result l1 l2 : $(c1).order l2 l1 ;
$(c1).add-pair x l2 ;
assert.result l1 l2 : $(c1).order l2 l1 ;
assert.result l1 l2 l3 : $(c1).order l2 l3 l1 ;
-
-
-
-
}
-
-
Modified: branches/release/tools/build/v2/util/os.jam
==============================================================================
--- branches/release/tools/build/v2/util/os.jam (original)
+++ branches/release/tools/build/v2/util/os.jam 2008-03-15 14:55:28 EDT (Sat, 15 Mar 2008)
@@ -1,23 +1,26 @@
-# Copyright 2001, 2002, 2003, 2005 Dave Abrahams
-# Copyright 2006 Rene Rivera
-# Copyright 2003, 2005 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+# Copyright 2001, 2002, 2003, 2005 Dave Abrahams
+# Copyright 2006 Rene Rivera
+# Copyright 2003, 2005 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
import modules ;
import string ;
-# Return the value(s) of the given environment variable(s) at the time
-# bjam was invoked.
+
+# Return the value(s) of the given environment variable(s) at the time bjam was
+# invoked.
rule environ ( variable-names + )
{
return [ modules.peek .ENVIRON : $(variable-names) ] ;
}
+
.name = [ modules.peek : OS ] ;
.platform = [ modules.peek : OSPLAT ] ;
.version = [ modules.peek : OSVER ] ;
+
local rule constant ( c : os ? )
{
os ?= $(.name) ;
@@ -27,10 +30,10 @@
return $(result[1]) ;
}
-rule get-constant ( os ? )
+rule get-constant ( os ? )
{
- # Find the name of the constant being accessed, which is
- # equal to the name used to invoke us.
+ # Find the name of the constant being accessed, which is equal to the name
+ # used to invoke us.
local bt = [ BACKTRACE 1 ] ;
local rulename = [ MATCH ([^.]*)$ : $(bt[4]) ] ;
return [ constant $(rulename) : $(os) ] ;
@@ -39,15 +42,15 @@
# export all the common constants
.constants = name platform version shared-library-path-variable path-separator executable-path-variable executable-suffix ;
-for local constant in $(.constants)
+for local constant in $(.constants)
{
IMPORT $(__name__) : get-constant : $(__name__) : $(constant) ;
}
EXPORT $(__name__) : $(.constants) ;
.executable-path-variable-NT = PATH ;
-# On Windows the case and capitalization of PATH is not always
-# predictable, so let's find out what variable name was really set.
+# On Windows the case and capitalization of PATH is not always predictable, so
+# let's find out what variable name was really set.
if $(.name) = NT
{
for local n in [ VARNAMES .ENVIRON ]
@@ -59,8 +62,8 @@
}
}
-# Specific constants for various platforms. There's no need to define
-# any constant whose value would be the same as the default, below.
+# Specific constants for various platforms. There's no need to define any
+# constant whose value would be the same as the default, below.
.shared-library-path-variable-NT = $(.executable-path-variable-NT) ;
.path-separator-NT = ";" ;
.expand-variable-prefix-NT = % ;
@@ -81,16 +84,20 @@
.executable-path-variable = PATH ;
.executable-suffix = "" ;
-# Return a list of the directories in the PATH. Yes, that information
-# is (sort of) available in the global module, but jam code can change
-# those values, and it isn't always clear what case/capitalization to
-# use when looking. This rule is a more reliable way to get there.
+
+# Return a list of the directories in the PATH. Yes, that information is (sort
+# of) available in the global module, but jam code can change those values, and
+# it isn't always clear what case/capitalization to use when looking. This rule
+# is a more reliable way to get there.
rule executable-path ( )
{
- return [ string.words [ environ [ constant executable-path-variable ] ]
- : [ constant path-separator ] ] ;
+ return [ string.words [ environ [ constant executable-path-variable ] ]
+ : [ constant path-separator ] ] ;
}
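As a small illustration (environment values are hypothetical):

    # With PATH=/usr/local/bin:/usr/bin and a path-separator of ":" on a UNIX
    # system, [ os.executable-path ] returns the list:  /usr/local/bin /usr/bin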
-
+
+
+# Initialize the list of home directories for the current user depending on the
+# OS.
if $(.name) = NT
{
local home = [ environ HOMEDRIVE HOMEPATH ] ;
@@ -101,14 +108,16 @@
.home-directories = [ environ HOME ] ;
}
-# Can't use 'constant' mechanism because it only returns 1-element
-# values.
+
+# Can't use 'constant' mechanism because it only returns 1-element values.
rule home-directories ( )
{
return $(.home-directories) ;
}
-# Return the string needed to represent the expansion of the named
-# shell variable.
+
+
+# Return the string needed to represent the expansion of the named shell
+# variable.
rule expand-variable ( variable )
{
local prefix = [ constant expand-variable-prefix ] ;
@@ -116,17 +125,18 @@
return $(prefix)$(variable)$(suffix) ;
}
+
# Returns true if running on windows, whether in cygwin or not.
-rule on-windows
+rule on-windows ( )
{
local result ;
- if [ modules.peek : NT ]
+ if [ modules.peek : NT ]
{
result = true ;
}
- else if [ modules.peek : UNIX ]
+ else if [ modules.peek : UNIX ]
{
- switch [ modules.peek : JAMUNAME ]
+ switch [ modules.peek : JAMUNAME ]
{
case CYGWIN* :
{
@@ -137,24 +147,22 @@
return $(result) ;
}
-if ! [ on-windows ]
+
+if ! [ on-windows ]
{
.on-unix = 1 ;
}
+
rule on-unix
{
return $(.on-unix) ;
}
-
-import regex ;
rule __test__
{
import assert ;
- rule identity ( args * ) { return $(args) ; }
-
if ! ( --quiet in [ modules.peek : ARGV ] )
{
ECHO os: name= [ name ] ;
Modified: branches/release/tools/build/v2/util/path.jam
==============================================================================
--- branches/release/tools/build/v2/util/path.jam (original)
+++ branches/release/tools/build/v2/util/path.jam 2008-03-15 14:55:28 EDT (Sat, 15 Mar 2008)
@@ -6,39 +6,35 @@
# (See accompanying file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)
-# Performs various path manipulations. Path are always in a 'normilized'
+# Performs various path manipulations. Paths are always in a 'normalized'
# representation. In it, a path may be either:
#
# - '.', or
#
# - ['/'] [ ( '..' '/' )* (token '/')* token ]
-#
+#
# In plain english, path can be rooted, '..' elements are allowed only
# at the beginning, and it never ends in slash, except for path consisting
# of slash only.
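A few concrete conversions, taken from the __test__ rule near the end of this module, illustrate the normalized form:

    # NT:
    #   make "foo\\bar\\giz"            =>  "foo/bar/giz"
    #   make "D:\\My Documents"         =>  "/D:/My Documents"
    #   native "/D:/My Documents/Work"  =>  "D:\\My Documents\\Work"
    # UNIX:
    #   make "/sub1/sub2/.."            =>  "/sub1"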
+import errors ;
import modules ;
-import sequence ;
import regex ;
-import errors : error ;
+import sequence ;
import set ;
os = [ modules.peek : OS ] ;
-if [ modules.peek : UNIX ]
-{
+if [ modules.peek : UNIX ]
+{
local uname = [ modules.peek : JAMUNAME ] ;
switch $(uname)
{
- case CYGWIN* :
- os = CYGWIN ;
-
- case * :
- os = UNIX ;
- }
+ case CYGWIN* : os = CYGWIN ;
+ case * : os = UNIX ;
+ }
}
-#
# Converts the native path into normalized form.
#
rule make ( native )
@@ -46,7 +42,7 @@
return [ make-$(os) $(native) ] ;
}
-#
+
# Builds native representation of the path.
#
rule native ( path )
@@ -54,7 +50,7 @@
return [ native-$(os) $(path) ] ;
}
-#
+
# Tests if a path is rooted.
#
rule is-rooted ( path )
@@ -62,19 +58,22 @@
return [ MATCH "^(/)" : $(path) ] ;
}
-#
+
# Tests if a path has a parent.
#
rule has-parent ( path )
{
- if $(path) != / {
+ if $(path) != /
+ {
return 1 ;
- } else {
+ }
+ else
+ {
return ;
}
}
-#
+
# Returns the path without any directory components.
#
rule basename ( path )
@@ -82,41 +81,52 @@
return [ MATCH "([^/]+)$" : $(path) ] ;
}
-#
+
# Returns parent directory of the path. If no parent exists, error is issued.
#
rule parent ( path )
{
- if [ has-parent $(path) ] {
-
- if $(path) = . {
+ if [ has-parent $(path) ]
+ {
+ if $(path) = .
+ {
return .. ;
- } else {
-
+ }
+ else
+ {
# Strip everything at the end of path up to and including
# the last slash
local result = [ regex.match "((.*)/)?([^/]+)" : $(path) : 2 3 ] ;
# Did we strip what we shouldn't?
- if $(result[2]) = ".." {
+ if $(result[2]) = ".."
+ {
return $(path)/.. ;
- } else {
- if ! $(result[1]) {
- if [ is-rooted $(path) ] {
+ }
+ else
+ {
+ if ! $(result[1])
+ {
+ if [ is-rooted $(path) ]
+ {
result = / ;
- } else {
+ }
+ else
+ {
result = . ;
}
}
return $(result[1]) ;
}
}
- } else {
- error "Path '$(path)' has no parent" ;
+ }
+ else
+ {
+ errors.error "Path '$(path)' has no parent" ;
}
}
-#
+
# Returns path2 such that "[ join path path2 ] = .".
# The path may not contain ".." element or be rooted.
#
@@ -130,14 +140,15 @@
{
local tokens = [ regex.split $(path) "/" ] ;
local tokens2 ;
- for local i in $(tokens) {
+ for local i in $(tokens)
+ {
tokens2 += .. ;
}
return [ sequence.join $(tokens2) : "/" ] ;
}
}
-#
+
 # Auxiliary rule: does all the semantics of 'join', except for error checking.
# The error checking is separated because this rule is recursive, and I don't
# like the idea of checking the same input over and over.
@@ -156,31 +167,30 @@
return $(result) ;
}
-#
+
 # Concatenates the passed path elements. Generates an error if
# any element other than the first one is rooted.
#
rule join ( elements + )
{
- if ! $(elements[2])
+ if ! $(elements[2])
{
return $(elements[1]) ;
}
else
- {
+ {
for local e in $(elements[2-])
{
if [ is-rooted $(e) ]
{
- error only first element may be rooted ;
+ errors.error only first element may be rooted ;
}
}
return [ join-imp $(elements) ] ;
- }
+ }
}
-#
# If 'path' is relative, it is rooted at 'root'. Otherwise, it's unchanged.
#
rule root ( path root )
@@ -192,7 +202,7 @@
}
}
-#
+
# Returns the current working directory.
#
rule pwd ( )
@@ -205,12 +215,12 @@
{
.pwd = [ make [ PWD ] ] ;
return $(.pwd) ;
- }
+ }
}
-#
+
# Returns the list of files matching the given pattern in the
-# specified directory. Both directories and patterns are
+# specified directory. Both directories and patterns are
# supplied as portable paths. Each pattern should be non-absolute
# path, and can't contain "." or ".." elements. Each slash separated
# element of pattern can contain the following special characters:
@@ -219,9 +229,9 @@
# A file $(d)/e1/e2/e3 (where 'd' is in $(dirs)) matches pattern p1/p2/p3
# if and only if e1 matches p1, e2 matches p2 and so on.
#
-# For example:
-# [ glob . : *.cpp ]
-# [ glob . : */build/Jamfile ]
+# For example:
+# [ glob . : *.cpp ]
+# [ glob . : */build/Jamfile ]
rule glob ( dirs * : patterns + : exclude-patterns * )
{
local result ;
@@ -233,23 +243,24 @@
{
local pattern = [ path.root $(p) $(d) ] ;
real-patterns += [ path.native $(pattern) ] ;
- }
-
+ }
+
for local p in $(exclude-patterns)
{
local pattern = [ path.root $(p) $(d) ] ;
real-exclude-patterns += [ path.native $(pattern) ] ;
- }
- }
+ }
+ }
local inc = [ GLOB-RECURSIVELY $(real-patterns) ] ;
inc = [ sequence.transform NORMALIZE_PATH : $(inc) ] ;
local exc = [ GLOB-RECURSIVELY $(real-exclude-patterns) ] ;
exc = [ sequence.transform NORMALIZE_PATH : $(exc) ] ;
-
- return [ sequence.transform path.make :
+
+ return [ sequence.transform path.make :
[ set.difference $(inc) : $(exc) ] ] ;
-}
+}
+
# Recursive version of GLOB. Builds the glob of files while
# also searching in the subdirectories of the given roots. An
@@ -267,6 +278,7 @@
] ] ;
}
+
local rule .glob-tree ( roots * : patterns * : exclude-patterns * )
{
local excluded ;
@@ -290,7 +302,6 @@
}
-#
# Returns true is the specified file exists.
#
rule exists ( file )
@@ -300,8 +311,6 @@
NATIVE_RULE path : exists ;
-
-#
# Find out the absolute name of path and returns the list of all the parents,
# starting with the immediate one. Parents are returned as relative names.
# If 'upper_limit' is specified, directories above it will be pruned.
@@ -321,9 +330,10 @@
upper_ele = $(upper_ele[2-]) ;
path_ele = $(path_ele[2-]) ;
}
-
+
# All upper elements removed ?
- if ! $(upper_ele) {
+ if ! $(upper_ele)
+ {
# Create the relative paths to parents, number of elements in 'path_ele'
local result ;
for local i in $(path_ele) {
@@ -332,13 +342,13 @@
}
return $(result) ;
}
- else {
- error "$(upper_limit) is not prefix of $(path)" ;
+ else
+ {
+ errors.error "$(upper_limit) is not prefix of $(path)" ;
}
}
-#
# Search for 'pattern' in parent directories of 'dir', up till and including
# 'upper_limit', if it is specified, or till the filesystem root otherwise.
#
@@ -352,24 +362,24 @@
result = [ glob $(parent-dirs[1]) : $(patterns) ] ;
parent-dirs = $(parent-dirs[2-]) ;
}
- return $(result) ;
+ return $(result) ;
}
-#
+
# Assuming 'child' is a subdirectory of 'parent', return the relative
# path from 'parent' to 'child'
#
rule relative ( child parent )
{
- if $(parent) = "."
+ if $(parent) = "."
{
return $(child) ;
}
- else
- {
+ else
+ {
local split1 = [ regex.split $(parent) / ] ;
local split2 = [ regex.split $(child) / ] ;
-
+
while $(split1)
{
if $(split1[1]) = $(split2[1])
@@ -380,19 +390,20 @@
else
{
errors.error $(child) is not a subdir of $(parent) ;
- }
- }
+ }
+ }
if $(split2)
{
- return [ join $(split2) ] ;
+ return [ join $(split2) ] ;
}
else
{
return "." ;
}
- }
+ }
}
+
 # Returns the minimal path to path2 that is relative to path1.
#
rule relative-to ( path1 path2 )
@@ -417,6 +428,7 @@
return [ join . $(root_1) $(split2) ] ;
}
+
# Returns the list of paths which are used by the operating system
# for looking up programs
rule programs-path ( )
@@ -428,11 +440,12 @@
if $(p)
{
result += [ path.make $(p) ] ;
- }
+ }
}
return $(result) ;
}
+
rule make-NT ( native )
{
local tokens = [ regex.split $(native) "[/\\]" ] ;
@@ -450,22 +463,32 @@
{
result = /$(result) ;
}
-
+
if $(native) = ""
{
result = "." ;
}
-
+
return $(result) ;
}
+
rule native-NT ( path )
{
- local result = [ MATCH "^/?(.*)" : $(path) ] ;
+ local result ;
+ if [ is-rooted $(path) ] && ! [ regex.match "^/(.:)" : $(path) ]
+ {
+ result = $(path) ;
+ }
+ else
+ {
+ result = [ MATCH "^/?(.*)" : $(path) ] ;
+ }
result = [ sequence.join [ regex.split $(result) "/" ] : "\\" ] ;
return $(result) ;
}
+
rule make-UNIX ( native )
{
# VP: I have no idea how 'native' can be empty here! But it can!
@@ -474,21 +497,24 @@
errors.error "Empty path passed to 'make-UNIX'" ;
}
else
- {
+ {
return [ NORMALIZE_PATH $(native:T) ] ;
- }
+ }
}
+
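On UNIX, make simply normalizes the path, collapsing "." and ".." components,
as the __test__ assertions below show. For example, with the path module in
UNIX mode:

    import path ;
    ECHO [ path.make /sub1/sub2/.. ] ;   # /sub1
    ECHO [ path.make sub1/. ] ;          # sub1
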
rule native-UNIX ( path )
{
return $(path) ;
}
+
rule make-CYGWIN ( path )
{
return [ make-NT $(path) ] ;
}
+
rule native-CYGWIN ( path )
{
local result = $(path) ;
@@ -499,7 +525,7 @@
return [ native-UNIX $(result) ] ;
}
-#
+
# split-path-VMS: splits an input native path into
# device dir file (each part is optional),
# example:
@@ -516,7 +542,7 @@
return $(device) $(dir) $(file) ;
}
-#
+
# Converts a native VMS path into a portable path spec.
#
# Does not handle current-device absolute paths such
@@ -532,13 +558,13 @@
{
errors.error "Can't handle default-device absolute paths: " $(native) ;
}
-
+
local parts = [ split-path-VMS $(native) ] ;
local device = $(parts[1]) ;
local dir = $(parts[2]) ;
local file = $(parts[3]) ;
local elems ;
-
+
if $(device)
{
#
@@ -546,7 +572,7 @@
#
elems = /$(device) ;
}
-
+
if $(dir) = "[]"
{
#
@@ -558,7 +584,7 @@
{
dir = [ regex.replace $(dir) "\\[|\\]" "" ] ;
local dir_parts = [ regex.split $(dir) \\. ] ;
-
+
if $(dir_parts[1]) = ""
{
#
@@ -566,15 +592,15 @@
#
dir_parts = $(dir_parts[2--1]) ;
}
-
+
#
# replace "parent-directory" parts (- => ..)
#
dir_parts = [ regex.replace-list $(dir_parts) : - : .. ] ;
-
+
elems = $(elems) $(dir_parts) ;
}
-
+
if $(file)
{
if ! [ MATCH (\\.) : $(file) ]
@@ -592,7 +618,7 @@
return $(portable) ;
}
-#
+
# Converts a portable path spec into a native VMS path.
#
# Relies on having at least one dot (".") included in the file
@@ -634,7 +660,7 @@
file = $(maybe_file) ;
dir = [ sequence.join $(split[1--2]) : / ] ;
}
-
+
#
# Has dir spec ?
#
@@ -651,20 +677,20 @@
{
#
# Relative directory
- #
+ #
dir = "."$(dir) ;
}
dir = "["$(dir)"]" ;
}
-
+
native = [ sequence.join $(device) $(dir) $(file) ] ;
return $(native) ;
}
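
Putting make-VMS and native-VMS together: when the path module is in VMS mode,
the portable spec "/disk:/my_docs/work/Jamfile." corresponds to the native VMS
spelling "disk:[my_docs.work]Jamfile.", as the assertion near the end of the
__test__ rule below shows. Illustrative only:

    import path ;
    ECHO [ path.native "/disk:/my_docs/work/Jamfile." ] ;   # disk:[my_docs.work]Jamfile.
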
-rule __test__ ( ) {
-
+rule __test__ ( )
+{
import assert ;
import errors : try catch ;
@@ -694,7 +720,6 @@
assert.result ".." : parent "../foo" ;
assert.result "../../foo" : parent "../../foo/bar" ;
-
assert.result "." : reverse "." ;
assert.result ".." : reverse "foo" ;
assert.result "../../.." : reverse "foo/bar/giz" ;
@@ -733,7 +758,7 @@
local CWD = "/home/ghost" ;
assert.result . .. : all-parents "Jamfile" : "/home" : $(CWD) ;
assert.result . : all-parents "Jamfile" : "/home/ghost" : $(CWD) ;
-
+
assert.result "c/d" : relative "a/b/c/d" "a/b" ;
assert.result "foo" : relative "foo" "." ;
@@ -744,18 +769,21 @@
assert.result "foo/bar/giz" : make "foo\\bar\\giz" ;
assert.result "foo" : make "foo/." ;
assert.result "foo" : make "foo/bar/.." ;
+ assert.result "foo/bar" : make "foo/././././bar" ;
+ assert.result "/foo" : make "\\foo" ;
assert.result "/D:/My Documents" : make "D:\\My Documents" ;
assert.result "/c:/boost/tools/build/new/project.jam" : make "c:\\boost\\tools\\build\\test\\..\\new\\project.jam" ;
assert.result "foo\\bar\\giz" : native "foo/bar/giz" ;
assert.result "foo" : native "foo" ;
+ assert.result "\\foo" : native "/foo" ;
assert.result "D:\\My Documents\\Work" : native "/D:/My Documents/Work" ;
modules.poke path : os : UNIX ;
assert.result "foo/bar/giz" : make "foo/bar/giz" ;
assert.result "/sub1" : make "/sub1/." ;
- assert.result "/sub1" : make "/sub1/sub2/.." ;
+ assert.result "/sub1" : make "/sub1/sub2/.." ;
assert.result "sub1" : make "sub1/." ;
assert.result "sub1" : make "sub1/sub2/.." ;
assert.result "/foo/bar" : native "/foo/bar" ;
@@ -767,12 +795,12 @@
#
assert.result "disk:" "[dir]" "file" : split-path-VMS "disk:[dir]file" ;
assert.result "disk:" "[dir]" "" : split-path-VMS "disk:[dir]" ;
- assert.result "disk:" "" "" : split-path-VMS "disk:" ;
- assert.result "disk:" "" "file" : split-path-VMS "disk:file" ;
+ assert.result "disk:" "" "" : split-path-VMS "disk:" ;
+ assert.result "disk:" "" "file" : split-path-VMS "disk:file" ;
assert.result "" "[dir]" "file" : split-path-VMS "[dir]file" ;
assert.result "" "[dir]" "" : split-path-VMS "[dir]" ;
- assert.result "" "" "file" : split-path-VMS "file" ;
- assert.result "" "" "" : split-path-VMS "" ;
+ assert.result "" "" "file" : split-path-VMS "file" ;
+ assert.result "" "" "" : split-path-VMS "" ;
#
# Special case: current directory
@@ -820,5 +848,4 @@
assert.result "disk:[my_docs.work]Jamfile." : native "/disk:/my_docs/work/Jamfile." ;
modules.poke path : os : $(save-os) ;
-
}