Boost-Commit :
Subject: [Boost-commit] svn:boost r68437 - in branches/release/tools/build/v2: . build doc doc/src engine/doc engine/src engine/src/modules example/customization example/generate example/make kernel test tools tools/types util
From: ghost_at_[hidden]
Date: 2011-01-25 13:07:10
Author: vladimir_prus
Date: 2011-01-25 13:06:12 EST (Tue, 25 Jan 2011)
New Revision: 68437
URL: http://svn.boost.org/trac/boost/changeset/68437
Log:
Merge Boost.Build from trunk
Removed:
branches/release/tools/build/v2/doc/src/v1_vs_v2.xml
branches/release/tools/build/v2/engine/doc/
branches/release/tools/build/v2/example/customization/jamfile.jam
Properties modified:
branches/release/tools/build/v2/ (props changed)
branches/release/tools/build/v2/user-config.jam (props changed)
Text files modified:
branches/release/tools/build/v2/build-system.jam | 6
branches/release/tools/build/v2/build/alias.py | 13
branches/release/tools/build/v2/build/build_request.py | 113 ++--
branches/release/tools/build/v2/build/engine.py | 35
branches/release/tools/build/v2/build/errors.py | 11
branches/release/tools/build/v2/build/feature.py | 589 +++++++++++-----------
branches/release/tools/build/v2/build/generators.py | 228 ++++++--
branches/release/tools/build/v2/build/project.jam | 25
branches/release/tools/build/v2/build/project.py | 312 +++++++---
branches/release/tools/build/v2/build/property.jam | 42 +
branches/release/tools/build/v2/build/property.py | 373 ++++++-------
branches/release/tools/build/v2/build/property_set.py | 233 +++++--
branches/release/tools/build/v2/build/scanner.py | 7
branches/release/tools/build/v2/build/targets.jam | 4
branches/release/tools/build/v2/build/targets.py | 565 +++++++++++++--------
branches/release/tools/build/v2/build/toolset.py | 168 +++---
branches/release/tools/build/v2/build/type.py | 31
branches/release/tools/build/v2/build/virtual-target.jam | 15
branches/release/tools/build/v2/build/virtual_target.py | 237 +++++---
branches/release/tools/build/v2/build_system.py | 1051 ++++++++++++++++++++++++++++-----------
branches/release/tools/build/v2/doc/jamfile.jam | 2
branches/release/tools/build/v2/doc/src/install.xml | 162 +----
branches/release/tools/build/v2/doc/src/overview.xml | 72 ++
branches/release/tools/build/v2/doc/src/reference.xml | 4
branches/release/tools/build/v2/doc/src/standalone.xml | 4
branches/release/tools/build/v2/doc/src/tutorial.xml | 4
branches/release/tools/build/v2/doc/src/userman.xml | 2
branches/release/tools/build/v2/engine/src/Jambase | 5
branches/release/tools/build/v2/engine/src/build.jam | 15
branches/release/tools/build/v2/engine/src/build.sh | 13
branches/release/tools/build/v2/engine/src/builtins.c | 80 ++
branches/release/tools/build/v2/engine/src/builtins.h | 1
branches/release/tools/build/v2/engine/src/compile.c | 164 ++++--
branches/release/tools/build/v2/engine/src/jam.c | 68 ++
branches/release/tools/build/v2/engine/src/jambase.c | 5
branches/release/tools/build/v2/engine/src/lists.c | 34 +
branches/release/tools/build/v2/engine/src/lists.h | 11
branches/release/tools/build/v2/engine/src/modules/order.c | 6
branches/release/tools/build/v2/engine/src/pathsys.h | 9
branches/release/tools/build/v2/engine/src/rules.c | 1
branches/release/tools/build/v2/engine/src/rules.h | 1
branches/release/tools/build/v2/engine/src/scan.c | 2
branches/release/tools/build/v2/example/customization/jamroot.jam | 5
branches/release/tools/build/v2/example/generate/jamroot.jam | 28
branches/release/tools/build/v2/example/make/jamroot.jam | 5
branches/release/tools/build/v2/kernel/bootstrap.jam | 33 +
branches/release/tools/build/v2/kernel/bootstrap.py | 2
branches/release/tools/build/v2/manager.py | 22
branches/release/tools/build/v2/nightly.sh | 9
branches/release/tools/build/v2/roll.sh | 12
branches/release/tools/build/v2/test/BoostBuild.py | 22
branches/release/tools/build/v2/test/absolute_sources.py | 15
branches/release/tools/build/v2/test/chain.py | 4
branches/release/tools/build/v2/test/custom_generator.py | 17
branches/release/tools/build/v2/test/default_build.py | 3
branches/release/tools/build/v2/test/dll_path.py | 46 +
branches/release/tools/build/v2/test/explicit.py | 4
branches/release/tools/build/v2/test/generator_selection.py | 22
branches/release/tools/build/v2/test/loop.py | 1
branches/release/tools/build/v2/test/notfile.py | 2
branches/release/tools/build/v2/test/project_root_constants.py | 5
branches/release/tools/build/v2/test/searched_lib.py | 6
branches/release/tools/build/v2/test/standalone.py | 17
branches/release/tools/build/v2/test/suffix.py | 17
branches/release/tools/build/v2/test/test_all.py | 4
branches/release/tools/build/v2/test/using.py | 9
branches/release/tools/build/v2/test/wrong_project.py | 9
branches/release/tools/build/v2/tools/boostbook.jam | 3
branches/release/tools/build/v2/tools/builtin.py | 172 +++---
branches/release/tools/build/v2/tools/clang-linux.jam | 4
branches/release/tools/build/v2/tools/common.py | 33 +
branches/release/tools/build/v2/tools/fop.jam | 29
branches/release/tools/build/v2/tools/gcc.jam | 85 +++
branches/release/tools/build/v2/tools/gcc.py | 14
branches/release/tools/build/v2/tools/make.py | 18
branches/release/tools/build/v2/tools/msvc.jam | 5
branches/release/tools/build/v2/tools/package.jam | 61 +
branches/release/tools/build/v2/tools/pathscale.jam | 49 +
branches/release/tools/build/v2/tools/pgi.jam | 4
branches/release/tools/build/v2/tools/qcc.jam | 6
branches/release/tools/build/v2/tools/quickbook.jam | 8
branches/release/tools/build/v2/tools/types/lib.py | 90 ++
branches/release/tools/build/v2/tools/unix.py | 6
branches/release/tools/build/v2/util/__init__.py | 136 +++++
branches/release/tools/build/v2/util/logger.py | 2
branches/release/tools/build/v2/util/path.py | 112 +--
branches/release/tools/build/v2/util/sequence.py | 24
87 files changed, 3806 insertions(+), 2097 deletions(-)
Modified: branches/release/tools/build/v2/build-system.jam
==============================================================================
--- branches/release/tools/build/v2/build-system.jam (original)
+++ branches/release/tools/build/v2/build-system.jam 2011-01-25 13:06:12 EST (Tue, 25 Jan 2011)
@@ -215,9 +215,9 @@
# Initializes a new configuration module.
#
-local rule initialize-config-module ( module-name )
+local rule initialize-config-module ( module-name : location ? )
{
- project.initialize $(module-name) ;
+ project.initialize $(module-name) : $(location) ;
if USER_MODULE in [ RULENAMES ]
{
USER_MODULE $(module-name) ;
@@ -419,7 +419,7 @@
}
if $(file)
{
- initialize-config-module project-config ;
+ initialize-config-module project-config : $(file:D) ;
load-config project-config : project-config.jam : $(file:D) ;
}
}
Modified: branches/release/tools/build/v2/build/alias.py
==============================================================================
--- branches/release/tools/build/v2/build/alias.py (original)
+++ branches/release/tools/build/v2/build/alias.py 2011-01-25 13:06:12 EST (Tue, 25 Jan 2011)
@@ -29,6 +29,8 @@
import property_set
from b2.manager import get_manager
+from b2.util import metatarget
+
class AliasTarget(targets.BasicTarget):
def __init__(self, *args):
@@ -43,16 +45,15 @@
# look like 100% alias.
return base.add(subvariant.sources_usage_requirements())
-def alias(name, sources, requirements=None, default_build=None, usage_requirements=None):
+@metatarget
+def alias(name, sources=[], requirements=[], default_build=[], usage_requirements=[]):
+
project = get_manager().projects().current()
targets = get_manager().targets()
- if default_build:
- default_build = default_build[0]
-
targets.main_target_alternative(AliasTarget(
- name[0], project,
- targets.main_target_sources(sources, name),
+ name, project,
+ targets.main_target_sources(sources, name, no_renaming=True),
targets.main_target_requirements(requirements or [], project),
targets.main_target_default_build(default_build, project),
targets.main_target_usage_requirements(usage_requirements or [], project)))
Modified: branches/release/tools/build/v2/build/build_request.py
==============================================================================
--- branches/release/tools/build/v2/build/build_request.py (original)
+++ branches/release/tools/build/v2/build/build_request.py 2011-01-25 13:06:12 EST (Tue, 25 Jan 2011)
@@ -7,80 +7,85 @@
# all copies. This software is provided "as is" without express or implied
# warranty, and with no claim as to its suitability for any purpose.
-import feature
-from b2.util import set
+import b2.build.feature
+feature = b2.build.feature
+
from b2.util.utility import *
+import b2.build.property_set as property_set
def expand_no_defaults (property_sets):
""" Expand the given build request by combining all property_sets which don't
specify conflicting non-free features.
"""
# First make all features and subfeatures explicit
- expanded_property_sets = [ __apply_to_property_set (feature.expand_subfeatures, x) for x in property_sets ]
+ expanded_property_sets = [ps.expand_subfeatures() for ps in property_sets]
# Now combine all of the expanded property_sets
product = __x_product (expanded_property_sets)
- return product
-
-def __apply_to_property_set (f, property_set):
- """ Transform property_set by applying f to each component property.
- """
- properties = feature.split (property_set)
- return '/'.join (f (properties))
-
+ return [property_set.create(p) for p in product]
def __x_product (property_sets):
""" Return the cross-product of all elements of property_sets, less any
that would contain conflicting values for single-valued features.
"""
- x_product_seen = []
- x_product_used = []
- feature_space = []
- return __x_product_aux (property_sets, x_product_seen, x_product_used, feature_space)
-
-def __x_product_aux (property_sets, x_product_seen, x_product_used, feature_space):
- """ Implementation of __x_product.
+ x_product_seen = set()
+ return __x_product_aux (property_sets, x_product_seen)[0]
+
+def __x_product_aux (property_sets, seen_features):
+ """Returns non-conflicting combinations of property sets.
+
+ property_sets is a list of PropertySet instances. seen_features is a set of Property
+ instances.
+
+ Returns a tuple of:
+ - list of lists of Property instances, such that within each list, no two Property instance
+ have the same feature, and no Property is for feature in seen_features.
+ - set of features we saw in property_sets
"""
- result = []
-
- if property_sets:
- p = feature.split (property_sets [0])
+ if not property_sets:
+ return ([], set())
+
+ properties = property_sets[0].all()
+
+ these_features = set()
+ for p in property_sets[0].non_free():
+ these_features.add(p.feature())
+
+ # Note: the algorithm as implemented here, as in original Jam code, appears to
+ # detect conflicts based on features, not properties. For example, if command
+ # line build request say:
+ #
+ # <a>1/<b>1 c<1>/<b>1
+ #
+ # It will decide that those two property sets conflict, because they both specify
+ # a value for 'b' and will not try building "<a>1 <c1> <b1>", but rather two
+ # different property sets. This is a topic for future fixing, maybe.
+ if these_features & seen_features:
+
+ (inner_result, inner_seen) = __x_product_aux(property_sets[1:], seen_features)
+ return (inner_result, inner_seen | these_features)
+
else:
- p = []
-
- f = set.difference (get_grist (p), feature.free_features ())
-
- seen = []
- # No conflict with things used at a higher level?
- if not set.intersection (f, x_product_used):
- # don't mix in any conflicting features
- local_x_product_used = x_product_used + f
- local_x_product_seen = []
-
- if len (property_sets) > 1:
- rest = __x_product_aux (property_sets [1:], local_x_product_seen, local_x_product_used, feature_space)
- result = [ property_sets [0] + '/' + x for x in rest]
-
- if not result and property_sets:
- result = [property_sets [0]]
-
- # If we didn't encounter a conflicting feature lower down,
- # don't recurse again.
- if not set.intersection (f, local_x_product_seen):
- property_sets = []
+
+ result = []
+ (inner_result, inner_seen) = __x_product_aux(property_sets[1:], seen_features | these_features)
+ if inner_result:
+ for inner in inner_result:
+ result.append(properties + inner)
+ else:
+ result.append(properties)
- seen = local_x_product_seen
-
- if len (property_sets) > 1:
- result.extend (__x_product_aux (property_sets [1:], x_product_seen, x_product_used, feature_space))
- x_product_seen += f + seen
-
- # Note that we've seen these features so that higher levels will
- # recurse again without them set.
+ if inner_seen & these_features:
+ # Some of elements in property_sets[1:] conflict with elements of property_sets[0],
+ # Try again, this time omitting elements of property_sets[0]
+ (inner_result2, inner_seen2) = __x_product_aux(property_sets[1:], seen_features)
+ result.extend(inner_result2)
+
+ return (result, inner_seen | these_features)
- return result
+
def looks_like_implicit_value(v):
"""Returns true if 'v' is either implicit value, or
@@ -142,7 +147,7 @@
else:
result = [e1 + "/" + e2 for e1 in result for e2 in lresult]
- return result
+ return [property_set.create(b2.build.feature.split(r)) for r in result]
###
### rule __test__ ( )
Modified: branches/release/tools/build/v2/build/engine.py
==============================================================================
--- branches/release/tools/build/v2/build/engine.py (original)
+++ branches/release/tools/build/v2/build/engine.py 2011-01-25 13:06:12 EST (Tue, 25 Jan 2011)
@@ -7,6 +7,10 @@
bjam_interface = __import__('bjam')
import operator
+import re
+
+import b2.build.property_set as property_set
+import b2.util
class BjamAction:
"""Class representing bjam action defined from Python."""
@@ -18,6 +22,7 @@
def __call__(self, targets, sources, property_set):
if self.function:
self.function(targets, sources, property_set)
+
# Bjam actions defined from Python have only the command
# to execute, and no associated jam procedural code. So
# passing 'property_set' to it is not necessary.
@@ -25,18 +30,25 @@
targets, sources, [])
class BjamNativeAction:
- """Class representing bjam action fully defined by Jam code."""
+ """Class representing bjam action defined by Jam code.
+
+ We still allow to associate a Python callable that will
+ be called when this action is installed on any target.
+ """
- def __init__(self, action_name):
+ def __init__(self, action_name, function):
self.action_name = action_name
+ self.function = function
def __call__(self, targets, sources, property_set):
+ if self.function:
+ self.function(targets, sources, property_set)
+
+ p = []
if property_set:
- bjam_interface.call("set-update-action", self.action_name,
- targets, sources, property_set.raw())
- else:
- bjam_interface.call("set-update-action", self.action_name,
- targets, sources, [])
+ p = property_set.raw()
+
+ b2.util.set_jam_action(self.action_name, targets, sources, p)
action_modifiers = {"updated": 0x01,
"together": 0x02,
@@ -83,7 +95,7 @@
for target in targets:
self.do_set_target_variable (target, variable, value, append)
- def set_update_action (self, action_name, targets, sources, properties):
+ def set_update_action (self, action_name, targets, sources, properties=property_set.empty()):
""" Binds a target to the corresponding update action.
If target needs to be updated, the action registered
with action_name will be used.
@@ -91,6 +103,7 @@
either 'register_action' or 'register_bjam_action'
method.
"""
+ assert(isinstance(properties, property_set.PropertySet))
if isinstance (targets, str):
targets = [targets]
self.do_set_update_action (action_name, targets, sources, properties)
@@ -123,7 +136,7 @@
self.actions[action_name] = BjamAction(action_name, function)
- def register_bjam_action (self, action_name):
+ def register_bjam_action (self, action_name, function=None):
"""Informs self that 'action_name' is declared in bjam.
From this point, 'action_name' is a valid argument to the
@@ -136,7 +149,7 @@
# can just register them without specially checking if
# action is already registered.
if not self.actions.has_key(action_name):
- self.actions[action_name] = BjamNativeAction(action_name)
+ self.actions[action_name] = BjamNativeAction(action_name, function)
# Overridables
@@ -144,7 +157,7 @@
def do_set_update_action (self, action_name, targets, sources, property_set):
action = self.actions.get(action_name)
if not action:
- raise "No action %s was registered" % action_name
+ raise Exception("No action %s was registered" % action_name)
action(targets, sources, property_set)
def do_set_target_variable (self, target, variable, value, append):
Modified: branches/release/tools/build/v2/build/errors.py
==============================================================================
--- branches/release/tools/build/v2/build/errors.py (original)
+++ branches/release/tools/build/v2/build/errors.py 2011-01-25 13:06:12 EST (Tue, 25 Jan 2011)
@@ -20,7 +20,7 @@
import sys
def format(message, prefix=""):
- parts = message.split("\n")
+ parts = str(message).split("\n")
return "\n".join(prefix+p for p in parts)
@@ -58,9 +58,9 @@
self.stack_ = stack
def report(self):
- print "error:", self.message
+ print "error:", self.args[0]
if self.original_exception_:
- print format(self.original_exception_.message, " ")
+ print format(str(self.original_exception_), " ")
print
print " error context (most recent first):"
for c in self.context_[::-1]:
@@ -94,6 +94,10 @@
def __init__(self):
self.contexts_ = []
+ self._count = 0
+
+ def count(self):
+ return self._count
def push_user_context(self, message, nested=None):
self.contexts_.append(Context(message, nested))
@@ -114,6 +118,7 @@
raise ExceptionWithUserContext("unexpected exception", self.contexts_[:],
e, sys.exc_info()[2])
def __call__(self, message):
+ self._count = self._count + 1
raise ExceptionWithUserContext(message, self.contexts_[:],
stack=traceback.extract_stack())
Modified: branches/release/tools/build/v2/build/feature.py
==============================================================================
--- branches/release/tools/build/v2/build/feature.py (original)
+++ branches/release/tools/build/v2/build/feature.py 2011-01-25 13:06:12 EST (Tue, 25 Jan 2011)
@@ -1,6 +1,5 @@
-# Status: mostly ported.
-# TODO: carry over tests.
-# Base revision: 56043
+# Status: ported, except for unit tests.
+# Base revision: 64488
#
# Copyright 2001, 2002, 2003 Dave Abrahams
# Copyright 2002, 2006 Rene Rivera
@@ -8,13 +7,10 @@
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-# TODO: stop using grists to identify the name of features?
-# create a class for Features and Properties?
-# represent conditions using object trees, composite pattern?
-
import re
-from b2.util import set, utility
+from b2.util import utility, bjam_signature
+import b2.util.set
from b2.util.utility import add_grist, get_grist, ungrist, replace_grist, to_seq
from b2.exceptions import *
@@ -22,16 +18,75 @@
__re_no_hyphen = re.compile ('^([^:]+)$')
__re_slash_or_backslash = re.compile (r'[\\/]')
+class Feature(object):
+
+ # Map from string attribute names to integers bit flags.
+ # This will be initialized after declaration of the class.
+ _attribute_name_to_integer = {}
+
+ def __init__(self, name, values, attributes):
+ self._name = name
+ self._values = values
+ self._default = None
+ self._attributes = 0
+ for a in attributes:
+ self._attributes = self._attributes | Feature._attribute_name_to_integer[a]
+ self._attributes_string_list = attributes
+ self._subfeatures = []
+ self._parent = None
+
+ def name(self):
+ return self._name
+
+ def values(self):
+ return self._values
+
+ def add_values(self, values):
+ self._values.extend(values)
+
+ def attributes(self):
+ return self._attributes
+
+ def set_default(self, value):
+ self._default = value
+
+ def default(self):
+ return self._default
+
+ # FIXME: remove when we fully move to using classes for features/properties
+ def attributes_string_list(self):
+ return self._attributes_string_list
+
+ def subfeatures(self):
+ return self._subfeatures
+
+ def add_subfeature(self, name):
+ self._subfeatures.append(name)
+
+ def parent(self):
+ """For subfeatures, return pair of (parent_feature, value).
+
+ Value may be None if this subfeature is not specific to any
+ value of the parent feature.
+ """
+ return self._parent
+
+ def set_parent(self, feature, value):
+ self._parent = (feature, value)
+
+ def __str__(self):
+ return self._name
+
+
def reset ():
""" Clear the module state. This is mainly for testing purposes.
"""
global __all_attributes, __all_features, __implicit_features, __composite_properties
- global __features_with_attributes, __subfeature_value_to_name, __all_top_features, __free_features
+ global __features_with_attributes, __subfeature_from_value, __all_top_features, __free_features
global __all_subfeatures
# The list with all attribute names.
__all_attributes = [ 'implicit',
- 'executed',
'composite',
'optional',
'symmetric',
@@ -44,12 +99,17 @@
'subfeature',
'order-sensitive'
]
+ i = 1
+ for a in __all_attributes:
+ setattr(Feature, a.upper(), i)
+ Feature._attribute_name_to_integer[a] = i
+ def probe(self, flag=i):
+ return getattr(self, "_attributes") & flag
+ setattr(Feature, a.replace("-", "_"), probe)
+ i = i << 1
- # A map containing all features. The key is the gristed feature name. The value is a map with:
- # 'values': [],
- # 'attributes': [],
- # 'subfeatures': [],
- # 'default': None
+ # A map containing all features. The key is the feature name.
+ # The value is an instance of Feature class.
__all_features = {}
# All non-subfeatures.
@@ -58,8 +118,8 @@
# Maps valus to the corresponding implicit feature
__implicit_features = {}
- # A map containing all composite properties. The key is the name of the property. The value is a map with:
- # 'components': []
+ # A map containing all composite properties. The key is a Property instance,
+ # and the value is a list of Property instances
__composite_properties = {}
__features_with_attributes = {}
@@ -67,7 +127,7 @@
__features_with_attributes [attribute] = []
# Maps a value to the corresponding subfeature name.
- __subfeature_value_to_name = {}
+ __subfeature_from_value = {}
# All free features
__free_features = []
@@ -81,35 +141,38 @@
"""
return __all_features.iteritems ()
+def get(name):
+ """Return the Feature instance for the specified name.
+
+ Throws if no feature by such name exists
+ """
+ return __all_features[name]
+
# FIXME: prepare-test/finish-test?
+@bjam_signature((["name"], ["values", "*"], ["attributes", "*"]))
def feature (name, values, attributes = []):
""" Declares a new feature with the given name, values, and attributes.
name: the feature name
values: a sequence of the allowable values - may be extended later with feature.extend
attributes: a sequence of the feature's attributes (e.g. implicit, free, propagated, ...)
"""
- name = add_grist (name)
-
__validate_feature_attributes (name, attributes)
- feature = {
- 'values': [],
- 'attributes': attributes,
- 'subfeatures': [],
- 'default': None
- }
- __all_features [name] = feature
-
- feature ['attributes'] = attributes
-
+ feature = Feature(name, [], attributes)
+ __all_features[name] = feature
+ # Temporary measure while we have not fully moved from 'gristed strings'
+ __all_features["<" + name + ">"] = feature
+
for attribute in attributes:
__features_with_attributes [attribute].append (name)
+
+ name = add_grist(name)
if 'subfeature' in attributes:
__all_subfeatures.append(name)
else:
- __all_top_features.append(name)
+ __all_top_features.append(feature)
extend (name, values)
@@ -117,47 +180,41 @@
if 'free' in attributes:
__free_features.append (name)
+ return feature
+
+@bjam_signature((["feature"], ["value"]))
def set_default (feature, value):
""" Sets the default value of the given feature, overriding any previous default.
feature: the name of the feature
value: the default value to assign
"""
-
- if isinstance(feature, list):
- feature = feature[0]
-
- feature = add_grist (feature)
- f = __all_features [feature]
- attributes = f['attributes']
+ f = __all_features[feature]
+ attributes = f.attributes()
bad_attribute = None
- if "free" in attributes:
+ if attributes & Feature.FREE:
bad_attribute = "free"
- elif "optional" in attributes:
+ elif attributes & Feature.OPTIONAL:
bad_attribute = "optional"
if bad_attribute:
- raise InvalidValue ("%s property %s cannot have a default" % (bad_attribute, feature))
+ raise InvalidValue ("%s property %s cannot have a default" % (bad_attribute, feature.name()))
- if isinstance(value, list):
- value = value[0]
-
- values = f['values']
- if not value in values:
+ if not value in f.values():
raise InvalidValue ("The specified default value, '%s' is invalid.\n" % value + "allowed values are: %s" % values)
- f ['default'] = value
+ f.set_default(value)
-def defaults (features):
+def defaults(features):
""" Returns the default property values for the given features.
"""
+ # FIXME: should merge feature and property modules.
+ import property
+
result = []
for f in features:
- attributes = __all_features [f]['attributes']
- if not 'free' in attributes and not 'optional' in attributes:
- defaults = __all_features [f]['default']
- if defaults:
- result.append (replace_grist (defaults, f))
+ if not f.free() and not f.optional() and f.default():
+ result.append(property.Property(f, f.default()))
return result
@@ -174,20 +231,24 @@
def attributes (feature):
""" Returns the attributes of the given feature.
"""
- return __all_features [feature]['attributes']
+ return __all_features[feature].attributes_string_list()
def values (feature):
""" Return the values of the given feature.
"""
validate_feature (feature)
- return __all_features [feature]['values']
+ return __all_features[feature].values()
def is_implicit_value (value_string):
""" Returns true iff 'value_string' is a value_string
of an implicit feature.
"""
- v = value_string.split('-')
+
+ if __implicit_features.has_key(value_string):
+ return __implicit_features[value_string]
+ v = value_string.split('-')
+
if not __implicit_features.has_key(v[0]):
return False
@@ -210,15 +271,15 @@
return __implicit_features[components[0]]
def __find_implied_subfeature (feature, subvalue, value_string):
- feature = add_grist (feature)
- if value_string == None: value_string = ''
+
+ #if value_string == None: value_string = ''
- if not __subfeature_value_to_name.has_key (feature) \
- or not __subfeature_value_to_name [feature].has_key (value_string) \
- or not __subfeature_value_to_name [feature][value_string].has_key (subvalue):
+ if not __subfeature_from_value.has_key(feature) \
+ or not __subfeature_from_value[feature].has_key(value_string) \
+ or not __subfeature_from_value[feature][value_string].has_key (subvalue):
return None
- return __subfeature_value_to_name[feature][value_string][subvalue]
+ return __subfeature_from_value[feature][value_string][subvalue]
# Given a feature and a value of one of its subfeatures, find the name
# of the subfeature. If value-string is supplied, looks for implied
@@ -237,9 +298,10 @@
def validate_feature (name):
""" Checks if all name is a valid feature. Otherwise, raises an exception.
"""
- x = valid (name)
- if not x:
+ if not __all_features.has_key(name):
raise InvalidFeature ("'%s' is not a valid feature name" % name)
+ else:
+ return __all_features[name]
def valid (names):
""" Returns true iff all elements of names are valid features.
@@ -251,7 +313,8 @@
else:
return [ valid_one (name) for name in names ]
-def __expand_subfeatures_aux (feature, value, dont_validate = False):
+# Uses Property
+def __expand_subfeatures_aux (property, dont_validate = False):
""" Helper for expand_subfeatures.
Given a feature and value, or just a value corresponding to an
implicit feature, returns a property set consisting of all component
@@ -266,20 +329,18 @@
value: The value of the feature.
dont_validate: If True, no validation of value string will be done.
"""
- if not feature:
- feature = implied_feature(value)
- else:
- validate_feature(feature)
-
+ f = property.feature()
+ v = property.value()
if not dont_validate:
- validate_value_string(feature, value)
-
- components = value.split ("-")
+ validate_value_string(f, v)
+
+ components = v.split ("-")
- # get the top-level feature's value
- value = replace_grist(components[0], '')
+ v = components[0]
+
+ import property
- result = [ replace_grist(components[0], feature) ]
+ result = [property.Property(f, components[0])]
subvalues = components[1:]
@@ -287,21 +348,17 @@
subvalue = subvalues [0] # pop the head off of subvalues
subvalues = subvalues [1:]
- subfeature = __find_implied_subfeature (feature, subvalue, value)
+ subfeature = __find_implied_subfeature (f, subvalue, v)
# If no subfeature was found, reconstitute the value string and use that
if not subfeature:
- result = '-'.join(components)
- result = replace_grist (result, feature)
- return [result]
+ return [property.Property(f, '-'.join(components))]
- f = ungrist (feature)
- # FIXME: why grist includes '<>'?
- result.append (replace_grist (subvalue, '<' + f + '-' + subfeature + '>'))
+ result.append(property.Property(subfeature, subvalue))
return result
-def expand_subfeatures (properties, dont_validate = False):
+def expand_subfeatures(properties, dont_validate = False):
"""
Make all elements of properties corresponding to implicit features
explicit, and express all subfeature values as separate properties
@@ -320,12 +377,11 @@
"""
result = []
for p in properties:
- p_grist = get_grist (p)
# Don't expand subfeatures in subfeatures
- if ':' in p_grist:
+ if p.feature().subfeature():
result.append (p)
else:
- result.extend (__expand_subfeatures_aux (p_grist, replace_grist (p, ''), dont_validate))
+ result.extend(__expand_subfeatures_aux (p, dont_validate))
return result
@@ -355,42 +411,42 @@
name = add_grist (name)
__validate_feature (name)
feature = __all_features [name]
-
- if 'implicit' in feature ['attributes']:
+
+ if feature.implicit():
for v in values:
- if __implicit_features.has_key (v):
+ if __implicit_features.has_key(v):
raise BaseException ("'%s' is already associated with the feature '%s'" % (v, __implicit_features [v]))
- __implicit_features[v] = name
+ __implicit_features[v] = feature
- if len (feature ['values']) == 0 and len (values) > 0:
+ if len (feature.values()) == 0 and len (values) > 0:
# This is the first value specified for this feature,
# take it as default value
- feature ['default'] = values[0]
+ feature.set_default(values[0])
- feature['values'].extend (values)
+ feature.add_values(values)
-def validate_value_string (feature, value_string):
+def validate_value_string (f, value_string):
""" Checks that value-string is a valid value-string for the given feature.
"""
- f = __all_features [feature]
- if 'free' in f ['attributes'] or value_string in f ['values']:
+ if f.free() or value_string in f.values():
return
values = [value_string]
- if f['subfeatures']:
- if not value_string in f['subfeatures']:
+ if f.subfeatures():
+ if not value_string in f.values() and \
+ not value_string in f.subfeatures():
values = value_string.split('-')
# An empty value is allowed for optional features
- if not values[0] in f['values'] and \
- (values[0] or not 'optional' in f['attributes']):
- raise InvalidValue ("'%s' is not a known value of feature '%s'\nlegal values: '%s'" % (values [0], feature, f ['values']))
+ if not values[0] in f.values() and \
+ (values[0] or not f.optional()):
+ raise InvalidValue ("'%s' is not a known value of feature '%s'\nlegal values: '%s'" % (values [0], feature, f.values()))
for v in values [1:]:
# this will validate any subfeature values in value-string
- implied_subfeature(feature, v, values[0])
+ implied_subfeature(f, v, values[0])
""" Extends the given subfeature with the subvalues. If the optional
@@ -410,20 +466,31 @@
subvalues: The additional values of the subfeature being defined.
"""
-def extend_subfeature (feature, value_string, subfeature, subvalues):
- feature = add_grist (feature)
- validate_feature (feature)
+def extend_subfeature (feature_name, value_string, subfeature_name, subvalues):
+ feature = validate_feature(feature_name)
+
if value_string:
- validate_value_string (feature, value_string)
+ validate_value_string(feature, value_string)
- subfeature_name = __get_subfeature_name (subfeature, value_string)
+ subfeature_name = feature_name + '-' + __get_subfeature_name (subfeature_name, value_string)
- f = ungrist (feature)
- extend (f + '-' + subfeature_name, subvalues) ;
+ extend(subfeature_name, subvalues) ;
+ subfeature = __all_features[subfeature_name]
+
+ if value_string == None: value_string = ''
- __add_to_subfeature_value_to_name_map (feature, value_string, subfeature_name, subvalues)
+ if not __subfeature_from_value.has_key(feature):
+ __subfeature_from_value [feature] = {}
+
+ if not __subfeature_from_value[feature].has_key(value_string):
+ __subfeature_from_value [feature][value_string] = {}
+
+ for subvalue in subvalues:
+ __subfeature_from_value [feature][value_string][subvalue] = subfeature
+@bjam_signature((["feature_name", "value_string", "?"], ["subfeature"],
+ ["subvalues", "*"], ["attributes", "*"]))
def subfeature (feature_name, value_string, subfeature, subvalues, attributes = []):
""" Declares a subfeature.
feature_name: Root feature that is not a subfeature.
@@ -434,50 +501,60 @@
subvalues: The allowed values of this subfeature.
attributes: The attributes of the subfeature.
"""
- feature_name = add_grist (feature_name)
- validate_feature (feature_name)
+ parent_feature = validate_feature (feature_name)
# Add grist to the subfeature name if a value-string was supplied
subfeature_name = __get_subfeature_name (subfeature, value_string)
- if subfeature_name in __all_features [feature_name]['subfeatures']:
+ if subfeature_name in __all_features[feature_name].subfeatures():
message = "'%s' already declared as a subfeature of '%s'" % (subfeature, feature_name)
message += " specific to '%s'" % value_string
raise BaseException (message)
- __all_features [feature_name]['subfeatures'].append (subfeature_name)
-
# First declare the subfeature as a feature in its own right
- f = ungrist (feature_name)
- feature (f + '-' + subfeature_name, subvalues, attributes + ['subfeature'])
+ f = feature (feature_name + '-' + subfeature_name, subvalues, attributes + ['subfeature'])
+ f.set_parent(parent_feature, value_string)
+ parent_feature.add_subfeature(f)
+
# Now make sure the subfeature values are known.
extend_subfeature (feature_name, value_string, subfeature, subvalues)
-def compose (composite_property, component_properties):
+
+@bjam_signature((["composite_property_s"], ["component_properties_s", "*"]))
+def compose (composite_property_s, component_properties_s):
""" Sets the components of the given composite property.
+
+      All parameters are <feature>value strings
"""
- component_properties = to_seq (component_properties)
+ import property
+
+ component_properties_s = to_seq (component_properties_s)
+ composite_property = property.create_from_string(composite_property_s)
+ f = composite_property.feature()
- feature = get_grist (composite_property)
- if not 'composite' in attributes (feature):
- raise BaseException ("'%s' is not a composite feature" % feature)
+ if len(component_properties_s) > 0 and isinstance(component_properties_s[0], property.Property):
+ component_properties = component_properties_s
+ else:
+ component_properties = [property.create_from_string(p) for p in component_properties_s]
+
+ if not f.composite():
+ raise BaseException ("'%s' is not a composite feature" % f)
- if __composite_properties.has_key (composite_property):
- raise BaseException ('components of "%s" already set: %s' % (composite_property, str (__composite_properties [composite_property]['components'])))
+ if __composite_properties.has_key(property):
+ raise BaseException ('components of "%s" already set: %s' % (composite_property, str (__composite_properties[composite_property])))
if composite_property in component_properties:
raise BaseException ('composite property "%s" cannot have itself as a component' % composite_property)
- entry = { 'components': component_properties }
- __composite_properties [composite_property] = entry
+ __composite_properties[composite_property] = component_properties
-def expand_composite (property):
+def expand_composite(property):
result = [ property ]
- if __composite_properties.has_key (property):
- for p in __composite_properties [property]['components']:
- result.extend (expand_composite (p))
+ if __composite_properties.has_key(property):
+ for p in __composite_properties[property]:
+ result.extend(expand_composite(p))
return result
@@ -500,68 +577,66 @@
""" Expand all composite properties in the set so that all components
are explicitly expressed.
"""
- explicit_features = get_grist (properties)
+ explicit_features = set(p.feature() for p in properties)
result = []
# now expand composite features
for p in properties:
- expanded = expand_composite (p)
+ expanded = expand_composite(p)
for x in expanded:
if not x in result:
- f = get_grist (x)
+ f = x.feature()
- if f in __free_features:
+ if f.free():
result.append (x)
elif not x in properties: # x is the result of expansion
if not f in explicit_features: # not explicitly-specified
- if f in get_grist (result):
- raise FeatureConflict ("expansions of composite features result in "
- "conflicting values for '%s'\nvalues: '%s'\none contributing composite property was '%s'" % (f,
- get_values (f, result) + [replace_grist (x, '')], p))
+ if any(r.feature() == f for r in result):
+ raise FeatureConflict(
+ "expansions of composite features result in "
+ "conflicting values for '%s'\nvalues: '%s'\none contributing composite property was '%s'" %
+ (f.name(), [r.value() for r in result if r.feature() == f] + [x.value()], p))
else:
result.append (x)
- elif f in get_grist (result):
+ elif any(r.feature() == f for r in result):
raise FeatureConflict ("explicitly-specified values of non-free feature '%s' conflict\n"
"existing values: '%s'\nvalue from expanding '%s': '%s'" % (f,
- get_values (f, properties), p, replace_grist (x, '')))
+ [r.value() for r in result if r.feature() == f], p, x.value()))
else:
result.append (x)
return result
+# Uses Property
def is_subfeature_of (parent_property, f):
""" Return true iff f is an ordinary subfeature of the parent_property's
feature, or if f is a subfeature of the parent_property's feature
specific to the parent_property's value.
"""
- if not valid (f) or not 'subfeature' in __all_features [f]['attributes']:
+ if not f.subfeature():
return False
- specific_subfeature = __re_split_subfeatures.match (f)
+ p = f.parent()
+ if not p:
+ return False
- if specific_subfeature:
- # The feature has the form
- # <topfeature-topvalue:subfeature>,
- # e.g. <toolset-msvc:version>
- feature_value = split_top_feature(specific_subfeature.group(1))
- if replace_grist (feature_value [1], '<' + feature_value [0] + '>') == parent_property:
- return True
- else:
- # The feature has the form <topfeature-subfeature>,
- # e.g. <toolset-version>
- top_sub = split_top_feature (ungrist (f))
+ parent_feature = p[0]
+ parent_value = p[1]
+
+ if parent_feature != parent_property.feature():
+ return False
- if top_sub [1] and add_grist (top_sub [0]) == get_grist (parent_property):
- return True
+ if parent_value and parent_value != parent_property.value():
+ return False
- return False
+ return True
def __is_subproperty_of (parent_property, p):
""" As is_subfeature_of, for subproperties.
"""
- return is_subfeature_of (parent_property, get_grist (p))
+ return is_subfeature_of (parent_property, p.feature())
# Returns true iff the subvalue is valid for the feature. When the
@@ -572,24 +647,21 @@
if not value_string:
value_string = ''
- if not __subfeature_value_to_name.has_key(feature):
+ if not __subfeature_from_value.has_key(feature):
return False
- if not __subfeature_value_to_name[feature].has_key(value_string):
+ if not __subfeature_from_value[feature].has_key(value_string):
return False
- if not __subfeature_value_to_name[feature][value_string].has_key(subvalue):
+ if not __subfeature_from_value[feature][value_string].has_key(subvalue):
return False
- if __subfeature_value_to_name[feature][value_string][subvalue]\
+ if __subfeature_from_value[feature][value_string][subvalue]\
!= subfeature:
return False
return True
-
-
-
def implied_subfeature (feature, subvalue, value_string):
result = __find_implied_subfeature (feature, subvalue, value_string)
if not result:
@@ -598,6 +670,7 @@
return result
+# Uses Property
def expand (properties):
""" Given a property set which may consist of composite and implicit
properties and combined subfeature values, returns an expanded,
@@ -609,36 +682,10 @@
two values of a given non-free feature are directly expressed in the
input, an error is issued.
"""
- expanded = expand_subfeatures (properties)
+ expanded = expand_subfeatures(properties)
return expand_composites (expanded)
-
-def split_top_feature (feature_plus):
- """ Given an ungristed string, finds the longest prefix which is a
- top-level feature name followed by a dash, and return a pair
- consisting of the parts before and after that dash. More
- interesting than a simple split because feature names can contain
- dashes.
- """
- e = feature_plus.split ('-')
- f = e [0]
-
- v = None
- while e:
- if add_grist (f) in __all_top_features:
- if len (e) > 1:
- after = '-'.join (e [1:])
- else:
- after = ''
-
- v = (f, after)
-
- e = e [1:]
- f = f + '-'
- if len (e): f += e [0]
-
- return v
-
+# Accepts list of Property objects
def add_defaults (properties):
""" Given a set of properties, add default values for features not
represented in the set.
@@ -657,35 +704,28 @@
and that's kind of strange.
"""
- result = [ x for x in properties ]
-
- for v in replace_grist (properties, ''):
- if v in properties:
- raise BaseException ("'add_defaults' requires explicitly specified features, but '%s' appears to be the value of an un-expanded implicit feature" % v)
-
- # We don't add default for elements with ":" inside. This catches:
- # 1. Conditional properties --- we don't want <variant>debug:<define>DEBUG
- # to be takes as specified value for <variant>
- # 2. Free properties with ":" in values. We don't care, since free properties
- # don't have defaults.
- xproperties = [ property for property in properties if __re_no_hyphen.match (property) ]
- missing_top = set.difference (__all_top_features, get_grist (xproperties))
- more = defaults (missing_top)
- result += more
- xproperties += more
+ result = [x for x in properties]
+ handled_features = set()
+ for p in properties:
+ # We don't add default for conditional properties. We don't want
+        # <variant>debug:<define>DEBUG to be taken as the specified value for <variant>
+ if not p.condition():
+ handled_features.add(p.feature())
+
+ missing_top = [f for f in __all_top_features if not f in handled_features]
+ more = defaults(missing_top)
+ result.extend(more)
+ for p in more:
+ handled_features.add(p.feature())
+
# Add defaults for subfeatures of features which are present
- for p in xproperties:
- gp = get_grist (p)
- s = []
- if __all_features.has_key (gp):
- s = __all_features [gp]['subfeatures']
- f = ungrist (gp)
-
- xbase = ['<%s-%s>' % (f, xs) for xs in s]
-
- missing_subs = set.difference (xbase, get_grist (result))
- result += defaults (__select_subfeatures (p, missing_subs))
+ for p in result[:]:
+ s = p.feature().subfeatures()
+ more = defaults([s for s in p.feature().subfeatures() if not s in handled_features])
+ for p in more:
+ handled_features.add(p.feature())
+ result.extend(more)
return result
@@ -698,47 +738,35 @@
grist, and sub-property values will be expressed as elements joined
to the corresponding main property.
"""
-# FXIME: the code below was in the original feature.jam file, however 'p' is not defined.
-# # Precondition checking
-# local implicits = [ set.intersection $(p:G=) : $(p:G) ] ;
-# if $(implicits)
-# {
-# error minimize requires an expanded property set, but \"$(implicits[1])\"
-# appears to be the value of an un-expanded implicit feature ;
-# }
-
+
# remove properties implied by composite features
components = []
for property in properties:
if __composite_properties.has_key (property):
- components.extend (__composite_properties [property]['components'])
-
- x = set.difference (properties, components)
+ components.extend(__composite_properties[property])
+ properties = b2.util.set.difference (properties, components)
# handle subfeatures and implicit features
- x = __move_subfeatures_to_the_end (x)
+
+ # move subfeatures to the end of the list
+ properties = [p for p in properties if not p.feature().subfeature()] +\
+ [p for p in properties if p.feature().subfeature()]
result = []
- while x:
- fullp = x [0]
- p = fullp
- f = get_grist (p)
- v = replace_grist (p, '')
-
- # eliminate features in implicit properties.
- if 'implicit' in __all_features [f]['attributes']:
- p = v
-
+ while properties:
+ p = properties[0]
+ f = p.feature()
+
# locate all subproperties of $(x[1]) in the property set
- subproperties = __select_subproperties (fullp, x)
+ subproperties = __select_subproperties (p, properties)
if subproperties:
# reconstitute the joined property name
subproperties.sort ()
- joined = p + '-' + '-'.join (replace_grist (subproperties, ''))
- result.append (joined)
+ joined = b2.build.property.Property(p.feature(), p.value() + '-' + '-'.join ([sp.value() for sp in subproperties]))
+ result.append(joined)
- x = set.difference (x [1:], subproperties)
+ properties = b2.util.set.difference(properties[1:], subproperties)
else:
# eliminate properties whose value is equal to feature's
@@ -749,11 +777,14 @@
# have been eliminated, any remaining property whose
# feature is the same as a component of a composite in the
# set must have a non-redundant value.
- if [fullp] != defaults ([f]) or 'symmetric' in attributes (f)\
- or get_grist (fullp) in get_grist (components):
+ if p.value() != f.default() or f.symmetric():
result.append (p)
+ #\
+ #or get_grist (fullp) in get_grist (components):
+ # FIXME: restore above
+
- x = x [1:]
+ properties = properties[1:]
return result
@@ -802,27 +833,29 @@
purposes and it needs help
"""
result = []
- matched_subs = []
+ matched_subs = set()
+ all_subs = set()
for p in properties:
- pg = get_grist (p)
- if not pg:
- raise BaseException ("Gristed variable exppected. Got '%s'." % p)
+ f = p.feature()
- if not 'subfeature' in __all_features [pg]['attributes']:
+ if not f.subfeature():
subs = __select_subproperties (p, properties)
+ if subs:
- matched_subs.extend (subs)
-
- subvalues = '-'.join (get_value (subs))
- if subvalues: subvalues = '-' + subvalues
+ matched_subs.update(subs)
- result.append (p + subvalues)
+ subvalues = '-'.join (sub.value() for sub in subs)
+ result.append(b2.build.property.Property(
+ p.feature(), p.value() + '-' + subvalues,
+ p.condition()))
+ else:
+ result.append(p)
else:
- all_subs.append (p)
+ all_subs.add(p)
# TODO: this variables are used just for debugging. What's the overhead?
- assert (set.equal (all_subs, matched_subs))
+ assert all_subs == matched_subs
return result
@@ -832,22 +865,6 @@
def __select_subproperties (parent_property, properties):
return [ x for x in properties if __is_subproperty_of (parent_property, x) ]
-def __move_subfeatures_to_the_end (properties):
- """ Helper for minimize, below - returns the list with
- the same properties, but where all subfeatures
- are in the end of the list
- """
- x1 = []
- x2 = []
- for p in properties:
- if 'subfeature' in __all_features [get_grist (p)]['attributes']:
- x2.append (p)
-
- else:
- x1.append (p)
-
- return x1 + x2
-
def __get_subfeature_name (subfeature, value_string):
if value_string == None:
prefix = ''
@@ -860,7 +877,7 @@
def __validate_feature_attributes (name, attributes):
for attribute in attributes:
if not attribute in __all_attributes:
- raise InvalidAttribute ("unknown attributes: '%s' in feature declaration: '%s'" % (str (set.difference (attributes, __all_attributes)), name))
+ raise InvalidAttribute ("unknown attributes: '%s' in feature declaration: '%s'" % (str (b2.util.set.difference (attributes, __all_attributes)), name))
if name in __all_features:
raise AlreadyDefined ("feature '%s' already defined" % name)
@@ -876,20 +893,6 @@
if not __all_features.has_key (feature):
raise BaseException ('unknown feature "%s"' % feature)
-def __add_to_subfeature_value_to_name_map (feature, value_string, subfeature_name, subvalues):
- # provide a way to get from the given feature or property and
- # subfeature value to the subfeature name.
- if value_string == None: value_string = ''
-
- if not __subfeature_value_to_name.has_key (feature):
- __subfeature_value_to_name [feature] = {}
-
- if not __subfeature_value_to_name [feature].has_key (value_string):
- __subfeature_value_to_name [feature][value_string] = {}
-
- for subvalue in subvalues:
- __subfeature_value_to_name [feature][value_string][subvalue] = subfeature_name
-
def __select_subfeatures (parent_property, features):
""" Given a property, return the subset of features consisting of all
Modified: branches/release/tools/build/v2/build/generators.py
==============================================================================
--- branches/release/tools/build/v2/build/generators.py (original)
+++ branches/release/tools/build/v2/build/generators.py 2011-01-25 13:06:12 EST (Tue, 25 Jan 2011)
@@ -1,5 +1,5 @@
# Status: being ported by Vladimir Prus
-# Base revision: 41557
+# Base revision: 48649
# TODO: replace the logging with dout
# Copyright Vladimir Prus 2002.
@@ -59,6 +59,7 @@
from b2.util.sequence import unique
import b2.util.sequence as sequence
from b2.manager import get_manager
+import b2.build.type
def reset ():
""" Clear the module state. This is mainly for testing purposes.
@@ -66,6 +67,7 @@
global __generators, __type_to_generators, __generators_for_toolset, __construct_stack
global __overrides, __active_generators
global __viable_generators_cache, __viable_source_types_cache
+ global __vstg_cached_generators, __vst_cached_types
__generators = {}
__type_to_generators = {}
@@ -78,6 +80,9 @@
__viable_source_types_cache = {}
__active_generators = []
+ __vstg_cached_generators = []
+ __vst_cached_types = []
+
reset ()
_re_separate_types_prefix_and_postfix = re.compile ('([^\\(]*)(\\((.*)%(.*)\\))?')
@@ -101,18 +106,58 @@
global __indent
__indent = __indent[0:-4]
+
+# Updates cached viable source target type information as needed after a new
+# derived target type gets added. This is needed because if a target type is a
+# viable source target type for some generator then all of the target type's
+# derived target types are automatically viable as source target types for the
+# same generator. Does nothing if a non-derived target type is passed to it.
+#
+def update_cached_information_with_a_new_type(type):
+
+ base_type = b2.build.type.base(type)
+
+ if base_type:
+ for g in __vstg_cached_generators:
+ if base_type in __viable_source_types_cache.get(g, []):
+ __viable_source_types_cache[g].append(type)
+
+ for t in __vst_cached_types:
+ if base_type in __viable_source_types_cache.get(t, []):
+ __viable_source_types_cache[t].append(type)
+
+# Clears cached viable source target type information except for target types
+# and generators with all source types listed as viable. Should be called when
+# something invalidates those cached values by possibly causing some new source
+# types to become viable.
+#
+def invalidate_extendable_viable_source_target_type_cache():
+
+ global __vstg_cached_generators
+ generators_with_cached_source_types = __vstg_cached_generators
+ __vstg_cached_generators = []
+
+ for g in generators_with_cached_source_types:
+ if __viable_source_types_cache.has_key(g):
+ if __viable_source_types_cache[g] == ["*"]:
+ __vstg_cached_generators.append(g)
+ else:
+ del __viable_source_types_cache[g]
+
+ global __vst_cached_types
+ types_with_cached_sources_types = __vst_cached_types
+ __vst_cached_types = []
+ for t in types_with_cached_sources_types:
+ if __viable_source_types_cache.has_key(t):
+ if __viable_source_types_cache[t] == ["*"]:
+ __vst_cached_types.append(t)
+ else:
+ del __viable_source_types_cache[t]
+
def dout(message):
if debug():
print __indent + message
-def normalize_target_list (targets):
- """ Takes a vector of 'virtual-target' instances and makes a normalized
- representation, which is the same for given set of targets,
- regardless of their order.
- """
- return (targets[0], targets[1].sort ())
-
-
class Generator:
""" Creates a generator.
manager: the build manager.
@@ -138,7 +183,7 @@
NOTE: all subclasses must have a similar signature for clone to work!
"""
- def __init__ (self, id, composing, source_types, target_types_and_names, requirements):
+ def __init__ (self, id, composing, source_types, target_types_and_names, requirements = []):
assert(not isinstance(source_types, str))
assert(not isinstance(target_types_and_names, str))
self.id_ = id
@@ -215,7 +260,7 @@
self.requirements_)
- def id (self):
+ def id(self):
return self.id_
def source_types (self):
@@ -238,7 +283,7 @@
"""
return self.requirements_
- def match_rank (self, property_set_to_match):
+ def match_rank (self, ps):
""" Returns true if the generator can be run with the specified
properties.
"""
@@ -250,17 +295,18 @@
property_requirements = []
feature_requirements = []
+        # This uses strings because generator requirements allow
+ # the '<feature>' syntax without value and regular validation
+ # is not happy about that.
for r in all_requirements:
if get_value (r):
property_requirements.append (r)
else:
feature_requirements.append (r)
-
- properties_to_match = property_set_to_match.raw ()
-
- return set.contains (property_requirements, properties_to_match) \
- and set.contains (feature_requirements, get_grist (properties_to_match))
+
+ return all(ps.get(get_grist(s)) == [get_value(s)] for s in property_requirements) \
+ and all(ps.get(get_grist(s)) for s in feature_requirements)
def run (self, project, name, prop_set, sources):
""" Tries to invoke this generator on the given sources. Returns a
@@ -348,6 +394,21 @@
return result
+ def determine_target_name(self, fullname):
+ # Determine target name from fullname (maybe including path components)
+ # Place optional prefix and postfix around basename
+
+ dir = os.path.dirname(fullname)
+ name = os.path.basename(fullname)
+
+ if dir and not ".." in dir and not os.path.isabs(dir):
+ # Relative path is always relative to the source
+ # directory. Retain it, so that users can have files
+            # with the same name in two different subdirectories.
+ name = dir + "/" + name
+
+ return name
+
def determine_output_name(self, sources):
"""Determine the name of the produced target from the
names of the sources."""
@@ -370,8 +431,8 @@
"%s: source targets have different names: cannot determine target name"
% (self.id_))
- # Names of sources might include directory. We should strip it.
- return os.path.basename(name)
+ # Names of sources might include directory. We should strip it.
+ return self.determine_target_name(sources[0].name())
def generated_targets (self, sources, prop_set, project, name):
@@ -400,19 +461,24 @@
name = self.determine_output_name(sources)
# Assign an action for each target
- action = self.action_class()
- a = action (project.manager(), sources, self.id_, prop_set)
+ action = self.action_class()
+ a = action(project.manager(), sources, self.id_, prop_set)
# Create generated target for each target type.
targets = []
pre = self.name_prefix_
post = self.name_postfix_
for t in self.target_types_:
- generated_name = pre[0] + name + post[0]
+ basename = os.path.basename(name)
+ idx = basename.find(".")
+ if idx != -1:
+ basename = basename[:idx]
+ generated_name = pre[0] + basename + post[0]
+ generated_name = os.path.join(os.path.dirname(name), generated_name)
pre = pre[1:]
post = post[1:]
- targets.append(virtual_target.FileTarget(generated_name, False, t, project, a))
+ targets.append(virtual_target.FileTarget(generated_name, t, project, a))
return [ project.manager().virtual_targets().register(t) for t in targets ]
@@ -515,20 +581,20 @@
real_source_type = source.type ()
# If there are no source types, we can consume anything
- source_types = self.source_types
+ source_types = self.source_types()
if not source_types:
- source_types = [real_source_type]
+ source_types = [real_source_type]
consumed = []
missing_types = []
- for st in self.source_types_:
+ for st in source_types:
# The 'source' if of right type already)
if real_source_type == st or type.is_derived (real_source_type, st):
consumed.append (source)
else:
missing_types.append (st)
-
+
return (consumed, missing_types)
def action_class (self):
@@ -546,7 +612,7 @@
def register (g):
""" Registers new generator instance 'g'.
"""
- id = g.id ()
+ id = g.id()
__generators [id] = g
@@ -576,6 +642,24 @@
__generators_for_toolset.setdefault(base, []).append(g)
+ # After adding a new generator that can construct new target types, we need
+ # to clear the related cached viable source target type information for
+ # constructing a specific target type or using a specific generator. Cached
+ # viable source target type lists affected by this are those containing any
+ # of the target types constructed by the new generator or any of their base
+ # target types.
+ #
+ # A more advanced alternative to clearing that cached viable source target
+ # type information would be to expand it with additional source types or
+ # even better - mark it as needing to be expanded on next use.
+ #
+ # For now we just clear all the cached viable source target type information
+ # that does not simply state 'all types' and may implement a more detailed
+ # algorithm later on if it becomes needed.
+
+ invalidate_extendable_viable_source_target_type_cache()
+
+
def register_standard (id, source_types, target_types, requirements = []):
""" Creates new instance of the 'generator' class and registers it.
Returns the creates instance.
@@ -619,11 +703,19 @@
of calling itself recusrively on source types.
"""
generators = []
-
- t = type.all_bases (target_type)
+
+ # 't0' is the initial list of target types we need to process to get a list
+ # of their viable source target types. New target types will not be added to
+ # this list.
+ t0 = type.all_bases (target_type)
+
+
+ # 't' is the list of target types which have not yet been processed to get a
+ # list of their viable source target types. This list will get expanded as
+ # we locate more target types to process.
+ t = t0
result = []
- # 't' is the list of types which are not yet processed
while t:
# Find all generators for current type.
# Unlike 'find_viable_generators' we don't care about prop_set.
@@ -645,19 +737,29 @@
all = type.all_derived (source_type)
for n in all:
if not n in result:
- t.append (n)
+
+ # Here there is no point in adding target types to
+ # the list of types to process in case they are or
+ # have already been on that list. We optimize this
+ # check by realizing that we only need to avoid the
+ # original target type's base types. Other target
+ # types that are or have been on the list of target
+ # types to process have been added to the 'result'
+ # list as well and have thus already been eliminated
+ # by the previous if.
+ if not n in t0:
+ t.append (n)
result.append (n)
-
- result = unique (result)
-
+
return result
def viable_source_types (target_type):
""" Helper rule, caches the result of '__viable_source_types_real'.
"""
- if not __viable_source_types_cache.has_key (target_type):
- __viable_source_types_cache [target_type] = __viable_source_types_real (target_type)
+ if not __viable_source_types_cache.has_key(target_type):
+ __vst_cached_types.append(target_type)
+ __viable_source_types_cache [target_type] = __viable_source_types_real (target_type)
return __viable_source_types_cache [target_type]
def viable_source_types_for_generator_real (generator):
@@ -678,20 +780,22 @@
else:
result = []
for s in source_types:
- result += type.all_derived (s) + viable_source_types (s)
- result = unique (result)
- if "*" in result:
- result = ["*"]
- return result
+ viable_sources = viable_source_types(s)
+ if viable_sources == "*":
+ result = ["*"]
+ break
+ else:
+ result.extend(type.all_derived(s) + viable_sources)
+ return unique(result)
def viable_source_types_for_generator (generator):
""" Caches the result of 'viable_source_types_for_generator'.
"""
- key = str (generator)
- if not __viable_source_types_cache.has_key (key):
- __viable_source_types_cache [key] = viable_source_types_for_generator_real (generator)
+ if not __viable_source_types_cache.has_key(generator):
+ __vstg_cached_generators.append(generator)
+ __viable_source_types_cache[generator] = viable_source_types_for_generator_real (generator)
- return __viable_source_types_cache [key]
+ return __viable_source_types_cache[generator]
def try_one_generator_really (project, name, generator, target_type, properties, sources):
""" Returns usage requirements + list of created targets.
@@ -784,7 +888,7 @@
"""
for t in targets:
if not t.type ():
- raise BaseException ("target '%s' has no type" % str (t))
+ get_manager().errors()("target '%s' has no type" % str (t))
def find_viable_generators_aux (target_type, prop_set):
""" Returns generators which can be used to construct target of specified type
@@ -840,7 +944,7 @@
m = g.match_rank(prop_set)
if m:
dout(" is viable")
- viable_generators.append(g)
+ viable_generators.append(g)
return viable_generators
@@ -848,6 +952,8 @@
key = target_type + '.' + str (prop_set)
l = __viable_generators_cache.get (key, None)
+ if not l:
+ l = []
if not l:
l = find_viable_generators_aux (target_type, prop_set)
@@ -860,6 +966,8 @@
# TODO: is this really used?
if not g in __active_generators:
viable_generators.append (g)
+ else:
+ dout(" generator %s is active, discarding" % g.id())
# Generators which override 'all'.
all_overrides = []
@@ -884,6 +992,7 @@
for g in viable_generators:
if not g.id () in overriden_ids:
result.append (g)
+
return result
@@ -894,7 +1003,7 @@
viable_generators = find_viable_generators (target_type, prop_set)
result = []
-
+
project.manager ().logger ().log (__name__, "*** %d viable generators" % len (viable_generators))
generators_that_succeeded = []
@@ -928,7 +1037,7 @@
return result;
-def construct (project, name, target_type, prop_set, sources):
+def construct (project, name, target_type, prop_set, sources, top_level=False):
""" Attempts to create target of 'target-type' with 'properties'
from 'sources'. The 'sources' are treated as a collection of
*possible* ingridients -- i.e. it is not required to consume
@@ -938,11 +1047,21 @@
Returns a list of target. When this invocation is first instance of
'construct' in stack, returns only targets of requested 'target-type',
otherwise, returns also unused sources and additionally generated
- targets.
+ targets.
+
+ If 'top-level' is set, does not suppress generators that are already
+ used in the stack. This may be useful in cases where a generator
+ has to build a metatarget -- for example a target corresponding to
+ built tool.
"""
- # TODO: Why is global needed here?
+
+ global __active_generators
+ if top_level:
+ saved_active = __active_generators
+ __active_generators = []
+
global __construct_stack
- if __construct_stack:
+ if not __construct_stack:
__ensure_type (sources)
__construct_stack.append (1)
@@ -963,5 +1082,8 @@
__construct_stack = __construct_stack [1:]
+ if top_level:
+ __active_generators = saved_active
+
return result
Modified: branches/release/tools/build/v2/build/project.jam
==============================================================================
--- branches/release/tools/build/v2/build/project.jam (original)
+++ branches/release/tools/build/v2/build/project.jam 2011-01-25 13:06:12 EST (Tue, 25 Jan 2011)
@@ -294,18 +294,22 @@
: "Filenames are: " $(jamfile-to-load:D=) ;
}
- # Initialize the Jamfile module before loading.
- #
- initialize $(jamfile-module) : [ path.parent $(jamfile-to-load) ]
- : $(jamfile-to-load:BS) ;
-
- local saved-project = $(.current-project) ;
- # Now load the Jamfile in it's own context. Initialization might have loaded
- # parent Jamfiles, which might have loaded the current Jamfile with
- # use-project. Do a final check to make sure it's not loaded already.
+ # Now load the Jamfile in its own context.
+ # The call to 'initialize' may load parent Jamfile, which might have
+ # 'use-project' statement that causes a second attempt to load the
+ # same project we're loading now. Checking inside .jamfile-modules
+ # prevents that second attempt from messing up.
if ! $(jamfile-module) in $(.jamfile-modules)
{
.jamfile-modules += $(jamfile-module) ;
+
+ # Initialize the Jamfile module before loading.
+ #
+ initialize $(jamfile-module) : [ path.parent $(jamfile-to-load) ]
+ : $(jamfile-to-load:BS) ;
+
+ local saved-project = $(.current-project) ;
+
mark-as-user $(jamfile-module) ;
modules.load $(jamfile-module) : [ path.native $(jamfile-to-load) ] : . ;
if [ MATCH ($(JAMROOT)) : $(jamfile-to-load:BS) ]
@@ -316,7 +320,7 @@
load-aux $(jamfile-module) : [ path.native $(jamfile) ] ;
}
}
- }
+
# Now do some checks.
if $(.current-project) != $(saved-project)
{
@@ -346,6 +350,7 @@
}
}
}
+ }
}
Modified: branches/release/tools/build/v2/build/project.py
==============================================================================
--- branches/release/tools/build/v2/build/project.py (original)
+++ branches/release/tools/build/v2/build/project.py 2011-01-25 13:06:12 EST (Tue, 25 Jan 2011)
@@ -1,5 +1,5 @@
-# Status: being ported by Vladimir Prus
-# Base revision: 42507
+# Status: ported.
+# Base revision: 64488
# Copyright 2002, 2003 Dave Abrahams
# Copyright 2002, 2005, 2006 Rene Rivera
@@ -51,12 +51,15 @@
import string
import imp
import traceback
+import b2.util.option as option
+
+from b2.util import record_jam_to_value_mapping, qualify_jam_action
class ProjectRegistry:
def __init__(self, manager, global_build_dir):
self.manager = manager
- self.global_build_dir = None
+ self.global_build_dir = global_build_dir
self.project_rules_ = ProjectRules(self)
# The target corresponding to the project being loaded now
@@ -89,6 +92,8 @@
# via 'using' and 'import' rules in Jamfiles.
self.loaded_tool_modules_ = {}
+ self.loaded_tool_module_path_ = {}
+
# Map from project target to the list of
# (id,location) pairs corresponding to all 'use-project'
# invocations.
@@ -133,7 +138,7 @@
# If Jamfile is already loaded, don't try again.
if not mname in self.jamfile_modules:
- self.load_jamfile(jamfile_location)
+ self.load_jamfile(jamfile_location, mname)
# We want to make sure that child project are loaded only
# after parent projects. In particular, because parent projects
@@ -211,9 +216,11 @@
# must be placed in the directory referred by id.
project_module = self.module_name(location)
- if not project_module in self.jamfile_modules and \
- b2.util.path.glob([location], self.JAMROOT + self.JAMFILE):
- project_module = self.load(location)
+ if not project_module in self.jamfile_modules:
+ if b2.util.path.glob([location], self.JAMROOT + self.JAMFILE):
+ project_module = self.load(location)
+ else:
+ project_module = None
return project_module
@@ -280,46 +287,56 @@
Please consult the documentation at 'http://boost.org/boost-build2'."""
% (dir, string.join(self.JAMFILE)))
- return jamfile_glob[0]
+ if jamfile_glob:
+ return jamfile_glob[0]
- def load_jamfile(self, dir):
+ def load_jamfile(self, dir, jamfile_module):
"""Load a Jamfile at the given directory. Returns nothing.
Will attempt to load the file as indicated by the JAMFILE patterns.
Effect of calling this rule twice with the same 'dir' is underfined."""
# See if the Jamfile is where it should be.
+ is_jamroot = False
jamfile_to_load = b2.util.path.glob([dir], self.JAMROOT)
if not jamfile_to_load:
jamfile_to_load = self.find_jamfile(dir)
else:
+ if len(jamfile_to_load) > 1:
+ get_manager().errors()(("Multiple Jamfiles found at '%s'\n" +\
+ "Filenames are: %s")
+ % (dir, [os.path.basename(j) for j in jamfile_to_load]))
+
+ is_jamroot = True
jamfile_to_load = jamfile_to_load[0]
-
- # The module of the jamfile.
- dir = os.path.realpath(os.path.dirname(jamfile_to_load))
-
- jamfile_module = self.module_name (dir)
- # Initialize the jamfile module before loading.
- #
- self.initialize(jamfile_module, dir, os.path.basename(jamfile_to_load))
-
- saved_project = self.current_project
+ dir = os.path.dirname(jamfile_to_load)
+ if not dir:
+ dir = "."
self.used_projects[jamfile_module] = []
- # Now load the Jamfile in it's own context.
- # Initialization might have load parent Jamfiles, which might have
- # loaded the current Jamfile with use-project. Do a final check to make
- # sure it's not loaded already.
+ # Now load the Jamfile in its own context.
+ # The call to 'initialize' may load parent Jamfile, which might have
+ # 'use-project' statement that causes a second attempt to load the
+ # same project we're loading now. Checking inside .jamfile-modules
+ # prevents that second attempt from messing up.
if not jamfile_module in self.jamfile_modules:
self.jamfile_modules[jamfile_module] = True
- # FIXME:
- # mark-as-user $(jamfile-module) ;
+ # Initialize the jamfile module before loading.
+ #
+ self.initialize(jamfile_module, dir, os.path.basename(jamfile_to_load))
+
+ saved_project = self.current_project
bjam.call("load", jamfile_module, jamfile_to_load)
basename = os.path.basename(jamfile_to_load)
-
+
+ if is_jamroot:
+ jamfile = self.find_jamfile(dir, no_errors=True)
+ if jamfile:
+ bjam.call("load", jamfile_module, jamfile)
+
# Now do some checks
if self.current_project != saved_project:
self.manager.errors()(
@@ -331,7 +348,7 @@
actual value %s""" % (jamfile_module, saved_project, self.current_project))
if self.global_build_dir:
- id = self.attribute(jamfile_module, "id")
+ id = self.attributeDefault(jamfile_module, "id", None)
project_root = self.attribute(jamfile_module, "project-root")
location = self.attribute(jamfile_module, "location")
@@ -357,7 +374,6 @@
the same file.
"""
- self.initialize(jamfile_module)
self.used_projects[jamfile_module] = []
bjam.call("load", jamfile_module, file)
self.load_used_projects(jamfile_module)
@@ -385,34 +401,43 @@
# source paths are correct.
if not location:
location = ""
- else:
- location = b2.util.path.relpath(os.getcwd(), location)
attributes = ProjectAttributes(self.manager, location, module_name)
self.module2attributes[module_name] = attributes
+ python_standalone = False
if location:
attributes.set("source-location", [location], exact=1)
- else:
- attributes.set("source-location", "", exact=1)
+ elif not module_name in ["test-config", "site-config", "user-config", "project-config"]:
+ # This is a standalone project with known location. Set source location
+ # so that it can declare targets. This is intended so that you can put
+ # a .jam file in your sources and use it via 'using'. Standard modules
+ # (in 'tools' subdir) may not assume source dir is set.
+ module = sys.modules[module_name]
+ attributes.set("source-location", self.loaded_tool_module_path_[module_name], exact=1)
+ python_standalone = True
attributes.set("requirements", property_set.empty(), exact=True)
attributes.set("usage-requirements", property_set.empty(), exact=True)
- attributes.set("default-build", [], exact=True)
+ attributes.set("default-build", property_set.empty(), exact=True)
attributes.set("projects-to-build", [], exact=True)
attributes.set("project-root", None, exact=True)
attributes.set("build-dir", None, exact=True)
- self.project_rules_.init_project(module_name)
+ self.project_rules_.init_project(module_name, python_standalone)
jamroot = False
parent_module = None;
- if module_name == "site-config":
+ if module_name == "test-config":
# No parent
pass
+ elif module_name == "site-config":
+ parent_module = "test-config"
elif module_name == "user-config":
parent_module = "site-config"
+ elif module_name == "project-config":
+ parent_module = "user-config"
elif location and not self.is_jamroot(basename):
# We search for parent/project-root only if jamfile was specified
# --- i.e
@@ -422,7 +447,12 @@
# It's either jamroot, or standalone project.
# If it's jamroot, inherit from user-config.
if location:
- parent_module = "user-config" ;
+ # If project-config module exist, inherit from it.
+ if self.module2attributes.has_key("project-config"):
+ parent_module = "project-config"
+ else:
+ parent_module = "user-config" ;
+
jamroot = True ;
if parent_module:
@@ -482,8 +512,8 @@
parent_dir = os.path.join(os.getcwd(), parent_location)
build_dir = os.path.join(parent_build_dir,
- b2.util.path.relpath(parent_dir,
- our_dir))
+ os.path.relpath(our_dir, parent_dir))
+ attributes.set("build-dir", build_dir, exact=True)
def register_id(self, id, module):
"""Associate the given id with the given project module."""
@@ -493,6 +523,9 @@
"""Returns the project which is currently being loaded."""
return self.current_project
+ def set_current(self, c):
+ self.current_project = c
+
def push_current(self, project):
"""Temporary changes the current project to 'project'. Should
be followed by 'pop-current'."""
@@ -515,8 +548,7 @@
try:
return self.module2attributes[project].get(attribute)
except:
- print "Sucks", project, attribute
- raise "Sucks"
+ raise BaseException("No attribute '%s' for project %s" % (attribute, project))
def attributeDefault(self, project, attribute, default):
"""Returns the value of the specified attribute in the
@@ -543,7 +575,7 @@
# that id is not equal to the 'id' parameter.
if self.id2module.has_key(id) and self.id2module[id] != project_module:
self.manager.errors()(
-"""Attempt to redeclare already existing project id '%s'""" % id)
+"""Attempt to redeclare already existing project id '%s' at location '%s'""" % (id, location))
self.id2module[id] = project_module
self.current_module = saved_project
@@ -558,12 +590,12 @@
return self.project_rules_
def glob_internal(self, project, wildcards, excludes, rule_name):
- location = project.get("source-location")
+ location = project.get("source-location")[0]
result = []
callable = b2.util.path.__dict__[rule_name]
- paths = callable(location, wildcards, excludes)
+ paths = callable([location], wildcards, excludes)
has_dir = 0
for w in wildcards:
if os.path.dirname(w):
@@ -571,11 +603,21 @@
break
if has_dir or rule_name != "glob":
+ result = []
# The paths we've found are relative to current directory,
# but the names specified in sources list are assumed to
# be relative to source directory of the corresponding
- # prject. So, just make the name absolute.
- result = [os.path.join(os.getcwd(), p) for p in paths]
+ # project. Either translate them or make absolute.
+
+ for p in paths:
+ rel = os.path.relpath(p, location)
+ # If the path is below source location, use relative path.
+ if not ".." in rel:
+ result.append(rel)
+ else:
+ # Otherwise, use full path just to avoid any ambiguities.
+ result.append(os.path.abspath(p))
+
else:
# There were not directory in wildcard, so the files are all
# in the source directory of the project. Just drop the
@@ -585,29 +627,36 @@
return result
def load_module(self, name, extra_path=None):
- """Classic Boost.Build 'modules' are in fact global variables.
- Therefore, try to find an already loaded Python module called 'name' in sys.modules.
- If the module ist not loaded, find it Boost.Build search
- path and load it. The new module is not entered in sys.modules.
- The motivation here is to have disjoint namespace of modules
- loaded via 'import/using' in Jamfile, and ordinary Python
- modules. We don't want 'using foo' in Jamfile to load ordinary
- Python module 'foo' which is going to not work. And we
- also don't want 'import foo' in regular Python module to
- accidentally grab module named foo that is internal to
- Boost.Build and intended to provide interface to Jamfiles."""
+ """Load a Python module that should be useable from Jamfiles.
+ There are generally two types of modules Jamfiles might want to
+ use:
+ - Core Boost.Build. Those are imported using plain names, e.g.
+ 'toolset', so this function checks if we have module named
+ b2.package.module already.
+ - Python modules in the same directory as Jamfile. We don't
+ want to even temporary add Jamfile's directory to sys.path,
+ since then we might get naming conflicts between standard
+ Python modules and those.
+ """
+
+ # See if we loaded module of this name already
existing = self.loaded_tool_modules_.get(name)
if existing:
return existing
+ # See if we have a module b2.whatever.<name>, where <name>
+ # is what is passed to this function
modules = sys.modules
for class_name in modules:
- if name is class_name:
+ parts = class_name.split('.')
+ if name is class_name or parts[0] == "b2" \
+ and parts[-1] == name.replace("-", "_"):
module = modules[class_name]
self.loaded_tool_modules_[name] = module
return module
-
+
+ # Lookup a module in BOOST_BUILD_PATH
path = extra_path
if not path:
path = []
@@ -622,14 +671,14 @@
if not location:
self.manager.errors()("Cannot find module '%s'" % name)
- mname = "__build_build_temporary__"
+ mname = name + "__for_jamfile"
file = open(location)
- try:
+ try:
# TODO: this means we'll never make use of .pyc module,
# which might be a problem, or not.
+ self.loaded_tool_module_path_[mname] = location
module = imp.load_module(mname, file, os.path.basename(location),
(".py", "r", imp.PY_SOURCE))
- del sys.modules[mname]
self.loaded_tool_modules_[name] = module
return module
finally:
@@ -694,7 +743,7 @@
self.attributes = {}
self.usage_requirements = None
- def set(self, attribute, specification, exact):
+ def set(self, attribute, specification, exact=False):
"""Set the named attribute from the specification given by the user.
The value actually set may be different."""
@@ -717,15 +766,16 @@
non_free = property.remove("free", unconditional)
if non_free:
- pass
- # FIXME:
- #errors.error "usage-requirements" $(specification) "have non-free properties" $(non-free) ;
+ get_manager().errors()("usage-requirements %s have non-free properties %s" \
+ % (specification, non_free))
- t = property.translate_paths(specification, self.location)
+ t = property.translate_paths(
+ property.create_from_strings(specification, allow_condition=True),
+ self.location)
existing = self.__dict__.get("usage-requirements")
if existing:
- new = property_set.create(existing.raw() + t)
+ new = property_set.create(existing.all() + t)
else:
new = property_set.create(t)
self.__dict__["usage-requirements"] = new
@@ -737,13 +787,20 @@
elif attribute == "source-location":
source_location = []
for path in specification:
- source_location += os.path.join(self.location, path)
+ source_location.append(os.path.join(self.location, path))
self.__dict__["source-location"] = source_location
elif attribute == "build-dir":
- self.__dict__["build-dir"] = os.path.join(self.location, specification)
-
- elif not attribute in ["id", "default-build", "location",
+ self.__dict__["build-dir"] = os.path.join(self.location, specification[0])
+
+ elif attribute == "id":
+ id = specification[0]
+ if id[0] != '/':
+ id = "/" + id
+ self.manager.projects().register_id(id, self.project_module)
+ self.__dict__["id"] = id
+
+ elif not attribute in ["default-build", "location",
"source-location", "parent",
"projects-to-build", "project-root"]:
self.manager.errors()(
@@ -788,31 +845,44 @@
self.rules = {}
self.local_names = [x for x in self.__class__.__dict__
if x not in ["__init__", "init_project", "add_rule",
- "error_reporting_wrapper", "add_rule_for_type"]]
+ "error_reporting_wrapper", "add_rule_for_type", "reverse"]]
self.all_names_ = [x for x in self.local_names]
+ def _import_rule(self, bjam_module, name, callable):
+ if hasattr(callable, "bjam_signature"):
+ bjam.import_rule(bjam_module, name, self.make_wrapper(callable), callable.bjam_signature)
+ else:
+ bjam.import_rule(bjam_module, name, self.make_wrapper(callable))
+
+
def add_rule_for_type(self, type):
- rule_name = type.lower();
+ rule_name = type.lower().replace("_", "-")
- def xpto (name, sources, requirements = [], default_build = None, usage_requirements = []):
+ def xpto (name, sources = [], requirements = [], default_build = [], usage_requirements = []):
return self.manager_.targets().create_typed_target(
type, self.registry.current(), name[0], sources,
requirements, default_build, usage_requirements)
- self.add_rule(type.lower(), xpto)
+ self.add_rule(rule_name, xpto)
def add_rule(self, name, callable):
self.rules[name] = callable
self.all_names_.append(name)
+ # Add new rule at global bjam scope. This might not be ideal,
+ # added because if a jamroot does 'import foo' where foo calls
+ # add_rule, we need to import new rule to jamroot scope, and
+ # I'm lazy to do this now.
+ self._import_rule("", name, callable)
+
def all_names(self):
return self.all_names_
- def call_and_report_errors(self, callable, *args):
+ def call_and_report_errors(self, callable, *args, **kw):
result = None
try:
self.manager_.errors().push_jamfile_context()
- result = callable(*args)
+ result = callable(*args, **kw)
except ExceptionWithUserContext, e:
e.report()
except Exception, e:
@@ -822,19 +892,31 @@
e.report()
finally:
self.manager_.errors().pop_jamfile_context()
-
+
return result
def make_wrapper(self, callable):
"""Given a free-standing function 'callable', return a new
callable that will call 'callable' and report all exceptins,
using 'call_and_report_errors'."""
- def wrapper(*args):
- self.call_and_report_errors(callable, *args)
+ def wrapper(*args, **kw):
+ return self.call_and_report_errors(callable, *args, **kw)
return wrapper
- def init_project(self, project_module):
+ def init_project(self, project_module, python_standalone=False):
+ if python_standalone:
+ m = sys.modules[project_module]
+
+ for n in self.local_names:
+ if n != "import_":
+ setattr(m, n, getattr(self, n))
+
+ for n in self.rules:
+ setattr(m, n, self.rules[n])
+
+ return
+
for n in self.local_names:
# Using 'getattr' here gives us a bound method,
# while using self.__dict__[r] would give unbound one.
@@ -845,12 +927,10 @@
else:
n = string.replace(n, "_", "-")
- bjam.import_rule(project_module, n,
- self.make_wrapper(v))
+ self._import_rule(project_module, n, v)
for n in self.rules:
- bjam.import_rule(project_module, n,
- self.make_wrapper(self.rules[n]))
+ self._import_rule(project_module, n, self.rules[n])
def project(self, *args):
@@ -863,9 +943,7 @@
args = args[1:]
if id:
- if id[0] != '/':
- id = '/' + id
- self.registry.register_id (id, jamfile_module)
+ attributes.set('id', [id])
explicit_build_dir = None
for a in args:
@@ -883,19 +961,24 @@
# If we try to set build dir for user-config, we'll then
# try to inherit it, with either weird, or wrong consequences.
if location and location == attributes.get("project-root"):
+ # Re-read the project id, since it might have been changed in
+ # the project's attributes.
+ id = attributes.get('id')
+
# This is Jamroot.
if id:
if explicit_build_dir and os.path.isabs(explicit_build_dir):
- self.register.manager.errors()(
+ self.registry.manager.errors()(
"""Absolute directory specified via 'build-dir' project attribute
Don't know how to combine that with the --build-dir option.""")
rid = id
if rid[0] == '/':
rid = rid[1:]
-
- p = os.path.join(self.registry.global_build_dir,
- rid, explicit_build_dir)
+
+ p = os.path.join(self.registry.global_build_dir, rid)
+ if explicit_build_dir:
+ p = os.path.join(p, explicit_build_dir)
attributes.set("build-dir", p, exact=1)
elif explicit_build_dir:
self.registry.manager.errors()(
@@ -914,7 +997,9 @@
path is adjusted to be relative to the invocation directory. The given
value path is taken to be either absolute, or relative to this project
root."""
- self.registry.current().add_constant(name[0], value, path=1)
+ if len(value) > 1:
+ self.registry.manager.errors()("path constant should have one element")
+ self.registry.current().add_constant(name[0], value[0], path=1)
def use_project(self, id, where):
# See comment in 'load' for explanation why we record the
@@ -930,9 +1015,10 @@
attributes.set("projects-to-build", now + dir, exact=True)
def explicit(self, target_names):
- t = self.registry.current()
- for n in target_names:
- t.mark_target_as_explicit(n)
+ self.registry.current().mark_targets_as_explicit(target_names)
+
+ def always(self, target_names):
+ self.registry.current().mark_targets_as_alays(target_names)
def glob(self, wildcards, excludes=None):
return self.registry.glob_internal(self.registry.current(),
@@ -950,7 +1036,7 @@
bad = 1
if bad:
- self.registry.manager().errors()(
+ self.registry.manager.errors()(
"The patterns to 'glob-tree' may not include directory")
return self.registry.glob_internal(self.registry.current(),
wildcards, excludes, "glob_tree")
@@ -962,9 +1048,8 @@
# will expect the module to be found even though
# the directory is not in BOOST_BUILD_PATH.
# So temporary change the search path.
- jamfile_module = self.registry.current().project_module()
- attributes = self.registry.attributes(jamfile_module)
- location = attributes.get("location")
+ current = self.registry.current()
+ location = current.get('location')
m = self.registry.load_module(toolset[0], [location])
if not m.__dict__.has_key("init"):
@@ -972,20 +1057,32 @@
"Tool module '%s' does not define the 'init' method" % toolset[0])
m.init(*args)
+ # The above might have clobbered .current-project. Restore the correct
+ # value.
+ self.registry.set_current(current)
def import_(self, name, names_to_import=None, local_names=None):
name = name[0]
+ py_name = name
+ if py_name == "os":
+ py_name = "os_j"
jamfile_module = self.registry.current().project_module()
attributes = self.registry.attributes(jamfile_module)
location = attributes.get("location")
- m = self.registry.load_module(name, [location])
+ saved = self.registry.current()
+
+ m = self.registry.load_module(py_name, [location])
for f in m.__dict__:
v = m.__dict__[f]
+ f = f.replace("_", "-")
if callable(v):
- bjam.import_rule(jamfile_module, name + "." + f, v)
+ qn = name + "." + f
+ self._import_rule(jamfile_module, qn, v)
+ record_jam_to_value_mapping(qualify_jam_action(qn, jamfile_module), v)
+
if names_to_import:
if not local_names:
@@ -996,7 +1093,9 @@
"""The number of names to import and local names do not match.""")
for n, l in zip(names_to_import, local_names):
- bjam.import_rule(jamfile_module, l, m.__dict__[n])
+ self._import_rule(jamfile_module, l, m.__dict__[n])
+
+ self.registry.set_current(saved)
def conditional(self, condition, requirements):
"""Calculates conditional requirements for multiple requirements
@@ -1012,3 +1111,10 @@
return [c + r for r in requirements]
else:
return [c + ":" + r for r in requirements]
+
+ def option(self, name, value):
+ name = name[0]
+ if not name in ["site-config", "user-config", "project-config"]:
+ get_manager().errors()("The 'option' rule may be used only in site-config, user-config or project-config")
+
+ option.set(name, value[0])
Modified: branches/release/tools/build/v2/build/property.jam
==============================================================================
--- branches/release/tools/build/v2/build/property.jam (original)
+++ branches/release/tools/build/v2/build/property.jam 2011-01-25 13:06:12 EST (Tue, 25 Jan 2011)
@@ -112,12 +112,48 @@
# Separate condition and property.
local s = [ MATCH (.*):(<.*) : $(p) ] ;
# Split condition into individual properties.
- local c = [ regex.split $(s[1]) "," ] ;
+ local condition = [ regex.split $(s[1]) "," ] ;
# Evaluate condition.
- if $(c) in $(context)
+ if ! [ MATCH (!).* : $(condition:G=) ]
{
- result += $(s[2]) ;
+ # Only positive checks
+ if $(condition) in $(context)
+ {
+ result += $(s[2]) ;
+ }
}
+ else
+ {
+ # Have negative checks
+ local fail ;
+ while $(condition)
+ {
+ local c = $(condition[1]) ;
+ local m = [ MATCH !(.*) : $(c) ] ;
+ if $(m)
+ {
+ local p = $(m:G=$(c:G)) ;
+ if $(p) in $(context)
+ {
+ fail = true ;
+ c = ;
+ }
+ }
+ else
+ {
+ if ! $(c) in $(context)
+ {
+ fail = true ;
+ c = ;
+ }
+ }
+ condition = $(condition[2-]) ;
+ }
+ if ! $(fail)
+ {
+ result += $(s[2]) ;
+ }
+ }
}
return $(result) ;
}
Modified: branches/release/tools/build/v2/build/property.py
==============================================================================
--- branches/release/tools/build/v2/build/property.py (original)
+++ branches/release/tools/build/v2/build/property.py 2011-01-25 13:06:12 EST (Tue, 25 Jan 2011)
@@ -1,5 +1,5 @@
# Status: ported, except for tests and --abbreviate-paths.
-# Base revision: 40480
+# Base revision: 64070
#
# Copyright 2001, 2002, 2003 Dave Abrahams
# Copyright 2006 Rene Rivera
@@ -10,18 +10,101 @@
import re
from b2.util.utility import *
from b2.build import feature
-from b2.util import sequence, set
+from b2.util import sequence, qualify_jam_action
+import b2.util.set
+from b2.manager import get_manager
__re_two_ampersands = re.compile ('&&')
__re_comma = re.compile (',')
__re_split_condition = re.compile ('(.*):(<.*)')
-__re_toolset_feature = re.compile ('^(<toolset>|<toolset->)')
-__re_os_feature = re.compile ('^(<os>)')
__re_split_conditional = re.compile (r'(.+):<(.+)')
__re_colon = re.compile (':')
__re_has_condition = re.compile (r':<')
__re_separate_condition_and_property = re.compile (r'(.*):(<.*)')
+class Property(object):
+
+ __slots__ = ('_feature', '_value', '_condition')
+
+ def __init__(self, f, value, condition = []):
+ if type(f) == type(""):
+ f = feature.get(f)
+ # At present, single property has a single value.
+ assert type(value) != type([])
+ assert(f.free() or value.find(':') == -1)
+ self._feature = f
+ self._value = value
+ self._condition = condition
+
+ def feature(self):
+ return self._feature
+
+ def value(self):
+ return self._value
+
+ def condition(self):
+ return self._condition
+
+ def to_raw(self):
+ result = "<" + self._feature.name() + ">" + str(self._value)
+ if self._condition:
+ result = ",".join(str(p) for p in self._condition) + ':' + result
+ return result
+
+ def __str__(self):
+ return self.to_raw()
+
+ def __hash__(self):
+ # FIXME: consider if this class should be value-is-identity one
+ return hash((self._feature, self._value, tuple(self._condition)))
+
+ def __cmp__(self, other):
+ return cmp((self._feature, self._value, self._condition),
+ (other._feature, other._value, other._condition))
+
+
+def create_from_string(s, allow_condition=False):
+
+ condition = []
+ import types
+ if not isinstance(s, types.StringType):
+ print type(s)
+ if __re_has_condition.search(s):
+
+ if not allow_condition:
+ raise BaseException("Conditional property is not allowed in this context")
+
+ m = __re_separate_condition_and_property.match(s)
+ condition = m.group(1)
+ s = m.group(2)
+
+ # FIXME: break dependency cycle
+ from b2.manager import get_manager
+
+ feature_name = get_grist(s)
+ if not feature_name:
+ if feature.is_implicit_value(s):
+ f = feature.implied_feature(s)
+ value = s
+ else:
+ raise get_manager().errors()("Invalid property '%s' -- unknown feature" % s)
+ else:
+ f = feature.get(feature_name)
+
+ value = get_value(s)
+ if not value:
+ get_manager().errors()("Invalid property '%s' -- no value specified" % s)
+
+
+ if condition:
+ condition = [create_from_string(x) for x in condition.split(',')]
+
+ return Property(f, value, condition)
+
+def create_from_strings(string_list, allow_condition=False):
+
+ return [create_from_string(s, allow_condition) for s in string_list]
+
def reset ():
""" Clear the module state. This is mainly for testing purposes.
"""
@@ -62,51 +145,18 @@
else:
return 0
-def abbreviate_dashed(string):
- # FIXME: string.abbreviate?
- return [string.abbreviate(part) for part in string.split('-')].join('-')
-
def identify(string):
return string
-# FIXME: --abbreviate-paths
-
-def as_path (properties):
- """ Returns a path which represents the given expanded property set.
- """
- key = '-'.join (properties)
-
- if not __results.has_key (key):
- # trim redundancy
- properties = feature.minimize (properties)
-
- # sort according to path_order
- properties.sort (path_order)
-
- components = []
- for p in properties:
- pg = get_grist (p)
- # FIXME: abbrev?
- if pg:
- f = ungrist (pg)
- components.append (f + '-' + replace_grist (p, ''))
-
- else:
- components.append (p)
-
- __results [key] = '/'.join (components)
-
- return __results [key]
-
+# Uses Property
def refine (properties, requirements):
""" Refines 'properties' by overriding any non-free properties
for which a different value is specified in 'requirements'.
Conditional requirements are just added without modification.
Returns the resulting list of properties.
"""
- # The result has no duplicates, so we store it in a map
- # TODO: use a set from Python 2.4?
- result = {}
+ # The result has no duplicates, so we store it in a set
+ result = set()
# Records all requirements.
required = {}
@@ -115,31 +165,23 @@
# Record them so that we can handle 'properties'.
for r in requirements:
# Don't consider conditional requirements.
- if not is_conditional (r):
- # Note: cannot use local here, so take an ugly name
- required [get_grist (r)] = replace_grist (r, '')
+ if not r.condition():
+ required[r.feature()] = r
for p in properties:
# Skip conditional properties
- if is_conditional (p):
- result [p] = None
+ if p.condition():
+ result.add(p)
# No processing for free properties
- elif 'free' in feature.attributes (get_grist (p)):
- result [p] = None
+ elif p.feature().free():
+ result.add(p)
else:
- if required.has_key (get_grist (p)):
- required_value = required [get_grist (p)]
-
- value = replace_grist (p, '')
-
- if value != required_value:
- result [replace_grist (required_value, get_grist (p))] = None
- else:
- result [p] = None
+ if required.has_key(p.feature()):
+ result.add(required[p.feature()])
else:
- result [p] = None
+ result.add(p)
- return result.keys () + requirements
+ return sequence.unique(list(result) + requirements)
def translate_paths (properties, path):
""" Interpret all path properties in 'properties' as relative to 'path'
@@ -149,47 +191,33 @@
result = []
for p in properties:
- split = split_conditional (p)
- condition = ''
-
- if split:
- condition = split [0]
- p = split [1]
-
- if get_grist (p) and 'path' in feature.attributes (get_grist (p)):
- values = __re_two_ampersands.split (forward_slashes (replace_grist (p, "")))
+ if p.feature().path():
+ values = __re_two_ampersands.split(p.value())
+
+ new_value = "&&".join(os.path.join(path, v) for v in values)
- t = [os.path.join(path, v) for v in values]
- t = '&&'.join (t)
- tp = replace_grist (t, get_grist (p)).replace("\\", "/")
- result.append (condition + tp)
+ if new_value != p.value():
+ result.append(Property(p.feature(), new_value, p.condition()))
+ else:
+ result.append(p)
else:
- result.append (condition + p)
+ result.append (p)
return result
-def translate_indirect(specification, context_module):
+def translate_indirect(properties, context_module):
"""Assumes that all feature values that start with '@' are
names of rules, used in 'context-module'. Such rules can be
either local to the module or global. Qualified local rules
with the name of the module."""
result = []
- for p in specification:
- if p[0] == '@':
- m = p[1:]
- if not '.' in p:
- # This is unqualified rule name. The user might want
- # to set flags on this rule name, and toolset.flag
- # auto-qualifies the rule name. Need to do the same
- # here so set flag setting work.
- # We can arrange for toolset.flag to *not* auto-qualify
- # the argument, but then two rules defined in two Jamfiles
- # will conflict.
- m = context_module + "." + m
-
- result.append(get_grist(p) + "@" + m)
+ for p in properties:
+ if p.value()[0] == '@':
+ q = qualify_jam_action(p.value()[1:], context_module)
+ get_manager().engine().register_bjam_action(q)
+ result.append(Property(p.feature(), '@' + q, p.condition()))
else:
result.append(p)
@@ -210,60 +238,27 @@
result = []
for p in properties:
- s = __re_split_condition.match (p)
-
- if not s:
- result.append (p)
+ if not p.condition():
+ result.append(p)
else:
- condition = s.group (1)
-
- # Condition might include several elements
- condition = __re_comma.split (condition)
-
- value = s.group (2)
+ expanded = []
+ for c in p.condition():
- e = []
- for c in condition:
-
- cg = get_grist (c)
- if __re_toolset_feature.match (cg) or __re_os_feature.match (cg):
+ if c.feature().name().startswith("toolset") or c.feature().name() == "os":
# It common that condition includes a toolset which
# was never defined, or mentiones subfeatures which
# were never defined. In that case, validation will
# only produce an spirious error, so don't validate.
- e.append (feature.expand_subfeatures (c, True))
-
+ expanded.extend(feature.expand_subfeatures ([c], True))
else:
- e.append (feature.expand_subfeatures (c))
-
- if e == condition:
- result.append (p)
+ expanded.extend(feature.expand_subfeatures([c]))
- else:
- individual_subfeatures = Set.difference (e, condition)
- result.append (','.join (individual_subfeatures) + ':' + value)
+ result.append(Property(p.feature(), p.value(), expanded))
return result
-def make (specification):
- """ Converts implicit values into full properties.
- """
- result = []
- for e in specification:
- if get_grist (e):
- result.append (e)
-
- elif feature.is_implicit_value (e):
- f = feature.implied_feature (e)
- result.append (f + e)
-
- else:
- raise InvalidProperty ("'%s' is not a valid for property specification" % e)
-
- return result
-
-
+# FIXME: this should go
def split_conditional (property):
""" If 'property' is conditional property, returns
condition and the property, e.g
@@ -278,13 +273,6 @@
return None
-def is_conditional (property):
- """ Returns True if a property is conditional.
- """
- if __re_colon.search (replace_grist (property, '')):
- return True
- else:
- return False
def select (features, properties):
""" Selects properties which correspond to any of the given features.
@@ -298,8 +286,7 @@
def validate_property_sets (sets):
for s in sets:
- validate(feature.split(s))
-
+ validate(s.all())
def evaluate_conditionals_in_context (properties, context):
""" Removes all conditional properties which conditions are not met
@@ -307,59 +294,24 @@
in conditions are looked up in 'context'
"""
base = []
- conditionals = []
+ conditional = []
for p in properties:
- if __re_has_condition.search (p):
- conditionals.append (p)
+ if p.condition():
+ conditional.append (p)
else:
base.append (p)
- result = base
- for p in conditionals:
-
- # Separate condition and property
- s = __re_separate_condition_and_property.match (p)
+ result = base[:]
+ for p in conditional:
- # Split condition into individual properties
- conditions = s.group (1).split (',')
-
- # Evaluate condition
- if set.contains (c, context):
- result.append (s.group (2))
+ # Evaluate condition
+ # FIXME: probably inefficient
+ if all(x in context for x in p.condition()):
+ result.append(Property(p.feature(), p.value()))
return result
-def expand_subfeatures_in_conditions(properties):
-
- result = []
- for p in properties:
-
- s = __re_separate_condition_and_property.match(p)
- if not s:
- result.append(p)
- else:
- condition = s.group(1)
- # Condition might include several elements
- condition = condition.split(",")
- value = s.group(2)
-
- e = []
- for c in condition:
- # It common that condition includes a toolset which
- # was never defined, or mentiones subfeatures which
- # were never defined. In that case, validation will
- # only produce an spirious error, so prevent
- # validation by passing 'true' as second parameter.
- e.extend(feature.expand_subfeatures(c, dont_validate=True))
-
- if e == condition:
- result.append(p)
- else:
- individual_subfeatures = set.difference(e, condition)
- result.append(",".join(individual_subfeatures) + ":" + value)
-
- return result
def change (properties, feature, value = None):
""" Returns a modified version of properties with all values of the
@@ -389,30 +341,8 @@
"""
msg = None
- f = get_grist (property)
- if f:
- value = get_value (property)
-
- if not feature.valid (f):
- f = ungrist (get_grist (property)) # Ungrist for better error messages
- msg = "Unknown feature '%s'" % f
-
- elif value and not 'free' in feature.attributes (f):
- feature.validate_value_string (f, value)
-
- elif not value:
- f = ungrist (get_grist (property)) # Ungrist for better error messages
- msg = "No value specified for feature '%s'" % f
-
- else:
- f = feature.implied_feature (property)
- feature.validate_value_string (f, property)
-
- if msg:
- # FIXME: don't use globals like this. Import here to
- # break circular dependency.
- from b2.manager import get_manager
- get_manager().errors()("Invalid property '%s': %s" % (property, msg))
+ if not property.feature().free():
+ feature.validate_value_string (property.feature(), property.value())
###################################################################
@@ -466,10 +396,37 @@
properties in 'properties' that have any of 'attributes'."""
result = []
for e in properties:
- if set.intersection(attributes, feature.attributes(get_grist(e))):
+ if b2.util.set.intersection(attributes, feature.attributes(get_grist(e))):
result.append(e)
return result
+def translate_dependencies(properties, project_id, location):
+
+ result = []
+ for p in properties:
+
+ if not p.feature().dependency():
+ result.append(p)
+ else:
+ v = p.value()
+ m = re.match("(.*)//(.*)", v)
+ if m:
+ rooted = m.group(1)
+ if rooted[0] == '/':
+ # Either project id or absolute Linux path, do nothing.
+ pass
+ else:
+ rooted = os.path.join(os.getcwd(), location, rooted)
+
+ result.append(Property(p.feature(), rooted + "//" + m.group(2), p.condition()))
+
+ elif os.path.isabs(v):
+ result.append(p)
+ else:
+ result.append(Property(p.feature(), project_id + "//" + v, p.condition()))
+
+ return result
+
class PropertyMap:
""" Class which maintains a property set -> string mapping.
@@ -499,7 +456,7 @@
for i in range(0, len(self.__properties)):
p = self.__properties[i]
- if set.contains (p, properties):
+ if b2.util.set.contains (p, properties):
matches.append (i)
match_ranks.append(len(p))
Modified: branches/release/tools/build/v2/build/property_set.py
==============================================================================
--- branches/release/tools/build/v2/build/property_set.py (original)
+++ branches/release/tools/build/v2/build/property_set.py 2011-01-25 13:06:12 EST (Tue, 25 Jan 2011)
@@ -8,9 +8,14 @@
from b2.util.utility import *
import property, feature, string
+import b2.build.feature
from b2.exceptions import *
from b2.util.sequence import unique
from b2.util.set import difference
+from b2.util import cached
+
+from b2.manager import get_manager
+
def reset ():
""" Clear the module state. This is mainly for testing purposes.
@@ -28,13 +33,20 @@
""" Creates a new 'PropertySet' instance for the given raw properties,
or returns an already existing one.
"""
- raw_properties.sort ()
- raw_properties = unique (raw_properties)
-
- key = '-'.join (raw_properties)
+ # FIXME: propagate to callers.
+ if len(raw_properties) > 0 and isinstance(raw_properties[0], property.Property):
+ x = raw_properties
+ else:
+ x = [property.create_from_string(ps) for ps in raw_properties]
+ x.sort()
+ x = unique (x)
+
+ # FIXME: can we do better, e.g. by directly computing
+ # hash value of the list?
+ key = tuple(x)
if not __cache.has_key (key):
- __cache [key] = PropertySet (raw_properties)
+ __cache [key] = PropertySet(x)
return __cache [key]
@@ -43,9 +55,10 @@
that all properties are valid and converting incidental
properties into gristed form.
"""
- property.validate (raw_properties)
+ properties = [property.create_from_string(s) for s in raw_properties]
+ property.validate(properties)
- return create (property.make (raw_properties))
+ return create(properties)
def empty ():
""" Returns PropertySet with empty set of properties.
@@ -56,13 +69,16 @@
"""Creates a property-set from the input given by the user, in the
context of 'jamfile-module' at 'location'"""
- property.validate(raw_properties)
-
- specification = property.translate_paths(raw_properties, location)
- specification = property.translate_indirect(specification, jamfile_module)
- specification = property.expand_subfeatures_in_conditions(specification)
- specification = property.make(specification)
- return create(specification)
+ properties = property.create_from_strings(raw_properties, True)
+ properties = property.translate_paths(properties, location)
+ properties = property.translate_indirect(properties, jamfile_module)
+
+ project_id = get_manager().projects().attributeDefault(jamfile_module, 'id', None)
+ if not project_id:
+ project_id = os.path.abspath(location)
+ properties = property.translate_dependencies(properties, project_id, location)
+ properties = property.expand_subfeatures_in_conditions(properties)
+ return create(properties)
def refine_from_user_input(parent_requirements, specification, jamfile_module,
@@ -91,21 +107,21 @@
else:
add_requirements.append(r)
- if remove_requirements:
- # Need to create property set, so that path features
- # and indirect features are translated just like they
- # are in project requirements.
- ps = create_from_user_input(remove_requirements,
- jamfile_module, location)
-
- parent_requirements = create(difference(parent_requirements.raw(),
- ps.raw()))
- specification = add_requirements
+ if remove_requirements:
+ # Need to create property set, so that path features
+ # and indirect features are translated just like they
+ # are in project requirements.
+ ps = create_from_user_input(remove_requirements,
+ jamfile_module, location)
+
+ parent_requirements = create(difference(parent_requirements.all(),
+ ps.all()))
+ specification = add_requirements
- requirements = create_from_user_input(specification,
- jamfile_module, location)
+ requirements = create_from_user_input(specification,
+ jamfile_module, location)
- return parent_requirements.refine(requirements)
+ return parent_requirements.refine(requirements)
class PropertySet:
""" Class for storing a set of properties.
@@ -126,9 +142,16 @@
- several operations, like and refine and as_path are provided. They all use
caching whenever possible.
"""
- def __init__ (self, raw_properties = []):
+ def __init__ (self, properties = []):
+
+
+ raw_properties = []
+ for p in properties:
+ raw_properties.append(p.to_raw())
- self.raw_ = raw_properties
+ self.all_ = properties
+ self.all_raw_ = raw_properties
+ self.all_set_ = set(properties)
self.incidental_ = []
self.free_ = []
@@ -155,6 +178,9 @@
# Cache for the expanded composite properties
self.composites_ = None
+ # Cache for property set with expanded subfeatures
+ self.subfeatures_ = None
+
# Cache for the property set containing propagated properties.
self.propagated_ps_ = None
@@ -174,39 +200,45 @@
raise BaseException ("Invalid property: '%s'" % p)
att = feature.attributes (get_grist (p))
-
+
+ if 'propagated' in att:
+ self.propagated_.append (p)
+
+ if 'link_incompatible' in att:
+ self.link_incompatible.append (p)
+
+ for p in properties:
+
# A feature can be both incidental and free,
# in which case we add it to incidental.
- if 'incidental' in att:
- self.incidental_.append (p)
- elif 'free' in att:
- self.free_.append (p)
+ if p.feature().incidental():
+ self.incidental_.append(p)
+ elif p.feature().free():
+ self.free_.append(p)
else:
- self.base_.append (p)
-
- if 'dependency' in att:
+ self.base_.append(p)
+
+ if p.condition():
+ self.conditional_.append(p)
+ else:
+ self.non_conditional_.append(p)
+
+ if p.feature().dependency():
self.dependency_.append (p)
else:
self.non_dependency_.append (p)
-
- if property.is_conditional (p):
- self.conditional_.append (p)
- else:
- self.non_conditional_.append (p)
-
- if 'propagated' in att:
- self.propagated_.append (p)
+
- if 'link_incompatible' in att:
- self.link_incompatible.append (p)
+ def all(self):
+ return self.all_
def raw (self):
""" Returns the list of stored properties.
"""
- return self.raw_
+ return self.all_raw_
def __str__(self):
- return string.join(self.raw_)
+ return ' '.join(str(p) for p in self.all_)
def base (self):
""" Returns properties that are neither incidental nor free.
@@ -218,6 +250,9 @@
"""
return self.free_
+ def non_free(self):
+ return self.base_ + self.incidental_
+
def dependency (self):
""" Returns dependency properties.
"""
@@ -246,33 +281,33 @@
def refine (self, requirements):
""" Refines this set's properties using the requirements passed as an argument.
"""
- str_req = str (requirements)
- if not self.refined_.has_key (str_req):
- r = property.refine (self.raw (), requirements.raw ())
+ assert isinstance(requirements, PropertySet)
+ if not self.refined_.has_key (requirements):
+ r = property.refine(self.all_, requirements.all_)
- self.refined_ [str_req] = create (r)
+ self.refined_[requirements] = create(r)
- return self.refined_ [str_req]
+ return self.refined_[requirements]
def expand (self):
if not self.expanded_:
- expanded = feature.expand (self.raw_)
- self.expanded_ = create (expanded)
+ expanded = feature.expand(self.all_)
+ self.expanded_ = create(expanded)
return self.expanded_
- def expand_componsite(self):
- if not self.componsites_:
- self.composites_ = create(feature.expand_composires(self.raw_))
- return self.composites_
+ def expand_subfeatures(self):
+ if not self.subfeatures_:
+ self.subfeatures_ = create(feature.expand_subfeatures(self.all_))
+ return self.subfeatures_
def evaluate_conditionals(self, context=None):
if not context:
context = self
if not self.evaluated_.has_key(context):
+ # FIXME: figure out why the call messes up the first parameter
self.evaluated_[context] = create(
- property.evaluate_conditionals_in_context(self.raw_,
- context.raw()))
+ property.evaluate_conditionals_in_context(self.all(), context))
return self.evaluated_[context]
@@ -282,14 +317,40 @@
return self.propagated_ps_
def add_defaults (self):
+ # FIXME: this caching is invalidated when new features
+ # are declared inside non-root Jamfiles.
if not self.defaults_:
- expanded = feature.add_defaults(self.raw_)
+ expanded = feature.add_defaults(self.all_)
self.defaults_ = create(expanded)
return self.defaults_
def as_path (self):
if not self.as_path_:
- self.as_path_ = property.as_path(self.base_)
+
+ def path_order (p1, p2):
+
+ i1 = p1.feature().implicit()
+ i2 = p2.feature().implicit()
+
+ if i1 != i2:
+ return i2 - i1
+ else:
+ return cmp(p1.feature().name(), p2.feature().name())
+
+ # trim redundancy
+ properties = feature.minimize(self.base_)
+
+ # sort according to path_order
+ properties.sort (path_order)
+
+ components = []
+ for p in properties:
+ if p.feature().implicit():
+ components.append(p.value())
+ else:
+ components.append(p.feature().name() + "-" + p.value())
+
+ self.as_path_ = '/'.join (components)
return self.as_path_
@@ -306,7 +367,7 @@
# change the location of generated targets
l = self.get ('<location>')
if l:
- computed = l
+ computed = l[0]
is_relative = False
else:
@@ -334,16 +395,16 @@
is_relative = True
self.target_path_ = (computed, is_relative)
-
+
return self.target_path_
def add (self, ps):
""" Creates a new property set containing the properties in this one,
plus the ones of the property set passed as argument.
"""
- if not self.added_.has_key (str (ps)):
- self.added_ [str (ps)] = create (self.raw_ + ps.raw ())
- return self.added_ [str (ps)]
+ if not self.added_.has_key(ps):
+ self.added_[ps] = create(self.all_ + ps.all())
+ return self.added_[ps]
def add_raw (self, properties):
""" Creates a new property set containing the properties in this one,
@@ -355,14 +416,34 @@
def get (self, feature):
""" Returns all values of 'feature'.
"""
+ if type(feature) == type([]):
+ feature = feature[0]
+ if not isinstance(feature, b2.build.feature.Feature):
+ feature = b2.build.feature.get(feature)
+
if not self.feature_map_:
self.feature_map_ = {}
- for v in self.raw_:
- key = get_grist (v)
- if not self.feature_map_.has_key (key):
- self.feature_map_ [key] = []
- self.feature_map_ [get_grist (v)].append (replace_grist (v, ''))
-
- return self.feature_map_.get (feature, [])
+ for v in self.all_:
+ if not self.feature_map_.has_key(v.feature()):
+ self.feature_map_[v.feature()] = []
+ self.feature_map_[v.feature()].append(v.value())
+
+ return self.feature_map_.get(feature, [])
+
+ @cached
+ def get_properties(self, feature):
+ """Returns all contained properties associated with 'feature'"""
+
+ if not isinstance(feature, b2.build.feature.Feature):
+ feature = b2.build.feature.get(feature)
+
+ result = []
+ for p in self.all_:
+ if p.feature() == feature:
+ result.append(p)
+ return result
+
+ def __contains__(self, item):
+ return item in self.all_set_
Modified: branches/release/tools/build/v2/build/scanner.py
==============================================================================
--- branches/release/tools/build/v2/build/scanner.py (original)
+++ branches/release/tools/build/v2/build/scanner.py 2011-01-25 13:06:12 EST (Tue, 25 Jan 2011)
@@ -31,6 +31,7 @@
import property
import bjam
+import os
from b2.exceptions import *
from b2.manager import get_manager
@@ -103,7 +104,7 @@
# Common scanner class, which can be used when there's only one
# kind of includes (unlike C, where "" and <> includes have different
# search paths).
-def CommonScanner(Scanner):
+class CommonScanner(Scanner):
def __init__ (self, includes):
Scanner.__init__(self)
@@ -114,8 +115,8 @@
target_path = os.path.normpath(os.path.dirname(binding[0]))
bjam.call("mark-included", target, matches)
- engine.set_target_variable(matches, "SEARCH",
- [target_path] + self.includes_)
+ get_manager().engine().set_target_variable(matches, "SEARCH",
+ [target_path] + self.includes)
get_manager().scanners().propagate(self, matches)
class ScannerRegistry:
Modified: branches/release/tools/build/v2/build/targets.jam
==============================================================================
--- branches/release/tools/build/v2/build/targets.jam (original)
+++ branches/release/tools/build/v2/build/targets.jam 2011-01-25 13:06:12 EST (Tue, 25 Jan 2011)
@@ -297,14 +297,14 @@
# Add 'target' to the list of targets in this project that should be build
# only by explicit request
#
- rule mark-target-as-explicit ( target-name )
+ rule mark-target-as-explicit ( target-name * )
{
# Record the name of the target, not instance, since this rule is called
# before main target instances are created.
self.explicit-targets += $(target-name) ;
}
- rule mark-target-as-always ( target-name )
+ rule mark-target-as-always ( target-name * )
{
# Record the name of the target, not instance, since this rule is called
# before main target instances are created.
Modified: branches/release/tools/build/v2/build/targets.py
==============================================================================
--- branches/release/tools/build/v2/build/targets.py (original)
+++ branches/release/tools/build/v2/build/targets.py 2011-01-25 13:06:12 EST (Tue, 25 Jan 2011)
@@ -1,7 +1,5 @@
-# Status: being ported by Vladimir Prus
-# Still to do: call toolset.requirements when those are ported.
-# Remember the location of target.
-# Base revision: 40480
+# Status: ported.
+# Base revision: 64488
# Copyright Vladimir Prus 2002-2007.
# Copyright Rene Rivera 2006.
@@ -76,14 +74,19 @@
import os.path
import sys
+from b2.manager import get_manager
+
from b2.util.utility import *
-import property, project, virtual_target, property_set, feature, generators
+import property, project, virtual_target, property_set, feature, generators, toolset
from virtual_target import Subvariant
from b2.exceptions import *
from b2.util.sequence import unique
-from b2.util import set, path
+from b2.util import path, bjam_signature
from b2.build.errors import user_error_checkpoint
+import b2.build.build_request as build_request
+
+import b2.util.set
_re_separate_target_from_properties = re.compile (r'^([^<]*)(/(<.*))?$')
class TargetRegistry:
@@ -98,6 +101,8 @@
self.debug_building_ = "--debug-building" in bjam.variable("ARGV")
+ self.targets_ = []
+
def main_target_alternative (self, target):
""" Registers the specified target as a main target alternatives.
Returns 'target'.
@@ -105,7 +110,7 @@
target.project ().add_alternative (target)
return target
- def main_target_sources (self, sources, main_target_name, no_remaning=0):
+ def main_target_sources (self, sources, main_target_name, no_renaming=0):
"""Return the list of sources to use, if main target rule is invoked
with 'sources'. If there are any objects in 'sources', they are treated
as main target instances, and the name of such targets are adjusted to
@@ -114,17 +119,20 @@
result = []
for t in sources:
+
+ t = b2.util.jam_to_value_maybe(t)
+
if isinstance (t, AbstractTarget):
name = t.name ()
if not no_renaming:
- new_name = main_target_name + '__' + name
- t.rename (new_name)
+ name = main_target_name + '__' + name
+ t.rename (name)
# Inline targets are not built by default.
p = t.project()
- p.mark_target_as_explicit(name)
- result.append (new_name)
+ p.mark_targets_as_explicit([name])
+ result.append(name)
else:
result.append (t)
@@ -142,12 +150,11 @@
main target
'project' is the project where the main taret is to be declared."""
- # FIXME: revive after toolset.requirements are ported.
- #specification.append(toolset.requirements)
+ specification.extend(toolset.requirements())
requirements = property_set.refine_from_user_input(
project.get("requirements"), specification,
- project.project_module, project.get("location"))
+ project.project_module(), project.get("location"))
return requirements
@@ -178,23 +185,19 @@
project: Project where the main target is to be declared
"""
if specification:
- result = specification
-
+ return property_set.create_with_validation(specification)
else:
- result = project.get ('default-build')
-
- return property_set.create_with_validation (result)
+ return project.get ('default-build')
def start_building (self, main_target_instance):
""" Helper rules to detect cycles in main target references.
"""
if self.targets_being_built_.has_key(id(main_target_instance)):
names = []
- for t in self.targets_being_built_.values():
+ for t in self.targets_being_built_.values() + [main_target_instance]:
names.append (t.full_name())
- raise Recursion ("Recursion in main target references"
- "the following target are being built currently: '%s'" % names)
+ get_manager().errors()("Recursion in main target references\n")
self.targets_being_built_[id(main_target_instance)] = main_target_instance
@@ -227,6 +230,16 @@
if self.debug_building_:
print self.indent_ + message
+ def push_target(self, target):
+ self.targets_.append(target)
+
+ def pop_target(self):
+ self.targets_ = self.targets_[:-1]
+
+ def current(self):
+ return self.targets_[0]
+
+
class GenerateResult:
def __init__ (self, ur=None, targets=None):
@@ -235,6 +248,7 @@
self.__usage_requirements = ur
self.__targets = targets
+ assert all(isinstance(t, virtual_target.VirtualTarget) for t in targets)
if not self.__usage_requirements:
self.__usage_requirements = property_set.empty ()
@@ -340,9 +354,6 @@
self.default_build_ = default_build
self.build_dir_ = None
-
- if parent_project:
- self.inherit (parent_project)
# A cache of IDs
self.ids_cache_ = {}
@@ -358,7 +369,10 @@
self.main_target_ = {}
# Targets marked as explicit.
- self.explicit_targets_ = []
+ self.explicit_targets_ = set()
+
+ # Targets marked as always
+ self.always_targets_ = set()
# The constants defined for this project.
self.constants_ = {}
@@ -366,6 +380,10 @@
# Whether targets for all main target are already created.
self.built_main_targets_ = 0
+ if parent_project:
+ self.inherit (parent_project)
+
+
# TODO: This is needed only by the 'make' rule. Need to find the
# way to make 'make' work without this method.
def project_module (self):
@@ -379,8 +397,7 @@
if not self.build_dir_:
self.build_dir_ = self.get ('build-dir')
if not self.build_dir_:
- self.build_dir_ = os.path.join (os.path.dirname(
- self.project_.get ('location')), 'bin')
+ self.build_dir_ = os.path.join(self.project_.get ('location'), 'bin')
return self.build_dir_
@@ -388,7 +405,7 @@
""" Generates all possible targets contained in this project.
"""
self.manager_.targets().log(
- "Building project '%s' with '%s'" % (self.name (), ps.raw ()))
+ "Building project '%s' with '%s'" % (self.name (), str(ps)))
self.manager_.targets().increase_indent ()
result = GenerateResult ()
@@ -417,17 +434,20 @@
# Collect all projects referenced via "projects-to-build" attribute.
self_location = self.get ('location')
for pn in self.get ('projects-to-build'):
- result.append (self.find(pn))
+ result.append (self.find(pn + "/"))
return result
- def mark_target_as_explicit (self, target_name):
+ def mark_targets_as_explicit (self, target_names):
"""Add 'target' to the list of targets in this project
that should be build only by explicit request."""
# Record the name of the target, not instance, since this
# rule is called before main target instaces are created.
- self.explicit_.append(target_name)
+ self.explicit_targets_.update(target_names)
+
+ def mark_targets_as_always(self, target_names):
+ self.always_targets_.update(target_names)
def add_alternative (self, target_instance):
""" Add new target alternative.
@@ -542,6 +562,9 @@
if not self.main_target_.has_key (name):
t = MainTarget (name, self.project_)
self.main_target_ [name] = t
+
+ if name in self.always_targets_:
+ a.always()
self.main_target_ [name].add_alternative (a)
@@ -555,10 +578,19 @@
"""
if path:
- value = os.path.join(self.location_, value)
+ l = self.location_
+ if not l:
+ # Projects corresponding to config files do not have a
+ # 'location' attribute, but do have source location.
+ # It might be more reasonable to make every project have
+ # a location and use some other approach to prevent buildable
+ # targets in config files, but that's for later.
+ l = self.get('source-location')
+
+ value = os.path.join(l, value)
# Now make the value absolute path
value = os.path.join(os.getcwd(), value)
-
+
self.constants_[name] = value
bjam.call("set-variable", self.project_module(), name, value)
@@ -566,7 +598,7 @@
for c in parent_project.constants_:
# No need to pass the type. Path constants were converted to
# absolute paths already by parent.
- self.add-constant(parent_project.constants_[c])
+ self.add_constant(c, parent_project.constants_[c])
# Import rules from parent
this_module = self.project_module()
@@ -576,7 +608,7 @@
if not rules:
rules = []
user_rules = [x for x in rules
- if x not in self.manager().projects().project_rules()]
+ if x not in self.manager().projects().project_rules().all_names()]
if user_rules:
bjam.call("import-rules-from-parent", parent_module, this_module, user_rules)
@@ -594,10 +626,10 @@
d = target.default_build ()
if self.alternatives_ and self.default_build_ != d:
- raise BaseException ("Default build must be identical in all alternatives\n"
+ get_manager().errors()("default build must be identical in all alternatives\n"
"main target is '%s'\n"
"with '%s'\n"
- "differing from previous default build: '%s'" % (full_name (), d.raw (), self.default_build_.raw ()))
+ "differing from previous default build: '%s'" % (self.full_name (), d.raw (), self.default_build_.raw ()))
else:
self.default_build_ = d
@@ -628,23 +660,26 @@
if len (self.alternatives_) == 1:
return self.alternatives_ [0]
+ if debug:
+ print "Property set for selection:", property_set
+
for v in self.alternatives_:
properties = v.match (property_set, debug)
- if properties:
+ if properties is not None:
if not best:
best = v
best_properties = properties
else:
- if set.equal (properties, best_properties):
+ if b2.util.set.equal (properties, best_properties):
return None
- elif set.contains (properties, best_properties):
+ elif b2.util.set.contains (properties, best_properties):
# Do nothing, this alternative is worse
pass
- elif set.contains (best_properties, properties):
+ elif b2.util.set.contains (best_properties, properties):
best = v
best_properties = properties
@@ -654,53 +689,7 @@
return best
def apply_default_build (self, property_set):
- # 1. First, see what properties from default_build
- # are already present in property_set.
-
- raw = property_set.raw ()
- specified_features = get_grist (raw)
-
- defaults_to_apply = []
- for d in self.default_build_.raw ():
- if not get_grist (d) in specified_features:
- defaults_to_apply.append (d)
-
- # 2. If there's any defaults to be applied, form the new
- # build request. Pass it throw 'expand-no-defaults', since
- # default_build might contain "release debug", which will
- # result in two property_sets.
- result = []
- if defaults_to_apply:
-
- # We have to compress subproperties here to prevent
- # property lists like:
- #
- # <toolset>msvc <toolset-msvc:version>7.1 <threading>multi
- #
- # from being expanded into:
- #
- # <toolset-msvc:version>7.1/<threading>multi
- # <toolset>msvc/<toolset-msvc:version>7.1/<threading>multi
- #
- # due to cross-product property combination. That may
- # be an indication that
- # build_request.expand-no-defaults is the wrong rule
- # to use here.
- compressed = feature.compress-subproperties (raw)
-
- properties = build_request.expand_no_defaults (compressed, defaults_to_apply)
-
- if properties:
- for p in properties:
- result.append (property_set.create (feature.expand (feature.split (p))))
-
- else:
- result .append (property_set.empty ())
-
- else:
- result.append (property_set)
-
- return result
+ return apply_default_build(property_set, self.default_build_)
def generate (self, ps):
""" Select an alternative for this main target, by finding all alternatives
@@ -735,12 +724,11 @@
best_alternative = self.__select_alternatives (prop_set, debug=0)
if not best_alternative:
- self.__select_alternatives(prop_set, debug=1)
- raise NoBestMatchingAlternative (
- "Failed to build '%s'\n"
- "with properties '%s'\n"
- "because no best-matching alternative could be found."
- % (full_name, prop_set.raw ()))
+ # FIXME: revive.
+ # self.__select_alternatives(prop_set, debug=1)
+ self.manager_.errors()(
+ "No best alternative for '%s'.\n"
+ % (self.full_name(),))
result = best_alternative.generate (prop_set)
@@ -777,7 +765,7 @@
def location (self):
# Returns the location of target. Needed by 'testing.jam'
if not self.file_location_:
- source_location = self.project_.get ('source-location')
+ source_location = self.project_.get('source-location')
for src_dir in source_location:
location = os.path.join(src_dir, self.name())
@@ -788,6 +776,50 @@
return self.file_location_
+def resolve_reference(target_reference, project):
+ """ Given a target_reference, made in context of 'project',
+ returns the AbstractTarget instance that is referred to, as well
+ as properties explicitly specified for this reference.
+ """
+ # Separate target name from properties override
+ split = _re_separate_target_from_properties.match (target_reference)
+ if not split:
+ raise BaseException ("Invalid reference: '%s'" % target_reference)
+
+ id = split.group (1)
+
+ sproperties = []
+
+ if split.group (3):
+ sproperties = property.create_from_strings(feature.split(split.group(3)))
+ sproperties = feature.expand_composites(sproperties)
+
+ # Find the target
+ target = project.find (id)
+
+ return (target, property_set.create(sproperties))
+
+def generate_from_reference(target_reference, project, property_set):
+ """ Attempts to generate the target given by target reference, which
+ can refer both to a main target or to a file.
+ Returns a list consisting of
+ - usage requirements
+ - generated virtual targets, if any
+ target_reference: Target reference
+ project: Project where the reference is made
+ property_set: Properties of the main target that makes the reference
+ """
+ target, sproperties = resolve_reference(target_reference, project)
+
+ # Take properties which should be propagated and refine them
+ # with source-specific requirements.
+ propagated = property_set.propagated()
+ rproperties = propagated.refine(sproperties)
+
+ return target.generate(rproperties)
+
+
+
class BasicTarget (AbstractTarget):
""" Implements the most standard way of constructing main target
alternative from sources. Allows sources to be either file or
@@ -799,7 +831,7 @@
for s in sources:
if get_grist (s):
- raise InvalidSource ("property '%s' found in the 'sources' parameter for '%s'" (s, name))
+ raise InvalidSource ("property '%s' found in the 'sources' parameter for '%s'" % (s, name))
self.sources_ = sources
@@ -821,18 +853,28 @@
# A cache for build requests
self.request_cache = {}
- self.user_context_ = self.manager_.errors().capture_user_context()
+        # Result of 'capture_user_context' has everything. For example, if this
+        # target is declared as a result of loading a Jamfile which was loaded
+        # when building target B which was requested from A, then we'll have A,
+        # B, and the Jamroot location in context. Most of the time we only care
+        # about the Jamroot location.
+ self.user_context_ = self.manager_.errors().capture_user_context()[-1:]
+
+ self.always_ = False
+
+ def always(self):
+ self.always_ = True
def sources (self):
""" Returns the list of AbstractTargets which are used as sources.
The extra properties specified for sources are not represented.
- The only used of this rule at the moment is the '--dump-test'
+            The only use of this rule at the moment is the '--dump-tests'
feature of the test system.
"""
if self.source_targets_ == None:
self.source_targets_ = []
for s in self.sources_:
- self.source_targets_.append (self.resolve_reference (s, self.project_))
+ self.source_targets_.append(resolve_reference(s, self.project_)[0])
return self.source_targets_
@@ -842,45 +884,29 @@
def default_build (self):
return self.default_build_
- def resolve_reference (self, target_reference, project):
- """ Given a target_reference, made in context of 'project',
- returns the AbstractTarget instance that is referred to, as well
- as properties explicitly specified for this reference.
- """
- # Separate target name from properties override
- split = _re_separate_target_from_properties.match (target_reference)
- if not split:
- raise BaseException ("Invalid reference: '%s'" % target_reference)
-
- id = split.group (1)
-
- sproperties = []
-
- if split.group (3):
- sproperties = property.make (feature.split (split.group (3)))
- sproperties = self.manager.features ().expand_composites (sproperties)
-
- # Find the target
- target = project.find (id)
-
- return (target, property_set.create (sproperties))
-
def common_properties (self, build_request, requirements):
""" Given build request and requirements, return properties
common to dependency build request and target build
properties.
"""
- # For optimization, we add free requirements directly,
+ # For optimization, we add free unconditional requirements directly,
# without using complex algorithsm.
- # This gives the complex algorithm better chance of caching results.
- free = requirements.free ()
- non_free = property_set.create (requirements.base () + requirements.incidental ())
-
- key = str (build_request) + '-' + str (non_free)
- if not self.request_cache.has_key (key):
- self.request_cache [key] = self.__common_properties2 (build_request, non_free)
+        # This gives the complex algorithm a better chance of caching results.
+        # The exact effect of this "optimization" is no longer clear.
+ free_unconditional = []
+ other = []
+ for p in requirements.all():
+ if p.feature().free() and not p.condition() and p.feature().name() != 'conditional':
+ free_unconditional.append(p)
+ else:
+ other.append(p)
+ other = property_set.create(other)
+
+ key = (build_request, other)
+ if not self.request_cache.has_key(key):
+ self.request_cache[key] = self.__common_properties2 (build_request, other)
- return self.request_cache [key].add_raw (free)
+ return self.request_cache[key].add_raw(free_unconditional)
# Given 'context' -- a set of already present properties, and 'requirements',
# decide which extra properties should be applied to 'context'.
@@ -907,26 +933,24 @@
# <threading>single
#
# might come from project's requirements.
-
unconditional = feature.expand(requirements.non_conditional())
-
- raw = context.raw()
- raw = property.refine(raw, unconditional)
-
+
+ context = context.refine(property_set.create(unconditional))
+
# We've collected properties that surely must be present in common
# properties. We now try to figure out what other properties
# should be added in order to satisfy rules (4)-(6) from the docs.
- conditionals = requirements.conditional()
+ conditionals = property_set.create(requirements.conditional())
# It's supposed that #conditionals iterations
# should be enough for properties to propagate along conditions in any
# direction.
- max_iterations = len(conditionals) +\
+ max_iterations = len(conditionals.all()) +\
len(requirements.get("<conditional>")) + 1
added_requirements = []
- current = raw
+ current = context
# It's assumed that ordinary conditional requirements can't add
# <indirect-conditional> properties, and that rules referred
@@ -934,16 +958,24 @@
# <indirect-conditional> properties. So the list of indirect conditionals
# does not change.
indirect = requirements.get("<conditional>")
- indirect = [s[1:] for s in indirect]
ok = 0
for i in range(0, max_iterations):
- e = property.evaluate_conditionals_in_context(conditionals, current)
+ e = conditionals.evaluate_conditionals(current).all()[:]
# Evaluate indirect conditionals.
for i in indirect:
- e.extend(bjam.call(i, current))
+ i = b2.util.jam_to_value_maybe(i)
+ if callable(i):
+ # This is Python callable, yeah.
+ e.extend(i(current))
+ else:
+ # Name of bjam function. Because bjam is unable to handle
+ # list of Property, pass list of strings.
+ br = b2.util.call_jam_function(i[1:], [str(p) for p in current.all()])
+ if br:
+ e.extend(property.create_from_strings(br))
if e == added_requirements:
# If we got the same result, we've found final properties.
@@ -955,7 +987,7 @@
# Recompute 'current' using initial properties and conditional
# requirements.
added_requirements = e
- current = property.refine(raw, feature.expand(e))
+ current = context.refine(property_set.create(feature.expand(e)))
if not ok:
self.manager().errors()("Can't evaluate conditional properties "
@@ -965,7 +997,7 @@
if what == "added":
return property_set.create(unconditional + added_requirements)
elif what == "refined":
- return property_set.create(current)
+ return current
else:
self.manager().errors("Invalid value of the 'what' parameter")
@@ -975,14 +1007,14 @@
# TODO: There is possibility that we've added <foo>bar, which is composite
# and expands to <foo2>bar2, but default value of <foo2> is not bar2,
# in which case it's not clear what to do.
- #
+ #
build_request = build_request.add_defaults()
# Featured added by 'add-default' can be composite and expand
# to features without default values -- so they are not added yet.
# It could be clearer/faster to expand only newly added properties
# but that's not critical.
build_request = build_request.expand()
-
+
return self.evaluate_requirements(requirements, build_request,
"refined")
@@ -1000,12 +1032,12 @@
# build request just to select this variant.
bcondition = self.requirements_.base ()
ccondition = self.requirements_.conditional ()
- condition = set.difference (bcondition, ccondition)
+ condition = b2.util.set.difference (bcondition, ccondition)
if debug:
- print " next alternative: required properties:", str(condition)
+ print " next alternative: required properties:", [str(p) for p in condition]
- if set.contains (condition, property_set.raw ()):
+ if b2.util.set.contains (condition, property_set.all()):
if debug:
print " matched"
@@ -1014,31 +1046,41 @@
else:
return None
+
+
+ def generate_dependency_targets (self, target_ids, property_set):
+ targets = []
+ usage_requirements = []
+ for id in target_ids:
+
+ result = generate_from_reference(id, self.project_, property_set)
+ targets += result.targets()
+ usage_requirements += result.usage_requirements().all()
+
+ return (targets, usage_requirements)
- def generate_dependencies (self, dependencies, property_set):
+ def generate_dependency_properties(self, properties, ps):
""" Takes a target reference, which might be either target id
or a dependency property, and generates that target using
'property_set' as build request.
Returns a tuple (result, usage_requirements).
"""
- result_var = []
+ result_properties = []
usage_requirements = []
- for dependency in dependencies:
- grist = get_grist (dependency)
- id = replace_grist (dependency, '')
-
- result = self.generate_from_reference (id, self.project_, property_set)
-
- # FIXME:
- # TODO: this is a problem: the grist must be kept and the value
- # is the object itself. This won't work in python.
- targets = [ self.manager_.register_object (x) for x in result.targets () ]
+ for p in properties:
+
+ result = generate_from_reference(p.value(), self.project_, ps)
+
+ for t in result.targets():
+ result_properties.append(property.Property(p.feature(), t))
- result_var += replace_grist (targets, grist)
- usage_requirements += result.usage_requirements ().raw ()
+ usage_requirements += result.usage_requirements().all()
+
+ return (result_properties, usage_requirements)
+
+
- return (result_var, usage_requirements)
@user_error_checkpoint
def generate (self, ps):
@@ -1060,8 +1102,10 @@
"Command line free features: '%s'" % str (cf.raw ()))
self.manager().targets().log(
"Target requirements: %s'" % str (self.requirements().raw ()))
+
+ self.manager().targets().push_target(self)
- if not self.generated_.has_key (str (ps)):
+ if not self.generated_.has_key(ps):
# Apply free features form the command line. If user
# said
@@ -1071,45 +1115,55 @@
rproperties = self.common_properties (ps, self.requirements_)
self.manager().targets().log(
- "Common properties are '%s'" % str (rproperties.raw ()))
-
- if rproperties.get("<build>") != "no":
+ "Common properties are '%s'" % str (rproperties))
+
+ if rproperties.get("<build>") != ["no"]:
result = GenerateResult ()
properties = rproperties.non_dependency ()
-
- (p, u) = self.generate_dependencies (rproperties.dependency (), rproperties)
+
+ (p, u) = self.generate_dependency_properties (rproperties.dependency (), rproperties)
properties += p
+ assert all(isinstance(p, property.Property) for p in properties)
usage_requirements = u
- (source_targets, u) = self.generate_dependencies (self.sources_, rproperties)
+ (source_targets, u) = self.generate_dependency_targets (self.sources_, rproperties)
usage_requirements += u
self.manager_.targets().log(
"Usage requirements for '%s' are '%s'" % (self.name_, usage_requirements))
- rproperties = property_set.create (properties + usage_requirements)
+ # FIXME:
+
+ rproperties = property_set.create(properties + usage_requirements)
usage_requirements = property_set.create (usage_requirements)
self.manager_.targets().log(
- "Build properties: '%s'" % str(rproperties.raw()))
+ "Build properties: '%s'" % str(rproperties))
- extra = rproperties.get ('<source>')
- source_targets += replace_grist (extra, '')
- source_targets = replace_references_by_objects (self.manager (), source_targets)
+ source_targets += rproperties.get('<source>')
# We might get duplicate sources, for example if
# we link to two library which have the same <library> in
# usage requirements.
- source_targets = unique (source_targets)
+                    # Use a stable sort, since for some targets the order is
+                    # important. E.g. the RUN_PY target needs the Python source
+                    # to come first.
+ source_targets = unique(source_targets, stable=True)
+
+ # FIXME: figure why this call messes up source_targets in-place
+ result = self.construct (self.name_, source_targets[:], rproperties)
- result = self.construct (self.name_, source_targets, rproperties)
if result:
assert len(result) == 2
gur = result [0]
result = result [1]
+ if self.always_:
+ for t in result:
+ t.always()
+
s = self.create_subvariant (
result,
self.manager().virtual_targets().recent_targets(), ps,
@@ -1122,45 +1176,34 @@
self.manager_.targets().log (
"Usage requirements from '%s' are '%s'" %
- (self.name, str(rproperties.raw())))
+ (self.name(), str(rproperties)))
- self.generated_ [str (ps)] = GenerateResult (ur, result)
+ self.generated_[ps] = GenerateResult (ur, result)
else:
- self.generated_ [str (ps)] = GenerateResult (property_set.empty(), [])
+ self.generated_[ps] = GenerateResult (property_set.empty(), [])
else:
- self.manager().targets().log(
- "Skipping build: <build>no in common properties")
-
- # We're here either because there's error computing
- # properties, or there's <build>no in properties.
- # In the latter case we don't want any diagnostic.
- # In the former case, we need diagnostics. TODOo
- self.generated_ [str (ps)] = GenerateResult (rproperties, [])
+ # If we just see <build>no, we cannot produce any reasonable
+ # diagnostics. The code that adds this property is expected
+ # to explain why a target is not built, for example using
+ # the configure.log-component-configuration function.
+
+ # If this target fails to build, add <build>no to properties
+ # to cause any parent target to fail to build. Except that it
+ # - does not work now, since we check for <build>no only in
+ # common properties, but not in properties that came from
+ # dependencies
+ # - it's not clear if that's a good idea anyway. The alias
+ # target, for example, should not fail to build if a dependency
+ # fails.
+ self.generated_[ps] = GenerateResult(
+ property_set.create(["<build>no"]), [])
else:
self.manager().targets().log ("Already built")
+ self.manager().targets().pop_target()
self.manager().targets().decrease_indent()
- return self.generated_ [str (ps)]
-
- def generate_from_reference (self, target_reference, project, property_set):
- """ Attempts to generate the target given by target reference, which
- can refer both to a main target or to a file.
- Returns a list consisting of
- - usage requirements
- - generated virtual targets, if any
- target_reference: Target reference
- project: Project where the reference is made
- property_set: Properties of the main target that makes the reference
- """
- target, sproperties = self.resolve_reference (target_reference, project)
-
- # Take properties which should be propagated and refine them
- # with source-specific requirements.
- propagated = property_set.propagated ()
- rproperties = propagated.refine (sproperties)
-
- return target.generate (rproperties)
+ return self.generated_[ps]
def compute_usage_requirements (self, subvariant):
""" Given the set of generated targets, and refined build
@@ -1173,7 +1216,7 @@
# We generate all dependency properties and add them,
# as well as their usage requirements, to result.
- (r1, r2) = self.generate_dependencies (xusage_requirements.dependency (), rproperties)
+ (r1, r2) = self.generate_dependency_properties(xusage_requirements.dependency (), rproperties)
extra = r1 + r2
result = property_set.create (xusage_requirements.non_dependency () + extra)
@@ -1238,21 +1281,25 @@
def __init__ (self, name, project, type, sources, requirements, default_build, usage_requirements):
BasicTarget.__init__ (self, name, project, sources, requirements, default_build, usage_requirements)
self.type_ = type
+
+ def __jam_repr__(self):
+ return b2.util.value_to_jam(self)
def type (self):
return self.type_
def construct (self, name, source_targets, prop_set):
+
r = generators.construct (self.project_, name, self.type_,
- property_set.create (prop_set.raw () + ['<main-target-type>' + self.type_]),
- source_targets)
+ prop_set.add_raw(['<main-target-type>' + self.type_]),
+ source_targets, True)
if not r:
print "warning: Unable to construct '%s'" % self.full_name ()
# Are there any top-level generators for this type/property set.
if not generators.find_viable_generators (self.type_, prop_set):
- print "error: no generators were found for type '$(self.type)'"
+ print "error: no generators were found for type '" + self.type_ + "'"
print "error: and the requested properties"
print "error: make sure you've configured the needed tools"
print "See http://boost.org/boost-build2/doc/html/bbv2/advanced/configuration.html"
@@ -1262,3 +1309,93 @@
return r
+def apply_default_build(property_set, default_build):
+ # 1. First, see what properties from default_build
+ # are already present in property_set.
+
+ specified_features = set(p.feature() for p in property_set.all())
+
+ defaults_to_apply = []
+ for d in default_build.all():
+ if not d.feature() in specified_features:
+ defaults_to_apply.append(d)
+
+ # 2. If there's any defaults to be applied, form the new
+ # build request. Pass it throw 'expand-no-defaults', since
+ # default_build might contain "release debug", which will
+ # result in two property_sets.
+ result = []
+ if defaults_to_apply:
+
+ # We have to compress subproperties here to prevent
+ # property lists like:
+ #
+ # <toolset>msvc <toolset-msvc:version>7.1 <threading>multi
+ #
+ # from being expanded into:
+ #
+ # <toolset-msvc:version>7.1/<threading>multi
+ # <toolset>msvc/<toolset-msvc:version>7.1/<threading>multi
+ #
+ # due to cross-product property combination. That may
+ # be an indication that
+ # build_request.expand-no-defaults is the wrong rule
+ # to use here.
+ compressed = feature.compress_subproperties(property_set.all())
+
+ result = build_request.expand_no_defaults(
+ b2.build.property_set.create([p]) for p in (compressed + defaults_to_apply))
+
+ else:
+ result.append (property_set)
+
+ return result
+
+
+def create_typed_metatarget(name, type, sources, requirements, default_build, usage_requirements):
+
+ from b2.manager import get_manager
+ t = get_manager().targets()
+
+ project = get_manager().projects().current()
+
+ return t.main_target_alternative(
+ TypedTarget(name, project, type,
+ t.main_target_sources(sources, name),
+ t.main_target_requirements(requirements, project),
+ t.main_target_default_build(default_build, project),
+ t.main_target_usage_requirements(usage_requirements, project)))
+
+
+def create_metatarget(klass, name, sources, requirements=[], default_build=[], usage_requirements=[]):
+ from b2.manager import get_manager
+ t = get_manager().targets()
+
+ project = get_manager().projects().current()
+
+ return t.main_target_alternative(
+ klass(name, project,
+ t.main_target_sources(sources, name),
+ t.main_target_requirements(requirements, project),
+ t.main_target_default_build(default_build, project),
+ t.main_target_usage_requirements(usage_requirements, project)))
+
+def metatarget_function_for_class(class_):
+
+ @bjam_signature((["name"], ["sources", "*"], ["requirements", "*"],
+ ["default_build", "*"], ["usage_requirements", "*"]))
+ def create_metatarget(name, sources, requirements = [], default_build = None, usage_requirements = []):
+
+ from b2.manager import get_manager
+ t = get_manager().targets()
+
+ project = get_manager().projects().current()
+
+ return t.main_target_alternative(
+ class_(name, project,
+ t.main_target_sources(sources, name),
+ t.main_target_requirements(requirements, project),
+ t.main_target_default_build(default_build, project),
+ t.main_target_usage_requirements(usage_requirements, project)))
+
+ return create_metatarget
Modified: branches/release/tools/build/v2/build/toolset.py
==============================================================================
--- branches/release/tools/build/v2/build/toolset.py (original)
+++ branches/release/tools/build/v2/build/toolset.py 2011-01-25 13:06:12 EST (Tue, 25 Jan 2011)
@@ -10,9 +10,12 @@
""" Support for toolset definition.
"""
-import feature, property, generators
+import feature, property, generators, property_set
+import b2.util.set
+from b2.util import cached, qualify_jam_action
from b2.util.utility import *
-from b2.util import set
+from b2.util import bjam_signature
+from b2.manager import get_manager
__re_split_last_segment = re.compile (r'^(.+)\.([^\.])*')
__re_two_ampersands = re.compile ('(&&)')
@@ -67,28 +70,13 @@
# FIXME: --ignore-toolset-requirements
# FIXME: using
-def normalize_condition (property_sets):
- """ Expands subfeatures in each property set.
- e.g
- <toolset>gcc-3.2
- will be converted to
- <toolset>gcc/<toolset-version>3.2
-
- TODO: does this one belong here or in feature?
- """
- result = []
- for p in property_sets:
- split = feature.split (p)
- expanded = feature.expand_subfeatures (split)
- result.append ('/'.join (expanded))
-
- return result
-
# FIXME push-checking-for-flags-module ....
# FIXME: investigate existing uses of 'hack-hack' parameter
# in jam code.
-
-def flags (rule_or_module, variable_name, condition, values = []):
+
+@bjam_signature((["rule_or_module", "variable_name", "condition", "*"],
+ ["values", "*"]))
+def flags(rule_or_module, variable_name, condition, values = []):
""" Specifies the flags (variables) that must be set on targets under certain
conditions, described by arguments.
rule_or_module: If contains dot, should be a rule name.
@@ -127,6 +115,19 @@
is specified, then the value of 'feature'
will be added.
"""
+ caller = bjam.caller()[:-1]
+ if not '.' in rule_or_module and caller.startswith("Jamfile"):
+ # Unqualified rule name, used inside Jamfile. Most likely used with
+ # 'make' or 'notfile' rules. This prevents setting flags on the entire
+ # Jamfile module (this will be considered as rule), but who cares?
+ # Probably, 'flags' rule should be split into 'flags' and
+ # 'flags-on-module'.
+ rule_or_module = qualify_jam_action(rule_or_module, caller)
+ else:
+ # FIXME: revive checking that we don't set flags for a different
+ # module unintentionally
+ pass
+
if condition and not replace_grist (condition, ''):
# We have condition in the form '<feature>', that is, without
# value. That's a previous syntax:
@@ -138,63 +139,60 @@
condition = None
if condition:
- property.validate_property_sets (condition)
- condition = normalize_condition ([condition])
+ transformed = []
+ for c in condition:
+ # FIXME: 'split' might be a too raw tool here.
+ pl = [property.create_from_string(s) for s in c.split('/')]
+ pl = feature.expand_subfeatures(pl);
+ transformed.append(property_set.create(pl))
+ condition = transformed
+
+ property.validate_property_sets(condition)
__add_flag (rule_or_module, variable_name, condition, values)
-def set_target_variables (manager, rule_or_module, targets, properties):
+def set_target_variables (manager, rule_or_module, targets, ps):
"""
"""
- key = rule_or_module + '.' + str (properties)
- settings = __stv.get (key, None)
- if not settings:
- settings = __set_target_variables_aux (manager, rule_or_module, properties)
-
- __stv [key] = settings
+ settings = __set_target_variables_aux(manager, rule_or_module, ps)
if settings:
for s in settings:
for target in targets:
manager.engine ().set_target_variable (target, s [0], s[1], True)
-def find_property_subset (property_sets, properties):
+def find_satisfied_condition(conditions, ps):
"""Returns the first element of 'property-sets' which is a subset of
'properties', or an empty list if no such element exists."""
-
- prop_keys = get_grist(properties)
- for s in property_sets:
- # Handle value-less properties like '<architecture>' (compare with
- # '<architecture>x86').
-
- set = feature.split(s)
-
- # Find the set of features that
- # - have no property specified in required property set
- # - are omitted in build property set
- default_props = []
- for i in set:
- # If $(i) is a value-less property it should match default
- # value of an optional property. See the first line in the
- # example below:
- #
- # property set properties result
- # <a> <b>foo <b>foo match
- # <a> <b>foo <a>foo <b>foo no match
- # <a>foo <b>foo <b>foo no match
- # <a>foo <b>foo <a>foo <b>foo match
- if not (get_value(i) or get_grist(i) in prop_keys):
- default_props.append(i)
-
- # FIXME: can this be expressed in a more pythonic way?
- has_all = 1
- for i in set:
- if i not in (properties + default_props):
- has_all = 0
- break
- if has_all:
- return s
+ features = set(p.feature() for p in ps.all())
+
+ for condition in conditions:
+
+ found_all = True
+ for i in condition.all():
+
+ found = False
+ if i.value():
+ found = i.value() in ps.get(i.feature())
+ else:
+ # Handle value-less properties like '<architecture>' (compare with
+ # '<architecture>x86').
+ # If $(i) is a value-less property it should match default
+ # value of an optional property. See the first line in the
+ # example below:
+ #
+ # property set properties result
+ # <a> <b>foo <b>foo match
+ # <a> <b>foo <a>foo <b>foo no match
+ # <a>foo <b>foo <b>foo no match
+ # <a>foo <b>foo <a>foo <b>foo match
+ found = not i.feature() in features
+
+ found_all = found_all and found
+
+ if found_all:
+ return condition
return None
@@ -239,7 +237,7 @@
call it as needed."""
for f in __module_flags.get(base, []):
- if not f.condition or set.difference(f.condition, prohibited_properties):
+ if not f.condition or b2.util.set.difference(f.condition, prohibited_properties):
match = __re_first_group.match(f.rule)
rule_ = None
if match:
@@ -290,7 +288,8 @@
######################################################################################
# Private functions
-def __set_target_variables_aux (manager, rule_or_module, properties):
+@cached
+def __set_target_variables_aux (manager, rule_or_module, ps):
""" Given a rule name and a property set, returns a list of tuples of
variables names and values, which must be set on targets for that
rule/properties combination.
@@ -299,12 +298,12 @@
for f in __flags.get(rule_or_module, []):
- if not f.condition or find_property_subset (f.condition, properties):
+ if not f.condition or find_satisfied_condition (f.condition, ps):
processed = []
for v in f.values:
# The value might be <feature-name> so needs special
# treatment.
- processed += __handle_flag_value (manager, v, properties)
+ processed += __handle_flag_value (manager, v, ps)
for r in processed:
result.append ((f.variable_name, r))
@@ -314,40 +313,35 @@
if next:
result.extend(__set_target_variables_aux(
- manager, next.group(1), properties))
+ manager, next.group(1), ps))
return result
-def __handle_flag_value (manager, value, properties):
+def __handle_flag_value (manager, value, ps):
result = []
if get_grist (value):
- matches = property.select (value, properties)
- for p in matches:
- att = feature.attributes (get_grist (p))
-
- ungristed = replace_grist (p, '')
+ f = feature.get(value)
+ values = ps.get(f)
+
+ for value in values:
- if 'dependency' in att:
+ if f.dependency():
# the value of a dependency feature is a target
# and must be actualized
- # FIXME: verify that 'find' actually works, ick!
- result.append (manager.targets ().find (ungristed).actualize ())
+ result.append(value.actualize())
- elif 'path' in att or 'free' in att:
- values = []
+ elif f.path() or f.free():
# Treat features with && in the value
# specially -- each &&-separated element is considered
# separate value. This is needed to handle searched
# libraries, which must be in specific order.
- if not __re_two_ampersands.search (ungristed):
- values.append (ungristed)
+ if not __re_two_ampersands.search(value):
+ result.append(value)
else:
- values.extend(value.split ('&&'))
-
- result.extend(values)
+ result.extend(value.split ('&&'))
else:
result.append (ungristed)
else:
@@ -369,6 +363,8 @@
__module_flags.setdefault(m, []).append(f)
__flags.setdefault(rule_or_module, []).append(f)
+__requirements = []
+
def requirements():
"""Return the list of global 'toolset requirements'.
Those requirements will be automatically added to the requirements of any main target."""
@@ -380,9 +376,9 @@
they were specified literally. For best results, all requirements added should
be conditional or indirect conditional."""
- # FIXME:
#if ! $(.ignore-requirements)
#{
+ print "XXXX", requirements
__requirements.extend(requirements)
#}
Modified: branches/release/tools/build/v2/build/type.py
==============================================================================
--- branches/release/tools/build/v2/build/type.py (original)
+++ branches/release/tools/build/v2/build/type.py 2011-01-25 13:06:12 EST (Tue, 25 Jan 2011)
@@ -14,6 +14,8 @@
from b2.util.utility import replace_grist, os_name
from b2.exceptions import *
from b2.build import feature, property, scanner
+from b2.util import bjam_signature
+
__re_hyphen = re.compile ('-')
@@ -53,7 +55,7 @@
reset ()
-
+@bjam_signature((["type"], ["suffixes", "*"], ["base_type", "?"]))
def register (type, suffixes = [], base_type = None):
""" Registers a target type, possibly derived from a 'base-type'.
If 'suffixes' are provided, they list all the suffixes that mean a file is of 'type'.
@@ -83,7 +85,7 @@
if len (suffixes) > 0:
# Generated targets of 'type' will use the first of 'suffixes'
- # (this may be overriden)
+        # (this may be overridden)
set_generated_target_suffix (type, [], suffixes [0])
# Specify mapping from suffixes to type
@@ -97,10 +99,21 @@
feature.compose ('<target-type>' + type, replace_grist (base_type, '<base-target-type>'))
feature.compose ('<base-target-type>' + type, '<base-target-type>' + base_type)
+ import b2.build.generators as generators
+ # Adding a new derived type affects generator selection so we need to
+ # make the generator selection module update any of its cached
+ # information related to a new derived type being defined.
+ generators.update_cached_information_with_a_new_type(type)
+
# FIXME: resolving recursive dependency.
from b2.manager import get_manager
get_manager().projects().project_rules().add_rule_for_type(type)
+# FIXME: quick hack.
+def type_from_rule_name(rule_name):
+ return rule_name.upper().replace("-", "_")
+
+
def register_suffixes (suffixes, type):
""" Specifies that targets with suffix from 'suffixes' have the type 'type'.
If a different type is already specified for any of syffixes, issues an error.
@@ -141,6 +154,12 @@
return None
+def base(type):
+ """Returns a base type for the given type or nothing in case the given type is
+ not derived."""
+
+ return __types[type]['base']
+
def all_bases (type):
""" Returns type and all of its bases, in the order of their distance from type.
"""
@@ -175,6 +194,7 @@
# TODO: remove this method
return is_derived (type, base)
+@bjam_signature((["type"], ["properties", "*"], ["suffix"]))
def set_generated_target_suffix (type, properties, suffix):
""" Sets a target suffix that should be used when generating target
of 'type' with the specified properties. Can be called with
@@ -209,6 +229,7 @@
# should be used.
#
# Usage example: library names use the "lib" prefix on unix.
+@bjam_signature((["type"], ["properties", "*"], ["suffix"]))
def set_generated_target_prefix(type, properties, prefix):
set_generated_target_ps(0, type, properties, prefix)
@@ -243,7 +264,7 @@
# Note that if the string is empty (""), but not null, we consider
# suffix found. Setting prefix or suffix to empty string is fine.
- if result:
+ if result is not None:
found = True
type = __types [type]['base']
@@ -257,8 +278,8 @@
with the specified properties. If not suffix were specified for
'type', returns suffix for base type, if any.
"""
- key = str(is_suffix) + type + str(prop_set)
- v = __target_suffixes_cache.get (key, None)
+ key = (is_suffix, type, prop_set)
+ v = __target_suffixes_cache.get(key, None)
if not v:
v = generated_target_ps_real(is_suffix, type, prop_set.raw())
Modified: branches/release/tools/build/v2/build/virtual-target.jam
==============================================================================
--- branches/release/tools/build/v2/build/virtual-target.jam (original)
+++ branches/release/tools/build/v2/build/virtual-target.jam 2011-01-25 13:06:12 EST (Tue, 25 Jan 2011)
@@ -760,7 +760,20 @@
DEPENDS $(actual-targets) : $(self.actual-sources)
$(self.dependency-only-sources) ;
-
+
+ # This works around a bug with -j and actions that
+ # produce multiple targets, where:
+ # - dependency on the first output is found, and
+ # the action is started
+ # - dependency on the second output is found, and
+ # bjam noticed that command is already running
+ # - instead of waiting for the command, dependents
+ # of the second targets are immediately updated.
+ if $(actual-targets[2])
+ {
+ INCLUDES $(actual-targets) : $(actual-targets) ;
+ }
+
# Action name can include additional argument to rule, which should
# not be passed to 'set-target-variables'
toolset.set-target-variables
Modified: branches/release/tools/build/v2/build/virtual_target.py
==============================================================================
--- branches/release/tools/build/v2/build/virtual_target.py (original)
+++ branches/release/tools/build/v2/build/virtual_target.py 2011-01-25 13:06:12 EST (Tue, 25 Jan 2011)
@@ -1,6 +1,5 @@
-# Status: being ported by Vladimir Prus
-# Essentially ported, minor fixme remain.
-# Base revision: 40480
+# Status: ported.
+# Base revision: 64488.
#
# Copyright (C) Vladimir Prus 2002. Permission to copy, use, modify, sell and
# distribute this software is granted provided this copyright notice appears in
@@ -61,9 +60,12 @@
# but in builtin.jam modules. They are shown in the diagram to give
# the big picture.
+import bjam
+
import re
import os.path
import string
+import types
from b2.util import path, utility, set
from b2.util.utility import add_grist, get_grist, ungrist, replace_grist, get_value
@@ -71,7 +73,12 @@
from b2.tools import common
from b2.exceptions import *
import b2.build.type
-import type
+import b2.build.property_set as property_set
+
+import b2.build.property as property
+
+from b2.manager import get_manager
+from b2.util import bjam_signature
__re_starts_with_at = re.compile ('^@(.*)')
@@ -103,7 +110,10 @@
and equal action. If such target is found it is returned and 'target' is not registered.
Otherwise, 'target' is registered and returned.
"""
- signature = target.path() + "-" + target.name()
+ if target.path():
+ signature = target.path() + "-" + target.name()
+ else:
+ signature = "-" + target.name()
result = None
if not self.cache_.has_key (signature):
@@ -121,8 +131,10 @@
if a1 and a2 and a1.action_name () == a2.action_name () and a1.sources () == a2.sources ():
ps1 = a1.properties ()
ps2 = a2.properties ()
- p1 = ps1.base () + ps1.free () + ps1.dependency ()
- p2 = ps2.base () + ps2.free () + ps2.dependency ()
+ p1 = ps1.base () + ps1.free () +\
+ b2.util.set.difference(ps1.dependency(), ps1.incidental())
+ p2 = ps2.base () + ps2.free () +\
+ b2.util.set.difference(ps2.dependency(), ps2.incidental())
if p1 == p2:
result = t
@@ -133,9 +145,6 @@
# TODO: Don't append if we found pre-existing target?
self.recent_targets_.append(result)
self.all_targets_.append(result)
-
- result.set_id(self.next_id_)
- self.next_id_ = self.next_id_+1
return result
@@ -154,15 +163,12 @@
if self.files_.has_key (path):
return self.files_ [path]
- file_type = type.type (file)
+ file_type = b2.build.type.type (file)
- result = FileTarget (file, False, file_type, project,
+ result = FileTarget (file, file_type, project,
None, file_location)
self.files_ [path] = result
-
- result.set_id(self.next_id_)
- self.next_id_ = self.next_id_+1
-
+
return result
def recent_targets(self):
@@ -183,7 +189,7 @@
# Returns all targets from 'targets' with types
# equal to 'type' or derived from it.
def select_by_type(self, type, targets):
- return [t for t in targets if type.is_sybtype(t.type(), type)]
+ return [t for t in targets if b2.build.type.is_sybtype(t.type(), type)]
def register_actual_name (self, actual_name, virtual_target):
if self.actual_.has_key (actual_name):
@@ -210,17 +216,19 @@
if not properties_added: properties_added = "none"
# FIXME: Revive printing of real location.
- raise BaseException ("Duplicate name of actual target: '%s'\n"
- "previous virtual target '%s'\n"
- "created from '%s'\n"
- "another virtual target '%s'\n"
- "created from '%s'\n"
- "added properties: '%s'\n"
- "removed properties: '%s'\n" % (actual_name,
- self.actual_ [actual_name], "loc", #cmt1.location (),
- virtual_target,
- "loc", #cmt2.location (),
- properties_added, properties_removed))
+ get_manager().errors()(
+ "Duplicate name of actual target: '%s'\n"
+ "previous virtual target '%s'\n"
+ "created from '%s'\n"
+ "another virtual target '%s'\n"
+ "created from '%s'\n"
+ "added properties: '%s'\n"
+ "removed properties: '%s'\n"
+ % (actual_name,
+ self.actual_ [actual_name], "loc", #cmt1.location (),
+ virtual_target,
+ "loc", #cmt2.location (),
+ properties_added, properties_removed))
else:
self.actual_ [actual_name] = virtual_target
@@ -230,7 +238,7 @@
""" Appends the suffix appropriate to 'type/property_set' combination
to the specified name and returns the result.
"""
- suffix = type.generated_target_suffix (file_type, prop_set)
+ suffix = b2.build.type.generated_target_suffix (file_type, prop_set)
if suffix:
return specified_name + '.' + suffix
@@ -249,6 +257,7 @@
self.name_ = name
self.project_ = project
self.dependencies_ = []
+ self.always_ = False
# Caches whether dependencies for scanners have already been set.
self.made_ = {}
@@ -269,15 +278,6 @@
"""
return self.project_
- def set_id(self, id):
- self.id_ = id
-
- def __hash__(self):
- return self.id_
-
- def __cmp__(self, other):
- return self.id_ - other.id_
-
def depends (self, d):
""" Adds additional instances of 'VirtualTarget' that this
one depends on.
@@ -287,6 +287,9 @@
def dependencies (self):
return self.dependencies_
+ def always(self):
+ self.always_ = True
+
def actualize (self, scanner = None):
""" Generates all the actual targets and sets up build actions for
this target.
@@ -301,6 +304,9 @@
"""
actual_name = self.actualize_no_scanner ()
+ if self.always_:
+ bjam.call("ALWAYS", actual_name)
+
if not scanner:
return actual_name
@@ -366,7 +372,7 @@
type: optional type of this target.
"""
- def __init__ (self, name, exact, type, project, action = None):
+ def __init__ (self, name, type, project, action = None, exact=False):
VirtualTarget.__init__ (self, name, project)
self.type_ = type
@@ -528,16 +534,22 @@
if tag:
- rule_names = [t[:1] for t in tag if t[0] == '@']
- if rule_names:
- if len(tag) > 1:
- self.manager_.errors()(
-"""<tag>@rulename is present but is not the only <tag> feature""")
-
- self.name_ = bjam.call(rule_names[0], specified_name, self.type_, ps)
+ if len(tag) > 1:
+ get_manager().errors()(
+ """<tag>@rulename is present but is not the only <tag> feature""")
+
+ tag = tag[0]
+ if callable(tag):
+ self.name_ = tag(specified_name, self.type_, ps)
else:
- self.manager_.errors()(
-"""The value of the <tag> feature must be '@rule-nane'""")
+ if not tag[0] == '@':
+ self.manager_.errors()("""The value of the <tag> feature must be '@rule-nane'""")
+
+ exported_ps = b2.util.value_to_jam(ps, methods=True)
+ self.name_ = b2.util.call_jam_function(
+ tag[1:], specified_name, self.type_, exported_ps)
+ if self.name_:
+ self.name_ = self.name_[0]
# If there's no tag or the tag rule returned nothing.
if not tag or not self.name_:
@@ -566,11 +578,16 @@
return name
+@bjam_signature((["specified_name"], ["type"], ["property_set"]))
def add_prefix_and_suffix(specified_name, type, property_set):
"""Appends the suffix appropriate to 'type/property-set' combination
to the specified name and returns the result."""
- suffix = b2.build.type.generated_target_suffix(type, property_set)
+ property_set = b2.util.jam_to_value_maybe(property_set)
+
+ suffix = ""
+ if type:
+ suffix = b2.build.type.generated_target_suffix(type, property_set)
# Handle suffixes for which no leading dot is desired. Those are
# specified by enclosing them in <...>. Needed by python so it
@@ -580,7 +597,9 @@
elif suffix:
suffix = "." + suffix
- prefix = b2.build.type.generated_target_prefix(type, property_set)
+ prefix = ""
+ if type:
+ prefix = b2.build.type.generated_target_prefix(type, property_set)
if specified_name.startswith(prefix):
prefix = ""
@@ -607,14 +626,20 @@
- the value passed to the 'suffix' method, if any, or
- the suffix which correspond to the target's type.
"""
- def __init__ (self, name, exact, type, project, action = None, path=None):
- AbstractFileTarget.__init__ (self, name, exact, type, project, action)
+ def __init__ (self, name, type, project, action = None, path=None, exact=False):
+ AbstractFileTarget.__init__ (self, name, type, project, action, exact)
self.path_ = path
+ def __str__(self):
+ if self.type_:
+ return self.name_ + "." + self.type_
+ else:
+ return self.name_
+
def clone_with_different_type(self, new_type):
- return FileTarget(self.name_, 1, new_type, self.project_,
- self.action_, self.path_)
+ return FileTarget(self.name_, new_type, self.project_,
+ self.action_, self.path_, exact=True)
def actualize_location (self, target):
engine = self.project_.manager_.engine ()
@@ -655,6 +680,11 @@
# for test.o will be <ptest/bin/gcc/debug>test.o and the target
# we create below will be <e>test.o
engine.add_dependency("<e>%s" % get_value(target), target)
+
+ # Allow bjam <path-to-file>/<file> to work. This won't catch all
+ # possible ways to refer to the path (relative/absolute, extra ".",
+ # various ".."), but should help in obvious cases.
+ engine.add_dependency("<e>%s" % (os.path.join(path, get_value(target))), target)
else:
# This is a source file.
@@ -666,13 +696,13 @@
"""
if not self.path_:
if self.action_:
- p = self.action_.properties ()
- target_path = p.target_path ()
+ p = self.action_.properties ()
+ (target_path, relative_to_build_dir) = p.target_path ()
- if target_path [1] == True:
+ if relative_to_build_dir:
# Indicates that the path is relative to
# build dir.
- target_path = os.path.join (self.project_.build_dir (), target_path [0])
+ target_path = os.path.join (self.project_.build_dir (), target_path)
# Store the computed path, so that it's not recomputed
# any more
@@ -683,8 +713,8 @@
class NotFileTarget(AbstractFileTarget):
- def __init__(self, name, project):
- AbstractFileTarget.__init__(name, project)
+ def __init__(self, name, project, action):
+ AbstractFileTarget.__init__(self, name, None, project, action)
def path(self):
"""Returns nothing, to indicate that target path is not known."""
@@ -692,7 +722,8 @@
def actualize_location(self, target):
bjam.call("NOTFILE", target)
- bjam.call("ALWAYS", taget)
+ bjam.call("ALWAYS", target)
+ bjam.call("NOUPDATE", target)
class Action:
@@ -704,11 +735,16 @@
not establish dependency relationship, but should do everything else.
"""
def __init__ (self, manager, sources, action_name, prop_set):
+ assert(isinstance(prop_set, property_set.PropertySet))
+ assert type(sources) == types.ListType
self.sources_ = sources
self.action_name_ = action_name
if not prop_set:
prop_set = property_set.empty()
self.properties_ = prop_set
+ if not all(isinstance(v, VirtualTarget) for v in prop_set.get('implicit-dependency')):
+ import pdb
+ pdb.set_trace()
self.manager_ = manager
self.engine_ = self.manager_.engine ()
@@ -724,6 +760,10 @@
def add_targets (self, targets):
self.targets_ += targets
+
+ def replace_targets (old_targets, new_targets):
+ self.targets_ = [t for t in targets if not t in old_targets] + new_targets
+
def targets (self):
return self.targets_
@@ -746,6 +786,8 @@
ps = self.properties ()
properties = self.adjust_properties (ps)
+
+
actual_targets = []
for i in self.targets ():
@@ -755,16 +797,30 @@
self.engine_.add_dependency (actual_targets, self.actual_sources_ + self.dependency_only_sources_)
- raw_properties = properties.raw ()
+ # This works around a bug with -j and actions that
+ # produce multiple targets, where:
+ # - dependency on the first output is found, and
+ # the action is started
+ # - dependency on the second output is found, and
+ # bjam noticed that command is already running
+ # - instead of waiting for the command, dependents
+ # of the second targets are immediately updated.
+ if len(actual_targets) > 1:
+ bjam.call("INCLUDES", actual_targets, actual_targets)
# FIXME: check the comment below. Was self.action_name_ [1]
# Action name can include additional argument to rule, which should not
# be passed to 'set-target-variables'
# FIXME: breaking circular dependency
import toolset
- toolset.set_target_variables (self.manager_, self.action_name_, actual_targets, raw_properties)
+ toolset.set_target_variables (self.manager_, self.action_name_, actual_targets, properties)
engine = self.manager_.engine ()
+
+ # FIXME: this is supposed to help --out-xml option, but we don't
+ # implement that now, and anyway, we should handle it in Python,
+ # not by putting variables on bjam-level targets.
+ bjam.call("set-target-variable", actual_targets, ".action", repr(self))
self.manager_.engine ().set_update_action (self.action_name_, actual_targets, self.actual_sources_,
properties)
@@ -772,7 +828,7 @@
# Since we set up creating action here, we also set up
# action for cleaning up
self.manager_.engine ().set_update_action ('common.Clean', 'clean-all',
- actual_targets, None)
+ actual_targets)
return actual_targets
@@ -790,9 +846,10 @@
# i = self.manager_.get_object (i)
if i.type ():
- scanner = type.get_scanner (i.type (), prop_set)
+ scanner = b2.build.type.get_scanner (i.type (), prop_set)
- result.append (i.actualize (scanner))
+ r = i.actualize (scanner)
+ result.append (r)
return result
@@ -823,6 +880,7 @@
# if we're building just hello ("bjam hello"), 'a.h' won't be
# actualized unless we do it here.
implicit = self.properties_.get("<implicit-dependency>")
+
for i in implicit:
i.actualize()
@@ -842,7 +900,7 @@
actions which create them.
"""
def __init__ (self, manager, prop_set):
- Action.__init__ (self, manager, None, None, prop_set)
+ Action.__init__ (self, manager, [], None, prop_set)
def actualize (self):
if not self.actualized_:
@@ -858,11 +916,14 @@
def __init__(self, sources, action_name, property_set):
#FIXME: should the manager parameter of Action.__init__
#be removed? -- Steven Watanabe
- Action.__init__(b2.manager.get_manager(), sources, action_name, property_set)
+ Action.__init__(self, b2.manager.get_manager(), sources, action_name, property_set)
def actualize_source_type(self, sources, property_set):
-
- return [x for source in sources for x in i.actualize()]
+
+ result = []
+ for s in sources:
+ result.append(s.actualize())
+ return result
def traverse (target, include_roots = False, include_sources = False):
""" Traverses the dependency graph of 'target' and return all targets that will
@@ -908,7 +969,7 @@
if not new_properties:
new_properties = action.properties()
- closed_action = action.__class__(action.sources(), new_action_name,
+ cloned_action = action.__class__(action.manager_, action.sources(), new_action_name,
new_properties)
cloned_targets = []
@@ -916,8 +977,8 @@
n = target.name()
# Don't modify the name of the produced targets. Strip the directory f
- cloned_target = FileTarget(n, 1, target.type(), new_project,
- cloned_action)
+ cloned_target = FileTarget(n, target.type(), new_project,
+ cloned_action, exact=True)
d = target.dependencies()
if d:
@@ -951,13 +1012,11 @@
# Pre-compose the list of other dependency graphs, on which this one
# depends
- deps = build_properties.get ('<implicit-dependency>')
+ deps = build_properties.get('<implicit-dependency>')
self.other_dg_ = []
for d in deps:
- # FIXME: the property must have the actual object here, not a string.
- value = replace_grist (d, '')
- self.other_dg_.append (value.creating_subvariant ())
+ self.other_dg_.append(d.creating_subvariant ())
self.other_dg_ = unique (self.other_dg_)
@@ -985,27 +1044,35 @@
def usage_requirements (self):
return self.usage_requirements_
- def all_referenced_targets(self):
+ def all_referenced_targets(self, result):
"""Returns all targets referenced by this subvariant,
either directly or indirectly, and either as sources,
or as dependency properties. Targets referred with
dependency property are returned as properties, not targets."""
-
+
# Find directly referenced targets.
deps = self.build_properties().dependency()
all_targets = self.sources_ + deps
# Find other subvariants.
r = []
- for t in all_targets:
- r.append(t.creating_subvariant)
+ for e in all_targets:
+ if not e in result:
+ result.add(e)
+ if isinstance(e, property.Property):
+ t = e.value()
+ else:
+ t = e
+
+ # FIXME: how can this be?
+ cs = t.creating_subvariant()
+ if cs:
+ r.append(cs)
r = unique(r)
-
for s in r:
if s != self:
- all_targets.extend(s.all_referenced_targets())
+ s.all_referenced_targets(result)
- return all_targets
def implicit_includes (self, feature, target_type):
""" Returns the properties which specify implicit include paths to
@@ -1041,7 +1108,7 @@
def compute_target_directories(self, target_type=None):
result = []
for t in self.created_targets():
- if not target_type or type.is_derived(t.type(), target_type):
+ if not target_type or b2.build.type.is_derived(t.type(), target_type):
result.append(t.path())
for d in self.other_dg_:
Modified: branches/release/tools/build/v2/build_system.py
==============================================================================
--- branches/release/tools/build/v2/build_system.py (original)
+++ branches/release/tools/build/v2/build_system.py 2011-01-25 13:06:12 EST (Tue, 25 Jan 2011)
@@ -1,4 +1,6 @@
-# Status: being ported by Vladimir Prus.
+# Status: mostly ported. Missing is --out-xml support, 'configure' integration
+# and some FIXME.
+# Base revision: 64351
# Copyright 2003, 2005 Dave Abrahams
# Copyright 2006 Rene Rivera
@@ -6,6 +8,8 @@
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+
from b2.build.engine import Engine
from b2.manager import Manager
from b2.util.path import glob
@@ -17,304 +21,549 @@
from b2.build.errors import ExceptionWithUserContext
import b2.tools.common
+import b2.build.project as project
+import b2.build.virtual_target as virtual_target
+import b2.build.build_request as build_request
+
+import b2.util.regex
+
+from b2.manager import get_manager
+from b2.util import cached
+from b2.util import option
+
+
import bjam
import os
import sys
+import re
-# FIXME:
-# Returns the location of the build system. The primary use case
-# is building Boost, where it's sometimes needed to get location
-# of other components (like BoostBook files), and it's convenient
-# to use location relatively to Boost.Build path.
-#rule location ( )
-#{
-# local r = [ modules.binding build-system ] ;
-# return $(r:P) ;
-#}
-
-# FIXME:
-
-def get_boolean_option(name):
- match = "--" + name
- if match in argv:
- return 1
- else:
- return 0
+################################################################################
+#
+# Module global data.
+#
+################################################################################
+
+# Flag indicating we should display additional debugging information related to
+# locating and loading Boost Build configuration files.
+debug_config = False
+
+# Legacy option doing too many things, some of which are not even documented.
+# Should be phased out.
+# * Disables loading site and user configuration files.
+# * Disables auto-configuration for toolsets specified explicitly on the
+# command-line.
+# * Causes --toolset command-line options to be ignored.
+# * Prevents the default toolset from being used even if no toolset has been
+# configured at all.
+legacy_ignore_config = False
+
+# The cleaning is tricky. Say, if user says 'bjam --clean foo' where 'foo' is a
+# directory, then we want to clean targets which are in 'foo' as well as those
+# in any children Jamfiles under foo but not in any unrelated Jamfiles. To
+# achieve this we collect a list of projects under which cleaning is allowed.
+project_targets = []
+
+# Virtual targets obtained when building main targets references on the command
+# line. When running 'bjam --clean main_target' we want to clean only files
+# belonging to that main target so we need to record which targets are produced
+# for it.
+results_of_main_targets = []
+
+# Was an XML dump requested?
+out_xml = False
+
+# Default toolset & version to be used in case no other toolset has been used
+# explicitly by either the loaded configuration files, the loaded project build
+# scripts or an explicit toolset request on the command line. If not specified,
+# an arbitrary default will be used based on the current host OS. This value,
+# while not strictly necessary, has been added to allow testing Boost-Build's
+# default toolset usage functionality.
+default_toolset = None
+default_toolset_version = None
+
+################################################################################
+#
+# Public rules.
+#
+################################################################################
+
+# Returns the property set with the free features from the currently processed
+# build request.
+#
+def command_line_free_features():
+ return command_line_free_features
+
+# Sets the default toolset & version to be used in case no other toolset has
+# been used explicitly by either the loaded configuration files, the loaded
+# project build scripts or an explicit toolset request on the command line. For
+# more detailed information see the comment related to used global variables.
+#
+def set_default_toolset(toolset, version=None):
+ default_toolset = toolset
+ default_toolset_version = version
-def get_string_option(name):
- match = "--" + name + "="
- for arg in argv:
- if arg.startswith(match):
- return arg[len(match):]
- return None
-
-def home_directories():
- if os.name == "nt":
- result = set()
- try:
- result.add(os.environ['HOMEDRIVE'] + os.environ['HOMEPATH'])
- result.add(os.environ['HOME'])
- result.add(os.environ['USERPROFILE'])
- except KeyError:
- pass
- return list(result)
- else:
- return [os.environ['HOME']]
+pre_build_hook = []
-ignore_config = 0
-debug_config = 0
+def add_pre_build_hook(callable):
+ pre_build_hook.append(callable)
-def load_config(manager, basename, path):
- """Unless ignore-config is set, search configuration
- basename.jam in path and loads it. The jamfile module
- for that file will be loaded 'basename'."""
-
- if not ignore_config:
- found = glob(path, [basename + ".jam"])
- if found:
- found = found[0]
- if debug_config:
- print "notice: searching '%s' for '%s.jam'" % (path, basename)
- if found:
- print "notice: loading %s.jam from %s" % (basename, found)
+post_build_hook = None
- manager.projects().load_standalone(basename, found)
+def set_post_build_hook(callable):
+ post_build_hook = callable
+
+################################################################################
+#
+# Local rules.
+#
+################################################################################
+
+# Returns actual Jam targets to be used for executing a clean request.
+#
+def actual_clean_targets(targets):
+
+ # Construct a list of projects explicitly detected as targets on this build
+ # system run. These are the projects under which cleaning is allowed.
+ for t in targets:
+ if isinstance(t, b2.build.targets.ProjectTarget):
+ project_targets.append(t.project_module())
+
+ # Construct a list of targets explicitly detected on this build system run
+ # as a result of building main targets.
+ targets_to_clean = set()
+ for t in results_of_main_targets:
+ # Do not include roots or sources.
+ targets_to_clean.update(virtual_target.traverse(t))
-def main():
+ to_clean = []
+ for t in get_manager().virtual_targets().all_targets():
+
+ # Remove only derived targets.
+ if t.action():
+ p = t.project()
+ if t in targets_to_clean or should_clean_project(p.project_module()):
+ to_clean.append(t)
- global argv
- argv = bjam.variable("ARGV")
+ return [t.actualize() for t in to_clean]
- # FIXME: document this option.
- if "--profiling" in argv:
- import cProfile
- import pstats
- cProfile.runctx('main_real()', globals(), locals(), "stones.prof")
-
- stats = pstats.Stats("stones.prof")
- stats.strip_dirs()
- stats.sort_stats('time', 'calls')
- stats.print_callers(20)
+_target_id_split = re.compile("(.*)//(.*)")
+
+# Given a target id, try to find and return the corresponding target. This is
+# only invoked when there is no Jamfile in ".". This code somewhat duplicates
+# code in project-target.find but we can not reuse that code without a
+# project-targets instance.
+#
+def find_target(target_id):
+
+ projects = get_manager().projects()
+ m = _target_id_split.match(target_id)
+ if m:
+ pm = projects.find(m.group(1), ".")
else:
- main_real()
+ pm = projects.find(target_id, ".")
-def main_real():
+ if pm:
+ result = projects.target(pm)
- global ignore_config
- global debug_config
-
- boost_build_path = bjam.variable("BOOST_BUILD_PATH")
+ if m:
+ result = result.find(m.group(2))
- engine = Engine()
+ return result
- global_build_dir = get_string_option("build-dir")
- debug_config = get_boolean_option("debug-configuration")
-
- manager = Manager(engine, global_build_dir)
+def initialize_config_module(module_name, location=None):
- # This module defines types and generator and what not,
- # and depends on manager's existence
- import b2.tools.builtin
+ get_manager().projects().initialize(module_name, location)
+# Helper rule used to load configuration files. Loads the first configuration
+# file with the given 'filename' at 'path' into module with name 'module-name'.
+# Not finding the requested file may or may not be treated as an error depending
+# on the must-find parameter. Returns a normalized path to the loaded
+# configuration file or nothing if no file was loaded.
+#
+def load_config(module_name, filename, paths, must_find=False):
- # Check if we can load 'test-config.jam'. If we can, load it and
- # ignore user configs.
-
- test_config = glob(boost_build_path, ["test-config.jam"])
- if test_config:
- test_config = test_config[0]
+ if debug_config:
+ print "notice: Searching '%s' for '%s' configuration file '%s." \
+ % (paths, module_name, filename)
- if test_config:
+ where = None
+ for path in paths:
+ t = os.path.join(path, filename)
+ if os.path.exists(t):
+ where = t
+ break
+
+ if where:
+ where = os.path.realpath(where)
+
if debug_config:
- print "notice: loading testing-config.jam from '%s'" % test_config
- print "notice: user-config.jam and site-config.jam will be ignored"
+ print "notice: Loading '%s' configuration file '%s' from '%s'." \
+ % (module_name, filename, where)
- manager.projects().load_standalone("test-config", test_config)
+ # Set source location so that path-constant in config files
+ # with relative paths work. This is of most importance
+ # for project-config.jam, but may be used in other
+ # config files as well.
+ attributes = get_manager().projects().attributes(module_name) ;
+ attributes.set('source-location', os.path.dirname(where), True)
+ get_manager().projects().load_standalone(module_name, where)
+
+ else:
+ msg = "Configuration file '%s' not found in '%s'." % (filename, path)
+ if must_find:
+ get_manager().errors()(msg)
+
+ elif debug_config:
+ print msg
+
+ return where
+
+# Loads all the configuration files used by Boost Build in the following order:
+#
+# -- test-config --
+# Loaded only if specified on the command-line using the --test-config
+# command-line parameter. It is ok for this file not to exist even if specified.
+# If this configuration file is loaded, regular site and user configuration
+# files will not be. If a relative path is specified, file is searched for in
+# the current folder.
+#
+# -- site-config --
+# Always named site-config.jam. Will only be found if located on the system
+# root path (Windows), /etc (non-Windows), user's home folder or the Boost Build
+# path, in that order. Not loaded in case the test-config configuration file is
+# loaded or either the --ignore-site-config or the --ignore-config command-line
+# option is specified.
+#
+# -- user-config --
+# Named user-config.jam by default or may be named explicitly using the
+# --user-config command-line option or the BOOST_BUILD_USER_CONFIG environment
+# variable. If named explicitly the file is looked for from the current working
+# directory and if the default one is used then it is searched for in the
+# user's home directory and the Boost Build path, in that order. Not loaded in
+# case either the test-config configuration file is loaded, --ignore-config
+# command-line option is specified or an empty file name is explicitly
+# specified. If the file name has been given explicitly then the file must
+# exist.
+#
+# Test configurations have been added primarily for use by Boost Build's
+# internal unit testing system but may be used freely in other places as well.
+#
+def load_configuration_files():
+
+ # Flag indicating that site configuration should not be loaded.
+ ignore_site_config = "--ignore-site-config" in sys.argv
+ if legacy_ignore_config and debug_config:
+ print "notice: Regular site and user configuration files will be ignored"
+ print "notice: due to the --ignore-config command-line option."
+
+ initialize_config_module("test-config")
+ test_config = None
+ for a in sys.argv:
+ m = re.match("--test-config=(.*)$", a)
+ if m:
+ test_config = b2.util.unquote(m.group(1))
+ break
- ignore_config = test_config or get_boolean_option("ignore-config")
- user_path = home_directories() + boost_build_path
+ if test_config:
+ where = load_config("test-config", os.path.basename(test_config), [os.path.dirname(test_config)])
+ if where:
+ if debug_config and not legacy_ignore_config:
+ print "notice: Regular site and user configuration files will"
+ print "notice: be ignored due to the test configuration being loaded."
+ user_path = [os.path.expanduser("~")] + bjam.variable("BOOST_BUILD_PATH")
site_path = ["/etc"] + user_path
- if bjam.variable("OS") in ["NT", "CYGWIN"]:
- site_path = [os.environ("SystemRoot")] + user_path
+ if os.name in ["nt"]:
+ site_path = [os.getenv("SystemRoot")] + user_path
- load_config(manager, "site-config", site_path)
+ if ignore_site_config and not legacy_ignore_config:
+ print "notice: Site configuration files will be ignored due to the"
+ print "notice: --ignore-site-config command-line option."
+
+ initialize_config_module("site-config")
+ if not test_config and not ignore_site_config and not legacy_ignore_config:
+ load_config('site-config', 'site-config.jam', site_path)
+
+ initialize_config_module('user-config')
+ if not test_config and not legacy_ignore_config:
+
+ user_config = None
+ for a in sys.argv:
+ m = re.match("--user-config=(.*)$", a)
+ if m:
+ user_config = m.group(1)
+ break
- user_config_path = get_string_option("user-config")
- if not user_config_path:
- user_config_path = os.environ.get("BOOST_BUILD_USER_CONFIG")
+ if not user_config:
+ user_config = os.getenv("BOOST_BUILD_USER_CONFIG")
+
+ # Special handling for the case when the OS does not strip the quotes
+ # around the file name, as is the case when using Cygwin bash.
+ user_config = b2.util.unquote(user_config)
+ explicitly_requested = user_config
+ if not user_config:
+ user_config = "user-config.jam"
- if user_config_path:
- if debug_config:
- print "Loading explicitly specifier user configuration file:"
- print " %s" % user_config_path
+ if explicitly_requested:
+
+ user_config = os.path.abspath(user_config)
+
+ if debug_config:
+ print "notice: Loading explicitly specified user configuration file:"
+ print " " + user_config
- manager.projects().load_standalone("user-config", user_config_path)
+ load_config('user-config', os.path.basename(user_config), [os.path.dirname(user_config)], True)
+ else:
+ load_config('user-config', os.path.basename(user_config), user_path)
+ elif debug_config:
+ print "notice: User configuration file loading explicitly disabled." ;
+
+ # We look for project-config.jam from "." upward.
+ # I am not sure this is 100% right decision, we might as well check for
+ # it only alongside the Jamroot file. However:
+ #
+ # - We need to load project-root.jam before Jamroot
+ # - We probably would need to load project-root.jam even if there's no
+ # Jamroot - e.g. to implement automake-style out-of-tree builds.
+ if os.path.exists("project-config.jam"):
+ file = ["project-config.jam"]
else:
- load_config(manager, "user-config", user_path)
+ file = b2.util.path.glob_in_parents(".", ["project-config.jam"])
+
+ if file:
+ initialize_config_module('project-config', os.path.dirname(file[0]))
+ load_config('project-config', "project-config.jam", [os.path.dirname(file[0])], True)
+
+
+# Autoconfigure toolsets based on any instances of --toolset=xx,yy,...zz or
+# toolset=xx,yy,...zz in the command line. May return additional properties to
+# be processed as if they had been specified by the user.
+#
+def process_explicit_toolset_requests():
+
+ extra_properties = []
+
+ option_toolsets = [e for option in b2.util.regex.transform(sys.argv, "^--toolset=(.*)$")
+ for e in option.split(',')]
+ feature_toolsets = [e for option in b2.util.regex.transform(sys.argv, "^toolset=(.*)$")
+ for e in option.split(',')]
+
+ for t in option_toolsets + feature_toolsets:
+ # Parse toolset-version/properties.
+ (toolset_version, toolset, version) = re.match("(([^-/]+)-?([^/]+)?)/?.*", t).groups()
-# FIXME:
-## #
-## # Autoconfigure toolsets based on any instances of --toolset=xx,yy,...zz or
-## # toolset=xx,yy,...zz in the command line
-## #
-## local option-toolsets = [ regex.split-list [ MATCH ^--toolset=(.*) : $(argv) ] : "," ] ;
-## local feature-toolsets = [ regex.split-list [ MATCH ^toolset=(.*) : $(argv) ] : "," ] ;
-
-## # if the user specified --toolset=..., we need to add toolset=... to
-## # the build request
-## local extra-build-request ;
-
- extra_build_request = []
-
-## if ! $(ignore-config)
-## {
-## for local t in $(option-toolsets) $(feature-toolsets)
-## {
-## # Parse toolset-version/properties
-## local (t-v,t,v) = [ MATCH (([^-/]+)-?([^/]+)?)/?.* : $(t) ] ;
-## local toolset-version = $((t-v,t,v)[1]) ;
-## local toolset = $((t-v,t,v)[2]) ;
-## local version = $((t-v,t,v)[3]) ;
+ if debug_config:
+ print "notice: [cmdline-cfg] Detected command-line request for '%s': toolset= %s version=%s" \
+ % (toolset_version, toolset, version)
-## if $(debug-config)
-## {
-## ECHO notice: [cmdline-cfg] Detected command-line request for
-## $(toolset-version): toolset= \"$(toolset)\" "version= \""$(version)\" ;
-## }
+ # If the toolset is not known, configure it now.
+ known = False
+ if toolset in feature.values("toolset"):
+ known = True
+
+ if known and version and not feature.is_subvalue("toolset", toolset, "version", version):
+ known = False
+ # TODO: we should do 'using $(toolset)' in case no version has been
+ # specified and there are no versions defined for the given toolset to
+ # allow the toolset to configure its default version. For this we need
+ # to know how to detect whether a given toolset has any versions
+ # defined. An alternative would be to do this whenever version is not
+ # specified but that would require that toolsets correctly handle the
+ # case when their default version is configured multiple times which
+ # should be checked for all existing toolsets first.
+
+ if not known:
+
+ if debug_config:
+ print "notice: [cmdline-cfg] toolset '%s' not previously configured; attempting to auto-configure now" % toolset_version
+ toolset.using(toolset, version)
-## local known ;
+ else:
-## # if the toolset isn't known, configure it now.
-## if $(toolset) in [ feature.values <toolset> ]
-## {
-## known = true ;
-## }
+ if debug_config:
-## if $(known) && $(version)
-## && ! [ feature.is-subvalue toolset : $(toolset) : version : $(version) ]
-## {
-## known = ;
-## }
+ print "notice: [cmdline-cfg] toolset '%s' already configured" % toolset_version
-## if ! $(known)
-## {
-## if $(debug-config)
-## {
-## ECHO notice: [cmdline-cfg] toolset $(toolset-version)
-## not previously configured; configuring now ;
-## }
-## toolset.using $(toolset) : $(version) ;
-## }
-## else
-## {
-## if $(debug-config)
-## {
-## ECHO notice: [cmdline-cfg] toolset $(toolset-version) already configured ;
-## }
-## }
+ # Make sure we get an appropriate property into the build request in
+ # case toolset has been specified using the "--toolset=..." command-line
+ # option form.
+ if not t in sys.argv and not t in feature_toolsets:
-## # make sure we get an appropriate property into the build request in
-## # case the user used the "--toolset=..." form
-## if ! $(t) in $(argv)
-## && ! $(t) in $(feature-toolsets)
-## {
-## if $(debug-config)
-## {
-## ECHO notice: [cmdline-cfg] adding toolset=$(t) "to build request." ;
-## }
-## extra-build-request += toolset=$(t) ;
-## }
-## }
-## }
+ if debug_config:
+ print "notice: [cmdline-cfg] adding toolset=%s) to the build request." % t ;
+ extra_properties += "toolset=%s" % t
+ return extra_properties
-# FIXME:
-## if USER_MODULE in [ RULENAMES ]
-## {
-## USER_MODULE site-config user-config ;
-## }
- if get_boolean_option("version"):
- # FIXME: Move to a separate module. Include bjam
- # verision.
- print "Boost.Build M15 (Python port in development)"
- sys.exit(0)
- b2.tools.common.init(manager)
+# Returns 'true' if the given 'project' is equal to or is a (possibly indirect)
+# child to any of the projects requested to be cleaned in this build system run.
+# Returns 'false' otherwise. Expects the .project-targets list to have already
+# been constructed.
+#
+@cached
+def should_clean_project(project):
- # We always load project in "." so that 'use-project' directives has
- # any chance of been seen. Otherwise, we won't be able to refer to
- # subprojects using target ids.
+ if project in project_targets:
+ return True
+ else:
+
+ parent = get_manager().projects().attribute(project, "parent-module")
+ if parent and parent != "user-config":
+ return should_clean_project(parent)
+ else:
+ return False
+################################################################################
+#
+# main()
+# ------
+#
+################################################################################
+
+def main():
+
+ sys.argv = bjam.variable("ARGV")
+
+ # FIXME: document this option.
+ if "--profiling" in sys.argv:
+ import cProfile
+ r = cProfile.runctx('main_real()', globals(), locals(), "stones.prof")
+
+ import pstats
+ stats = pstats.Stats("stones.prof")
+ stats.strip_dirs()
+ stats.sort_stats('time', 'calls')
+ stats.print_callers(20)
+ return r
+ else:
+ try:
+ return main_real()
+ except ExceptionWithUserContext, e:
+ e.report()
+
+def main_real():
+
+ global debug_config, legacy_ignore_config, out_xml
+
+ debug_config = "--debug-configuration" in sys.argv
+ legacy_ignore_config = "--ignore_config" in sys.argv
+ out_xml = any(re.match("^--out-xml=(.*)$", a) for a in sys.argv)
+
+ engine = Engine()
+
+ global_build_dir = option.get("build-dir")
+ manager = Manager(engine, global_build_dir)
+
+ import b2.build.configure as configure
+
+ if "--version" in sys.argv:
+
+ version.report()
+ return
+
+ # This module defines types and generator and what not,
+ # and depends on manager's existence
+ import b2.tools.builtin
+
+ b2.tools.common.init(manager)
+
+ load_configuration_files()
+
+ extra_properties = []
+ # Note that this causes --toolset options to be ignored if --ignore-config
+ # is specified.
+ if not legacy_ignore_config:
+ extra_properties = process_explicit_toolset_requests()
+
+ # We always load project in "." so that 'use-project' directives have any
+ # chance of being seen. Otherwise, we would not be able to refer to
+ # subprojects using target ids.
current_project = None
- projects = manager.projects()
+ projects = get_manager().projects()
if projects.find(".", "."):
current_project = projects.target(projects.load("."))
- # FIXME: revive this logic, when loading of gcc works
- if not feature.values("<toolset>") and not ignore_config and 0:
- default_toolset = "gcc" ;
- if bjam.variable("OS") == "NT":
- default_toolset = "msvc"
-
- print "warning: No toolsets are configured." ;
- print "warning: Configuring default toolset '%s'" % default_toolset
- print "warning: If the default is wrong, you may not be able to build C++ programs."
- print "warning: Use the \"--toolset=xxxxx\" option to override our guess."
+ # In case there are no toolsets currently defined makes the build run using
+ # the default toolset.
+ if not legacy_ignore_config and not feature.values("toolset"):
+
+ dt = default_toolset
+ dtv = None
+ if default_toolset:
+ dtv = default_toolset_version
+ else:
+ dt = "gcc"
+ if os.name == 'nt':
+ dt = "msvc"
+ # FIXME:
+ #else if [ os.name ] = MACOSX
+ #{
+ # default-toolset = darwin ;
+ #}
+
+ print "warning: No toolsets are configured."
+ print "warning: Configuring default toolset '%s'." % dt
+ print "warning: If the default is wrong, your build may not work correctly."
+ print "warning: Use the \"toolset=xxxxx\" option to override our guess."
print "warning: For more configuration options, please consult"
print "warning: http://boost.org/boost-build2/doc/html/bbv2/advanced/configuration.html"
- projects.project_rules().using([default_toolset])
-
- (target_ids, properties) = b2.build.build_request.from_command_line(
- argv[1:] + extra_build_request)
+ toolset.using(dt, dtv)
+ # Parse command line for targets and properties. Note that this requires
+ # that all project files already be loaded.
+ (target_ids, properties) = build_request.from_command_line(sys.argv[1:] + extra_properties)
+
+ # Expand properties specified on the command line into multiple property
+ # sets consisting of all legal property combinations. Each expanded property
+ # set will be used for a single build run. E.g. if multiple toolsets are
+ # specified then requested targets will be built with each of them.
if properties:
- expanded = b2.build.build_request.expand_no_defaults(properties)
- xexpanded = []
- for e in expanded:
- xexpanded.append(property_set.create(feature.split(e)))
- expanded = xexpanded
+ expanded = build_request.expand_no_defaults(properties)
else:
expanded = [property_set.empty()]
- targets = []
-
- clean = get_boolean_option("clean")
- clean_all = get_boolean_option("clean-all")
-
-
- bjam_targets = []
-
- # Given a target id, try to find and return corresponding target.
- # This is only invoked when there's no Jamfile in "."
- # This code somewhat duplicates code in project-target.find but we can't reuse
- # that code without project-targets instance.
- def find_target (target_id):
- split = target_id.split("//")
- pm = None
- if len(split) > 1:
- pm = projects.find(split[0], ".")
- else:
- pm = projects.find(target_id, ".")
-
- result = None
- if pm:
- result = projects.target(pm)
+ # Check that we actually found something to build.
+ if not current_project and not target_ids:
+ get_manager().errors()("no Jamfile in current directory found, and no target references specified.")
+ # FIXME:
+ # EXIT
+
+ # Flags indicating that this build system run has been started in order to
+ # clean existing instead of create new targets. Note that these are not the
+ # final flag values as they may get changed later on due to some special
+ # targets being specified on the command line.
+ clean = "--clean" in sys.argv
+ cleanall = "--clean-all" in sys.argv
+
+ # List of explicitly requested files to build. Any target references read
+ # from the command line parameter not recognized as one of the targets
+ # defined in the loaded Jamfiles will be interpreted as an explicitly
+ # requested file to build. If any such files are explicitly requested then
+ # only those files and the targets they depend on will be built and they
+ # will be searched for among targets that would have been built had there
+ # been no explicitly requested files.
+ explicitly_requested_files = []
- if len(split) > 1:
- result = result.find(split[1])
+ # List of Boost Build meta-targets, virtual-targets and actual Jam targets
+ # constructed in this build system run.
+ targets = []
+ virtual_targets = []
+ actual_targets = []
- if not current_project and not target_ids:
- print "error: no Jamfile in current directory found, and no target references specified."
- sys.exit(1)
+ explicitly_requested_files = []
+ # Process each target specified on the command-line and convert it into
+ # internal Boost Build target objects. Detect special clean target. If no
+ # main Boost Build targets were explicitly requested use the current project
+ # as the target.
for id in target_ids:
if id == "clean":
clean = 1
@@ -328,24 +577,40 @@
if not t:
print "notice: could not find main target '%s'" % id
print "notice: assuming it's a name of file to create " ;
- bjam_targets.append(id)
+ explicitly_requested_files.append(id)
else:
targets.append(t)
if not targets:
targets = [projects.target(projects.module_name("."))]
+
+ # FIXME: put this BACK.
+ ## if [ option.get dump-generators : : true ]
+ ## {
+ ## generators.dump ;
+ ## }
+
+
+ # We wish to put config.log in the build directory corresponding
+ # to Jamroot, so that the location does not differ depending on
+ # directory where we do build. The amount of indirection necessary
+ # here is scary.
+ first_project = targets[0].project()
+ first_project_root_location = first_project.get('project-root')
+ first_project_root_module = manager.projects().load(first_project_root_location)
+ first_project_root = manager.projects().target(first_project_root_module)
+ first_build_build_dir = first_project_root.build_dir()
+ configure.set_log_file(os.path.join(first_build_build_dir, "config.log"))
+
virtual_targets = []
- # Virtual targets obtained when building main targets references on
- # the command line. When running
- #
- # bjam --clean main_target
- #
- # we want to clean the files that belong only to that main target,
- # so we need to record which targets are produced.
- results_of_main_targets = []
+ global results_of_main_targets
+ # Now that we have a set of targets to build and a set of property sets to
+ # build the targets with, we can start the main build process by using each
+ # property set to generate virtual targets from all of our listed targets
+ # and any of their dependants.
for p in expanded:
manager.set_command_line_free_features(property_set.create(p.free()))
@@ -360,78 +625,248 @@
except Exception:
raise
- # The cleaning is tricky. Say, if
- # user says:
- #
- # bjam --clean foo
- #
- # where 'foo' is a directory, then we want to clean targets
- # which are in 'foo' or in any children Jamfiles, but not in any
- # unrelated Jamfiles. So, we collect the list of project under which
- # cleaning is allowed.
- #
- projects_to_clean = []
- targets_to_clean = []
- if clean or clean_all:
- for t in targets:
- if isinstance(t, ProjectTarget):
- projects_to_clean.append(t.project_module())
-
- for t in results_of_main_targets:
- # Don't include roots or sources.
- targets_to_clean += b2.build.virtual_target.traverse(t)
-
- targets_to_clean = unique(targets_to_clean)
-
- is_child_cache_ = {}
-
- # Returns 'true' if 'project' is a child of 'current-project',
- # possibly indirect, or is equal to 'project'.
- # Returns 'false' otherwise.
- def is_child (project):
-
- r = is_child_cache_.get(project, None)
- if not r:
- if project in projects_to_clean:
- r = 1
- else:
- parent = manager.projects().attribute(project, "parent-module")
- if parent and parent != "user-config":
- r = is_child(parent)
- else:
- r = 0
+ # Convert collected virtual targets into actual raw Jam targets.
+ for t in virtual_targets:
+ actual_targets.append(t.actualize())
- is_child_cache_[project] = r
- return r
+ # FIXME: restore
+## # If XML data output has been requested prepare additional rules and targets
+## # so we can hook into Jam to collect build data while its building and have
+## # it trigger the final XML report generation after all the planned targets
+## # have been built.
+## if $(.out-xml)
+## {
+## # Get a qualified virtual target name.
+## rule full-target-name ( target )
+## {
+## local name = [ $(target).name ] ;
+## local project = [ $(target).project ] ;
+## local project-path = [ $(project).get location ] ;
+## return $(project-path)//$(name) ;
+## }
- actual_targets = []
- for t in virtual_targets:
- actual_targets.append(t.actualize())
+## # Generate an XML file containing build statistics for each constituent.
+## #
+## rule out-xml ( xml-file : constituents * )
+## {
+## # Prepare valid XML header and footer with some basic info.
+## local nl = "
+## " ;
+## local jam = [ version.jam ] ;
+## local os = [ modules.peek : OS OSPLAT JAMUNAME ] "" ;
+## local timestamp = [ modules.peek : JAMDATE ] ;
+## local cwd = [ PWD ] ;
+## local command = $(.sys.argv) ;
+## local bb-version = [ version.boost-build ] ;
+## .header on $(xml-file) =
+## "<?xml version=\"1.0\" encoding=\"utf-8\"?>"
+## "$(nl)<build format=\"1.0\" version=\"$(bb-version)\">"
+## "$(nl) <jam version=\"$(jam:J=.)\" />"
+## "$(nl) <os name=\"$(os[1])\" platform=\"$(os[2])\"><![CDATA[$(os[3-]:J= )]]></os>"
+## "$(nl) <timestamp><![CDATA[$(timestamp)]]></timestamp>"
+## "$(nl) <directory><![CDATA[$(cwd)]]></directory>"
+## "$(nl) <command><![CDATA[\"$(command:J=\" \")\"]]></command>"
+## ;
+## .footer on $(xml-file) =
+## "$(nl)</build>" ;
+
+## # Generate the target dependency graph.
+## .contents on $(xml-file) +=
+## "$(nl) <targets>" ;
+## for local t in [ virtual-target.all-targets ]
+## {
+## local action = [ $(t).action ] ;
+## if $(action)
+## # If a target has no action, it has no dependencies.
+## {
+## local name = [ full-target-name $(t) ] ;
+## local sources = [ $(action).sources ] ;
+## local dependencies ;
+## for local s in $(sources)
+## {
+## dependencies += [ full-target-name $(s) ] ;
+## }
+
+## local path = [ $(t).path ] ;
+## local jam-target = [ $(t).actual-name ] ;
+
+## .contents on $(xml-file) +=
+## "$(nl) <target>"
+## "$(nl) <name><![CDATA[$(name)]]></name>"
+## "$(nl) <dependencies>"
+## "$(nl) <dependency><![CDATA[$(dependencies)]]></dependency>"
+## "$(nl) </dependencies>"
+## "$(nl) <path><![CDATA[$(path)]]></path>"
+## "$(nl) <jam-target><![CDATA[$(jam-target)]]></jam-target>"
+## "$(nl) </target>"
+## ;
+## }
+## }
+## .contents on $(xml-file) +=
+## "$(nl) </targets>" ;
+## # Build $(xml-file) after $(constituents). Do so even if a
+## # constituent action fails and regenerate the xml on every bjam run.
+## INCLUDES $(xml-file) : $(constituents) ;
+## ALWAYS $(xml-file) ;
+## __ACTION_RULE__ on $(xml-file) = build-system.out-xml.generate-action ;
+## out-xml.generate $(xml-file) ;
+## }
- bjam.call("NOTFILE", "all")
- bjam.call("DEPENDS", "all", actual_targets)
+## # The actual build actions are here; if we did this work in the actions
+## # clause we would have to form a valid command line containing the
+## # result of @(...) below (the name of the XML file).
+## #
+## rule out-xml.generate-action ( args * : xml-file
+## : command status start end user system : output ? )
+## {
+## local contents =
+## [ on $(xml-file) return $(.header) $(.contents) $(.footer) ] ;
+## local f = @($(xml-file):E=$(contents)) ;
+## }
- if bjam_targets:
- bjam.call("UPDATE", ["<e>%s" % x for x in bjam_targets])
- elif clean_all:
- bjam.call("UPDATE", "clean-all")
- elif clean:
- to_clean = []
- for t in manager.virtual_targets().all_targets():
- p = t.project()
+## # Nothing to do here; the *real* actions happen in
+## # out-xml.generate-action.
+## actions quietly out-xml.generate { }
+
+## # Define the out-xml file target, which depends on all the targets so
+## # that it runs the collection after the targets have run.
+## out-xml $(.out-xml) : $(actual-targets) ;
+
+## # Set up a global __ACTION_RULE__ that records all the available
+## # statistics about each actual target in a variable "on" the --out-xml
+## # target.
+## #
+## rule out-xml.collect ( xml-file : target : command status start end user
+## system : output ? )
+## {
+## local nl = "
+## " ;
+## # Open the action with some basic info.
+## .contents on $(xml-file) +=
+## "$(nl) <action status=\"$(status)\" start=\"$(start)\" end=\"$(end)\" user=\"$(user)\" system=\"$(system)\">" ;
+
+## # If we have an action object we can print out more detailed info.
+## local action = [ on $(target) return $(.action) ] ;
+## if $(action)
+## {
+## local action-name = [ $(action).action-name ] ;
+## local action-sources = [ $(action).sources ] ;
+## local action-props = [ $(action).properties ] ;
+
+## # The qualified name of the action which we created the target.
+## .contents on $(xml-file) +=
+## "$(nl) <name><![CDATA[$(action-name)]]></name>" ;
+
+## # The sources that made up the target.
+## .contents on $(xml-file) +=
+## "$(nl) <sources>" ;
+## for local source in $(action-sources)
+## {
+## local source-actual = [ $(source).actual-name ] ;
+## .contents on $(xml-file) +=
+## "$(nl) <source><![CDATA[$(source-actual)]]></source>" ;
+## }
+## .contents on $(xml-file) +=
+## "$(nl) </sources>" ;
+
+## # The properties that define the conditions under which the
+## # target was built.
+## .contents on $(xml-file) +=
+## "$(nl) <properties>" ;
+## for local prop in [ $(action-props).raw ]
+## {
+## local prop-name = [ MATCH ^<(.*)>$ : $(prop:G) ] ;
+## .contents on $(xml-file) +=
+## "$(nl) <property name=\"$(prop-name)\"><![CDATA[$(prop:G=)]]></property>" ;
+## }
+## .contents on $(xml-file) +=
+## "$(nl) </properties>" ;
+## }
- # Remove only derived targets.
- if t.action() and \
- (t in targets_to_clean or is_child(p.project_module())):
- to_clean.append(t)
+## local locate = [ on $(target) return $(LOCATE) ] ;
+## locate ?= "" ;
+## .contents on $(xml-file) +=
+## "$(nl) <jam-target><![CDATA[$(target)]]></jam-target>"
+## "$(nl) <path><![CDATA[$(target:G=:R=$(locate))]]></path>"
+## "$(nl) <command><![CDATA[$(command)]]></command>"
+## "$(nl) <output><![CDATA[$(output)]]></output>" ;
+## .contents on $(xml-file) +=
+## "$(nl) </action>" ;
+## }
- to_clean_actual = [t.actualize() for t in to_clean]
- manager.engine().set_update_action('common.Clean', 'clean',
- to_clean_actual, None)
+## # When no __ACTION_RULE__ is set "on" a target, the search falls back to
+## # the global module.
+## module
+## {
+## __ACTION_RULE__ = build-system.out-xml.collect
+## [ modules.peek build-system : .out-xml ] ;
+## }
+
+## IMPORT
+## build-system :
+## out-xml.collect
+## out-xml.generate-action
+## : :
+## build-system.out-xml.collect
+## build-system.out-xml.generate-action
+## ;
+## }
+
+ j = option.get("jobs")
+ if j:
+ bjam.call("set-variable", PARALLELISM, j)
+
+ k = option.get("keep-going", "true", "true")
+ if k in ["on", "yes", "true"]:
+ bjam.call("set-variable", "KEEP_GOING", "1")
+ elif k in ["off", "no", "false"]:
+ bjam.call("set-variable", "KEEP_GOING", "0")
+ else:
+ print "error: Invalid value for the --keep-going option"
+ sys.exit()
+
+ # The 'all' pseudo target is not strictly needed except in the case when we
+ # use it below but people often assume they always have this target
+ # available and do not declare it themselves before use which may cause
+ # build failures with an error message about not being able to build the
+ # 'all' target.
+ bjam.call("NOTFILE", "all")
+ # And now that all the actual raw Jam targets and all the dependencies
+ # between them have been prepared all that is left is to tell Jam to update
+ # those targets.
+ if explicitly_requested_files:
+ # Note that this case can not be joined with the regular one when only
+ # exact Boost Build targets are requested as here we do not build those
+ # requested targets but only use them to construct the dependency tree
+ # needed to build the explicitly requested files.
+ # FIXME: add $(.out-xml)
+ bjam.call("UPDATE", ["<e>%s" % x for x in explicitly_requested_files])
+ elif cleanall:
+ bjam.call("UPDATE", "clean-all")
+ elif clean:
+ manager.engine().set_update_action("common.Clean", "clean",
+ actual_clean_targets(targets))
bjam.call("UPDATE", "clean")
+ else:
+ # FIXME:
+ #configure.print-configure-checks-summary ;
+
+ if pre_build_hook:
+ for h in pre_build_hook:
+ h()
+
+ bjam.call("DEPENDS", "all", actual_targets)
+ ok = bjam.call("UPDATE_NOW", "all") # FIXME: add out-xml
+ if post_build_hook:
+ post_build_hook(ok)
+ # Prevent automatic update of the 'all' target, now that
+ # we have explicitly updated what we wanted.
+ bjam.call("UPDATE")
+ if manager.errors().count() == 0:
+ return ["ok"]
else:
- bjam.call("UPDATE", "all")
+ return []
Modified: branches/release/tools/build/v2/doc/jamfile.jam
==============================================================================
--- branches/release/tools/build/v2/doc/jamfile.jam (original)
+++ branches/release/tools/build/v2/doc/jamfile.jam 2011-01-25 13:06:12 EST (Tue, 25 Jan 2011)
@@ -17,7 +17,7 @@
<xsl:param>boost.root=../../../../..
;
-xml jam_docs : ../../../jam/doc/bjam.qbk ;
+xml jam_docs : bjam.qbk ;
if ! $(BOOST_ROOT)
{
Modified: branches/release/tools/build/v2/doc/src/install.xml
==============================================================================
--- branches/release/tools/build/v2/doc/src/install.xml (original)
+++ branches/release/tools/build/v2/doc/src/install.xml 2011-01-25 13:06:12 EST (Tue, 25 Jan 2011)
@@ -6,92 +6,57 @@
<title>Installation</title>
<para>
- This section describes how to install Boost.Build from a
- released <ulink
- url="http://sourceforge.net/project/showfiles.php?group_id=7586&package_id=8041">Boost
- source distribution</ulink>
- or <ulink url="http://sourceforge.net/cvs/?group_id=7586">CVS
- image</ulink>.
- <footnote>
- <para>Note that packages prepared for
- Unix/Linux systems usually make their own choices about where to
- put things and even which parts of Boost to include. When we
- say “released source distribution” we mean a
- distribution of Boost as released on its SourceForge
- <ulink url="http://sourceforge.net/project/showfiles.php?group_id=7586&package_id=8041">project
- page</ulink>.
- </para>
- </footnote>
-All paths are given relative to
- the <firstterm>Boost.Build v2 root directory</firstterm>, which is
-
- <!-- the normal location of the document you are reading.
- Boost.Build is -->
-
- <!-- That is a lie AFAICT, at least in a Boost distro. You need
- to say something that will be true if you want to cover BBv2 as
- distributed separately -->
-
- located in the <filename>tools/build/v2</filename> subdirectory
- of a full Boost <ulink url="http://sourceforge.net/project/showfiles.php?group_id=7586&package_id=8041">distribution</ulink>.
- <footnote>
- <para>The Boost.Build subset of boost is also distributed
- separately, for those who are only interested in getting a
- build tool. The top-level directory of a <ulink
- url="http://sourceforge.net/project/showfiles.php?group_id=7586&package_id=80982">Boost.Build
- distribution</ulink> contains all the subdirectories of the
- <filename>tools/build/v2</filename> subdirectory from a full
- Boost distribution, so it is itself a valid Boost.Build root
- directory. It also contains the
- <filename>tools/jam/src</filename> subdirectory of a
- full Boost distribution, so you can rebuild Boost.Jam from
- source.
- </para>
- </footnote>
+ To install Boost.Build from an official release or a nightly build, as
+ available on the <ulink url="http://boost.org/boost-build2">official web site</ulink>,
+ follow these steps:
</para>
<orderedlist>
<listitem>
<simpara>
- Boost.Build uses <ulink
- url= "../../tools/jam/index.html">Boost.Jam</ulink>, an
- extension of the <ulink
- url="http://www.perforce.com/jam/jam.html">Perforce
- Jam</ulink> portable <command>make</command> replacement. The
- recommended way to get Boost.Jam is to <emphasis
- role="bold"><ulink
- url= "http://sourceforge.net/project/showfiles.php?group_id=7586&package_id=72941">download
- a prebuilt executable</ulink></emphasis> from SourceForge.
- If a prebuilt executable is not provided for your platform
- or you are using Boost's sources in an unreleased state, it
- may be necessary to
- <link linkend="jam.building">build <command>bjam</command>
- from sources</link> included in the Boost source tree.
+ Unpack the release. On the command line, go to the root of the
+ unpacked tree.
</simpara>
</listitem>
<listitem>
- <para>
-
- To install Boost.Jam, copy the executable,
- called <command>bjam</command>
- or <command>bjam.exe</command> to a location accessible in
- your <envar>PATH</envar>. Go to the Boost.Build root
- directory and
- run <command>bjam <option>--version</option></command>. You
- should see:
-
- <screen>
- Boost.Build V2 (Milestone N)
- Boost.Jam xx.xx.xx
- </screen>
+ <simpara>
+ Run either <command>.\bootstrap.bat</command> (on Windows), or
+ <command>./bootstrap.sh</command> (on other operating systems).
+ </simpara>
+ </listitem>
- where N is the version of Boost.Build you're using.
- </para>
+ <listitem>
+ <simpara>
+ Run
+ <screen>./bjam install --prefix=<replaceable>PREFIX</replaceable></screen>
+ where <replaceable>PREFIX</replaceable> is a directory where you
+ want Boost.Build to be installed.
+ </simpara>
</listitem>
<listitem>
<simpara>
+ Optionally, add <filename><replaceable>PREFIX</replaceable>/bin</filename>
+ to your <envar>PATH</envar> environment variable.
+ </simpara>
+ </listitem>
+ </orderedlist>
+
+ <para>If you are not using Boost.Build package, but rather the version
+ bundled with the Boost C++ Libraries, the above commands should be run
+ in the <filename>tools/build/v2</filename> directory.</para>
+
+ <para>
+ Now that Boost.Build is installed, you can try some of examples. Copy
+ <filename><replaceable>PREFIX</replaceable>/share/boost-build/examples/hello</filename>
+ to a different directory, then change to that directory and run:
+<screen><filename><replaceable>PREFIX</replaceable>/bin/bjam</filename></screen>
+ A simple executable should be built.
+ </para>
+
+ <!--
+ <simpara>
Configure Boost.Build to recognize the build resources (such
as compilers and libraries) you have installed on your
system. Open the
@@ -101,14 +66,8 @@
are located.
</simpara>
</listitem>
+ -->
- <listitem>
- <simpara>
- You should now be able to go to the
- <filename>example/hello/</filename> directory and run
- <command>bjam</command> there. A simple application will be
- built. You can also play with other projects in the
- <filename>example/</filename> directory.
<!-- This part should not go into intoduction docs, but we need to
place it somewhere.
@@ -119,47 +78,9 @@
from accidentally overwriting your config when updating.</para>
-->
- </simpara>
- </listitem>
- </orderedlist>
-
- <para>
- If you are using Boost's CVS state, be sure to
- rebuild <command>bjam</command> even if you have a previous
- version. The CVS version of Boost.Build requires the CVS
- version of Boost.Jam.
- </para>
-
- <para>
- When <command>bjam</command> is invoked, it always needs to be
- able to find the Boost.Build root directory, where the
- interpreted source code of Boost.Build is located. There are
- two ways to tell <command>bjam</command> about the root directory:
- </para>
-
- <itemizedlist>
- <listitem>
- <simpara>
- Set the environment variable <envar>BOOST_BUILD_PATH</envar>
- to the absolute path of the Boost.Build root directory.
- </simpara>
- </listitem>
-
- <listitem>
- <para>
- At the root directory of your project or in any of its
- parent directories, create a file called
- <filename>boost-build.jam</filename>, with a single line:
-<programlisting>
-boost-build <replaceable>/path/to/boost.build</replaceable> ;
-</programlisting>
-
- </para>
- </listitem>
- </itemizedlist>
-
- <bridgehead>Information for distributors</bridgehead>
+ <!--
+ <bridgehead>Information for distributors</bridgehead>
<para>
If you're planning to package Boost.Build for a Linux distribution,
@@ -203,7 +124,8 @@
</para>
-
+ -->
+
</chapter>
Modified: branches/release/tools/build/v2/doc/src/overview.xml
==============================================================================
--- branches/release/tools/build/v2/doc/src/overview.xml (original)
+++ branches/release/tools/build/v2/doc/src/overview.xml 2011-01-25 13:06:12 EST (Tue, 25 Jan 2011)
@@ -711,20 +711,6 @@
</varlistentry>
<varlistentry>
- <term><option>-d+2</option></term>
- <listitem>
- <para>Show commands as they are executed.</para>
- </listitem>
- </varlistentry>
-
- <varlistentry>
- <term><option>-d0</option></term>
- <listitem>
- <para>Supress all informational messages.</para>
- </listitem>
- </varlistentry>
-
- <varlistentry>
<term><option>-q</option></term>
<listitem>
<para>Stop at first error, as opposed to continuing to build targets
@@ -771,7 +757,63 @@
<literal>user-config.jam</literal> configuration files.
</para>
</listitem>
- </varlistentry>
+ </varlistentry>
+
+ <varlistentry>
+ <term><option>-d0</option></term>
+ <listitem>
+ <para>Suppress all informational messages.</para>
+ </listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term><option>-d <replaceable>N</replaceable></option></term>
+ <listitem>
+ <para>Enable cumulative debugging levels from 1 to n. Values are:
+ <orderedlist>
+ <listitem>Show the actions taken for building targets, as they are executed (the default).</listitem>
+ <listitem>Show "quiet" actions and display all action text, as they are executed.</listitem>
+ <listitem>Show dependency analysis, and target/source timestamps/paths.</listitem>
+ <listitem>Show arguments and timing of shell invocations.</listitem>
+ <listitem>Show rule invocations and variable expansions.</listitem>
+ <listitem>Show directory/header file/archive scans, and attempts at binding to targets.</listitem>
+ <listitem>Show variable settings.</listitem>
+ <listitem>Show variable fetches, variable expansions, and evaluation of '"if"' expressions.</listitem>
+ <listitem>Show variable manipulation, scanner tokens, and memory usage.</listitem>
+ <listitem>Show profile information for rules, both timing and memory.</listitem>
+ <listitem>Show parsing progress of Jamfiles.</listitem>
+ <listitem>Show graph of target dependencies.</listitem>
+ <listitem>Show change target status (fate).</listitem>
+ </orderedlist>
+ </para>
+ </listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term><option>-d +<replaceable>N</replaceable></option></term>
+ <listitem>
+ <para>Enable debugging level <replaceable>N</replaceable>.</para>
+ </listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term><option>-o <replaceable>file</replaceable></option></term>
+ <listitem>
+ <para>Write the updating actions to the specified file instead of running them.
+ </para>
+ </listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term><option>-s <replaceable>var</replaceable>=<replaceable>value</replaceable></option></term>
+ <listitem>
+ <para>Set the variable <replaceable>var</replaceable> to
+ <replaceable>value</replaceable> in the global scope of the jam
+ language interpreter, overriding variables imported from the
+ environment.
+ </para>
+ </listitem>
+ </varlistentry>
</variablelist>
</section>
Modified: branches/release/tools/build/v2/doc/src/reference.xml
==============================================================================
--- branches/release/tools/build/v2/doc/src/reference.xml (original)
+++ branches/release/tools/build/v2/doc/src/reference.xml 2011-01-25 13:06:12 EST (Tue, 25 Jan 2011)
@@ -95,7 +95,7 @@
<term><literal>unit-test</literal></term>
<listitem><para>Creates an executable that will be automatically run. See
- <xref linkend="bbv2.tutorial.testing"/>.</para></listitem>
+ <xref linkend="bbv2.builtins.testing"/>.</para></listitem>
</varlistentry>
<varlistentry>
@@ -107,7 +107,7 @@
<term><literal>run-fail</literal></term>
<listitem><para>Specialized rules for testing. See
- <xref linkend="bbv2.tutorial.testing"/>.</para></listitem>
+ <xref linkend="bbv2.builtins.testing"/>.</para></listitem>
</varlistentry>
Modified: branches/release/tools/build/v2/doc/src/standalone.xml
==============================================================================
--- branches/release/tools/build/v2/doc/src/standalone.xml (original)
+++ branches/release/tools/build/v2/doc/src/standalone.xml 2011-01-25 13:06:12 EST (Tue, 25 Jan 2011)
@@ -39,10 +39,8 @@
<appendix id="bbv2.jam">
<title>Boost.Jam Documentation</title>
<xi:include href="jam_docs.xml" parse="xml"
- xpointer="xpointer(id('jam.intro')|id('jam.intro')/following-sibling::*)"/>
+ xpointer="xpointer(id('jam.building')|id('jam.building')/following-sibling::*)"/>
</appendix>
-
- <xi:include href="v1_vs_v2.xml"/>
<index/>
Modified: branches/release/tools/build/v2/doc/src/tutorial.xml
==============================================================================
--- branches/release/tools/build/v2/doc/src/tutorial.xml (original)
+++ branches/release/tools/build/v2/doc/src/tutorial.xml 2011-01-25 13:06:12 EST (Tue, 25 Jan 2011)
@@ -431,10 +431,6 @@
</tip>
</section>
- <section id="bbv2.tutorial.testing">
- <title>Testing</title>
- </section>
-
<section id="bbv2.tutorial.linkage">
<title>Static and shared libaries</title>
Modified: branches/release/tools/build/v2/doc/src/userman.xml
==============================================================================
--- branches/release/tools/build/v2/doc/src/userman.xml (original)
+++ branches/release/tools/build/v2/doc/src/userman.xml 2011-01-25 13:06:12 EST (Tue, 25 Jan 2011)
@@ -36,6 +36,5 @@
<!-- Appendicies -->
<!-- <xi:include href="architecture.xml"/> -->
- <xi:include href="v1_vs_v2.xml"/>
</part>
Deleted: branches/release/tools/build/v2/doc/src/v1_vs_v2.xml
==============================================================================
--- branches/release/tools/build/v2/doc/src/v1_vs_v2.xml 2011-01-25 13:06:12 EST (Tue, 25 Jan 2011)
+++ (empty file)
@@ -1,111 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!DOCTYPE appendix PUBLIC "-//Boost//DTD BoostBook XML V1.0//EN"
- "http://www.boost.org/tools/boostbook/dtd/boostbook.dtd">
-
-<!-- Copyright 2006 Vladimir Prus -->
-<!-- Distributed under the Boost Software License, Version 1.0. -->
-<!-- (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) -->
-
-<appendix id="bbv2.vs_v1">
- <title>Differences to Boost.Build V1</title>
- <!-- "Differences to" is the British English usage. American
- English is "differences from." You can use the former but be
- sure you know what you're doing -->
-
- <para>While Boost.Build V2 is based on the same ideas as Boost.Build V1,
- some of the syntax was changed, and some new important features were
- added. This chapter describes most of the changes.</para>
-
- <section id="bbv2.overview.differences_to_v1.configuration">
- <title>Configuration</title>
-
- <para>In V1, toolsets were configured by environment variables. If you
- wanted to use two versions of the same toolset, you had to create a new
- toolset module that would set the variables and then invoke the base
- toolset. In V2, toolsets are configured by the
- <functionname>using</functionname>, and you can easily configure several
- versions of a toolset. See <xref
- linkend="bbv2.overview.configuration"/> for details.
- </para>
-
- </section>
-
- <section id="bbv2.overview.differences_to_v1.jamfiles">
- <title>Writing Jamfiles</title>
-
- <para>Probably one of the most important differences in V2 Jamfiles is
- the use of project requirements. In V1, if several targets had the same
- requirements (for example, a common <code>#include</code> path), it was necessary to
- manually write the requirements or use a helper rule or template target. In V2, the
- common properties can be specified with the <code>requirements</code> project
- attribute, as documented in <xref linkend="bbv2.overview.projects"/>.
- </para>
-
- <para><link linkend="bbv2.tutorial.libs">Usage requirements</link>
- also help to simplify Jamfiles.
- <!-- Simplify, simplify, simplify! You could go through the
- entire document several times and make changes like that
- one -->
- If a library requires
- all clients to use specific <code>#include</code> paths or macros when compiling
- code that depends on the library, that information can be cleanly
- represented.</para>
-
- <para>The difference between <code>lib</code> and <code>dll</code> targets in V1 is completely
- eliminated in V2. There's only one library target type, <code>lib</code>, which can create
- either static or shared libraries depending on the value of the
- <link linkend="bbv2.overview.builtins.features.link"><varname><link></varname>
- feature</link>. If your target should be only built in one way<!--"variant" has a different meaning here-->, you
- can add <code><link>shared</code> or <code><link>static</code> to its requirements.
- </para>
-
- <para>The syntax for referring to other targets was changed a bit. While
- in V1 one would use:
-<programlisting>
-exe a : a.cpp <lib>../foo/bar ;
-</programlisting>
- the V2 syntax is:
-<programlisting>
-exe a : a.cpp ../foo//bar ;
-</programlisting>
- Note that you don't need to specify the type of other target, but the
- last element should be separated from the others by a double slash to indicate that
- you're referring to target <filename>bar</filename> in project <filename>../foo</filename>, and not to
- project <filename>../foo/bar</filename>.
- </para>
-
-
- </section>
-
- <section id="bbv2.overview.differences_to_v1.build_process">
- <title>Build process</title>
-
- <para>The command line syntax in V2 is completely different. For example
-<programlisting>
-bjam -sTOOLS=msvc -sBUILD=release some_target
-</programlisting>
- now becomes:
-<programlisting>
-bjam toolset=msvc variant=release some_target
-</programlisting>
- or, using implicit features, just:
-<programlisting>
-bjam msvc release some_target
-</programlisting>
- See <link linkend="bbv2.overview.invocation">the reference</link> for a
- complete description of the syntax.
- </para>
-
-
- </section>
- </appendix>
-
-
-<!--
- Local Variables:
- mode: xml
- sgml-indent-data: t
- sgml-parent-document: ("userman.xml" "chapter")
- sgml-set-face: t
- End:
--->
Modified: branches/release/tools/build/v2/engine/src/Jambase
==============================================================================
--- branches/release/tools/build/v2/engine/src/Jambase (original)
+++ branches/release/tools/build/v2/engine/src/Jambase 2011-01-25 13:06:12 EST (Tue, 25 Jan 2011)
@@ -128,9 +128,13 @@
# Boost.Build files.
local search-path = $(BOOST_BUILD_PATH) $(BOOST_ROOT) ;
+ local self = [ SELF_PATH ] ;
+ local boost-build-relative = ../../share/boost-build ;
+ local self-based-path = [ NORMALIZE_PATH $(boost-build-relative:R=$(self)) ] ;
local boost-build-files =
[ find-to-root [ PWD ] : boost-build.jam ]
+ [ GLOB $(self-based-path) : boost-build.jam ]
# Another temporary measure so Jam works with Boost.Build v1.
[ GLOB $(search-path) : boost-build.jam ] ;
@@ -152,6 +156,7 @@
}
ECHO "Attempted search from" [ PWD ] "up to the root" ;
+ ECHO "at" $(self-based-path) ;
ECHO "and in these directories from BOOST_BUILD_PATH and BOOST_ROOT: "$(search-path:J=", ")"." ;
EXIT "Please consult the documentation at 'http://www.boost.org'." ;
}
Modified: branches/release/tools/build/v2/engine/src/build.jam
==============================================================================
--- branches/release/tools/build/v2/engine/src/build.jam (original)
+++ branches/release/tools/build/v2/engine/src/build.jam 2011-01-25 13:06:12 EST (Tue, 25 Jan 2011)
@@ -193,6 +193,14 @@
[ opt --debug : --no_inlining ]
-I$(--python-include) -I$(--extra-include)
: -L$(--python-lib[1]) -l$(--python-lib[2]) ;
+## Clang Linux 2.8+
+toolset clang clang : "-o " : -D
+ : -Wno-unused -Wno-format
+ [ opt --release : -Os ]
+ [ opt --debug : -g -O0 -fno-inline ]
+ [ opt --profile : -finline-functions -g ]
+ -I$(--python-include) -I$(--extra-include)
+ : -L$(--python-lib[1]) -l$(--python-lib[2]) ;
## MacOSX Darwin, using GCC 2.9.x, 3.x
toolset darwin cc : "-o " : -D
:
@@ -215,6 +223,13 @@
[ opt --debug : -s -O3 -fno-inline -pg ]
-I$(--python-include) -I$(--extra-include)
: -L$(--python-lib[1]) -l$(--python-lib[2]) ;
+## Intel C/C++ for Darwin
+toolset intel-darwin icc : "-o " : -D
+ :
+ [ opt --release : -O3 ]
+ [ opt --debug : -g -O0 -p ]
+ -I$(--python-include) -I$(--extra-include)
+ : -L$(--python-lib[1]) -l$(--python-lib[2]) ;
## Intel C/C++ for Linux
toolset intel-linux icc : "-o " : -D
:
Modified: branches/release/tools/build/v2/engine/src/build.sh
==============================================================================
--- branches/release/tools/build/v2/engine/src/build.sh (original)
+++ branches/release/tools/build/v2/engine/src/build.sh 2011-01-25 13:06:12 EST (Tue, 25 Jan 2011)
@@ -29,8 +29,8 @@
echo "### ./build.sh gcc"
echo "###"
echo "### Toolsets supported by this script are:"
- echo "### acc, como, darwin, gcc, intel-linux, kcc, kylix, mipspro,"
- echo "### mingw(msys), pathscale, pgi, qcc, sun, sunpro, tru64cxx, vacpp"
+ echo "### acc, como, darwin, gcc, intel-darwin, intel-linux, kcc, kylix,"
+ echo "### mipspro, mingw(msys), pathscale, pgi, qcc, sun, sunpro, tru64cxx, vacpp"
echo "###"
echo "### A special toolset; cc, is available which is used as a fallback"
echo "### when a more specific toolset is not found and the cc command is"
@@ -136,6 +136,10 @@
BOOST_JAM_CC=cc
;;
+ intel-darwin)
+ BOOST_JAM_CC=icc
+ ;;
+
intel-linux)
if test -r /opt/intel/cc/9.0/bin/iccvars.sh ; then
BOOST_JAM_TOOLSET_ROOT=/opt/intel/cc/9.0/
@@ -204,6 +208,11 @@
BOOST_JAM_CC=cc
;;
+ clang*)
+ BOOST_JAM_CC="clang -Wno-unused -Wno-format"
+ BOOST_JAM_TOOLSET=clang
+ ;;
+
tru64cxx)
BOOST_JAM_CC=cc
;;
Modified: branches/release/tools/build/v2/engine/src/builtins.c
==============================================================================
--- branches/release/tools/build/v2/engine/src/builtins.c (original)
+++ branches/release/tools/build/v2/engine/src/builtins.c 2011-01-25 13:06:12 EST (Tue, 25 Jan 2011)
@@ -26,6 +26,8 @@
#include "timestamp.h"
#include "md5.h"
#include <ctype.h>
+# include <sys/types.h>
+# include <sys/wait.h>
#if defined(USE_EXECUNIX)
# include <sys/types.h>
@@ -394,6 +396,11 @@
builtin_precious, 0, args );
}
+ {
+ char * args [] = { 0 };
+ bind_builtin( "SELF_PATH", builtin_self_path, 0, args );
+ }
+
/* Initialize builtin modules. */
init_set();
init_path();
@@ -1346,6 +1353,8 @@
{
original_noexec = globs.noexec;
globs.noexec = 0;
+ original_quitquick = globs.quitquick;
+ globs.quitquick = 0;
}
if (continue_)
@@ -1364,6 +1373,7 @@
if (force)
{
globs.noexec = original_noexec;
+ globs.quitquick = original_quitquick;
}
if (continue_)
@@ -1733,6 +1743,22 @@
return L0;
}
+LIST *builtin_self_path( PARSE *parse, FRAME *frame )
+{
+ extern char *saved_argv0;
+ char *p = executable_path (saved_argv0);
+ if (p)
+ {
+ LIST* result = list_new (0, newstr (p));
+ free(p);
+ return result;
+ }
+ else
+ {
+ return L0;
+ }
+}
+
#ifdef HAVE_PYTHON
@@ -1928,8 +1954,12 @@
/*
- * Accepts three arguments: module name, rule name and Python callable. Creates
- * a bjam rule with the specified name in the specified module, which will
+ * Accepts four arguments:
+ * - module name
+ * - rule name,
+ * - Python callable.
+ * - (optional) bjam language function signature.
+ * Creates a bjam rule with the specified name in the specified module, which will
* invoke the Python callable.
*/
@@ -1938,10 +1968,12 @@
char * module;
char * rule;
PyObject * func;
+ PyObject * bjam_signature = NULL;
module_t * m;
RULE * r;
- if ( !PyArg_ParseTuple( args, "ssO:import_rule", &module, &rule, &func ) )
+ if ( !PyArg_ParseTuple( args, "ssO|O:import_rule",
+ &module, &rule, &func, &bjam_signature ) )
return NULL;
if ( !PyCallable_Check( func ) )
@@ -1958,6 +1990,22 @@
Py_INCREF( func );
r->python_function = func;
+ r->arguments = 0;
+
+ if (bjam_signature)
+ {
+ argument_list * arg_list = args_new();
+ Py_ssize_t i;
+
+ Py_ssize_t s = PySequence_Size (bjam_signature);
+ for (i = 0; i < s; ++i)
+ {
+ PyObject* v = PySequence_GetItem (bjam_signature, i);
+ lol_add(arg_list->data, list_from_python (v));
+ Py_DECREF(v);
+ }
+ r->arguments = arg_list;
+ }
Py_INCREF( Py_None );
return Py_None;
@@ -2061,6 +2109,13 @@
return result;
}
+PyObject * bjam_caller( PyObject * self, PyObject * args )
+{
+ PyObject *result = PyString_FromString(
+ frame_before_python_call->prev->module->name);
+ return result;
+}
+
#endif /* #ifdef HAVE_PYTHON */
@@ -2142,6 +2197,14 @@
#endif
+static char * rtrim(char *s)
+{
+ char *p = s;
+ while(*p) ++p;
+ for(--p; p >= s && isspace((unsigned char)*p); *p-- = 0); /* cast: passing a negative char to isspace() is undefined behavior */
+ return s;
+}
+
LIST * builtin_shell( PARSE * parse, FRAME * frame )
{
LIST * command = lol_get( frame->args, 0 );
@@ -2153,6 +2216,7 @@
int exit_status = -1;
int exit_status_opt = 0;
int no_output_opt = 0;
+ int strip_eol_opt = 0;
/* Process the variable args options. */
{
@@ -2168,6 +2232,10 @@
{
no_output_opt = 1;
}
+ else if ( strcmp("strip-eol", arg->string) == 0 )
+ {
+ strip_eol_opt = 1;
+ }
arg = lol_get( frame->args, ++a );
}
}
@@ -2189,6 +2257,8 @@
buffer[ret] = 0;
if ( !no_output_opt )
{
+ if ( strip_eol_opt )
+ rtrim(buffer);
string_append( &s, buffer );
}
}
@@ -2202,6 +2272,10 @@
/* The command exit result next. */
if ( exit_status_opt )
{
+ if ( WIFEXITED(exit_status) )
+ exit_status = WEXITSTATUS(exit_status);
+ else
+ exit_status = -1;
sprintf( buffer, "%d", exit_status );
result = list_new( result, newstr( buffer ) );
}
Modified: branches/release/tools/build/v2/engine/src/builtins.h
==============================================================================
--- branches/release/tools/build/v2/engine/src/builtins.h (original)
+++ branches/release/tools/build/v2/engine/src/builtins.h 2011-01-25 13:06:12 EST (Tue, 25 Jan 2011)
@@ -60,6 +60,7 @@
LIST *builtin_file_open( PARSE *parse, FRAME *frame );
LIST *builtin_pad( PARSE *parse, FRAME *frame );
LIST *builtin_precious( PARSE *parse, FRAME *frame );
+LIST *builtin_self_path( PARSE *parse, FRAME *frame );
void backtrace( FRAME *frame );
extern int last_update_now_status;
Modified: branches/release/tools/build/v2/engine/src/compile.c
==============================================================================
--- branches/release/tools/build/v2/engine/src/compile.c (original)
+++ branches/release/tools/build/v2/engine/src/compile.c 2011-01-25 13:06:12 EST (Tue, 25 Jan 2011)
@@ -534,8 +534,15 @@
inner->prev_user = frame->module->user_module ? frame : frame->prev_user;
inner->module = frame->module; /* This gets fixed up in evaluate_rule(), below. */
inner->procedure = parse;
- for ( p = parse->left; p; p = p->left )
- lol_add( inner->args, parse_evaluate( p->right, frame ) );
+ /* Special-case LOL of length 1 where the first list is totally empty.
+ This is created when calling functions with no parameters, due to
+ the way jam grammar is written. This is OK when one jam function
+ calls another, but really not good when Jam function calls Python. */
+ if ( parse->left->left == NULL && parse->left->right->func == compile_null)
+ ;
+ else
+ for ( p = parse->left; p; p = p->left )
+ lol_add( inner->args, parse_evaluate( p->right, frame ) );
/* And invoke the rule. */
result = evaluate_rule( parse->string, inner );
@@ -707,6 +714,7 @@
LIST* value = 0;
char modifier;
LIST* arg_name = formal; /* hold the argument name for type checking */
+ int multiple = 0;
/* Stop now if a variable number of arguments are specified */
if ( name[0] == '*' && name[1] == 0 )
@@ -722,6 +730,7 @@
case '+':
case '*':
value = list_copy( 0, actual );
+ multiple = 1;
actual = 0;
/* skip an extra element for the modifier */
formal = formal->next;
@@ -738,7 +747,8 @@
}
}
- locals = addsettings( locals, VAR_SET, name, value );
+ locals = addsettings(locals, VAR_SET, name, value);
+ locals->multiple = multiple;
type_check( type_name, value, frame, rule, arg_name );
type_name = 0;
}
@@ -760,32 +770,101 @@
static int python_instance_number = 0;
+
+/* Given a Python object, return a string to use in Jam
+ code instead of said object.
+ If the object is string, use the string value
+ If the object implements the __jam_repr__ method, use that.
+ Otherwise return 0.
+
+ The result value is newstr-ed. */
+char *python_to_string(PyObject* value)
+{
+ if (PyString_Check(value))
+ {
+ return newstr(PyString_AsString(value));
+ }
+ else
+ {
+ /* See if this is an instance that defines special __jam_repr__
+ method. */
+ if (PyInstance_Check(value)
+ && PyObject_HasAttrString(value, "__jam_repr__"))
+ {
+ PyObject* repr = PyObject_GetAttrString(value, "__jam_repr__");
+ if (repr)
+ {
+ PyObject* arguments2 = PyTuple_New(0);
+ PyObject* value2 = PyObject_Call(repr, arguments2, 0);
+ Py_DECREF(repr);
+ Py_DECREF(arguments2);
+ /* Guard against a failed call and avoid leaking value2 on the
+ success path: copy the string out before releasing the object. */
+ if (value2 && PyString_Check(value2))
+ {
+ char *s = newstr(PyString_AsString(value2));
+ Py_DECREF(value2);
+ return s;
+ }
+ Py_XDECREF(value2);
+ }
+ }
+ return 0;
+ }
+}
+
static LIST*
call_python_function(RULE* r, FRAME* frame)
{
LIST * result = 0;
- PyObject * arguments = PyTuple_New( frame->args->count );
+ PyObject * arguments = 0;
+ PyObject * kw = NULL;
int i ;
PyObject * py_result;
- for ( i = 0; i < frame->args->count; ++i )
+ if (r->arguments)
{
- PyObject * arg = PyList_New(0);
- LIST* l = lol_get( frame->args, i);
+ SETTINGS * args;
+
+ arguments = PyTuple_New(0);
+ kw = PyDict_New();
+
+ for (args = collect_arguments(r, frame); args; args = args->next)
+ {
+ PyObject *key = PyString_FromString(args->symbol);
+ PyObject *value = 0;
+ if (args->multiple)
+ value = list_to_python(args->value);
+ else {
+ if (args->value)
+ value = PyString_FromString(args->value->string);
+ }
- for ( ; l; l = l->next )
+ if (value)
+ PyDict_SetItem(kw, key, value);
+ Py_DECREF(key);
+ Py_XDECREF(value);
+ }
+ }
+ else
+ {
+ arguments = PyTuple_New( frame->args->count );
+ for ( i = 0; i < frame->args->count; ++i )
{
- PyObject * v = PyString_FromString(l->string);
- /* Steals reference to 'v' */
- PyList_Append( arg, v );
+ PyObject * arg = PyList_New(0);
+ LIST* l = lol_get( frame->args, i);
+
+ for ( ; l; l = l->next )
+ {
+ PyObject * v = PyString_FromString(l->string);
+ PyList_Append( arg, v );
+ Py_DECREF(v);
+ }
+ /* Steals reference to 'arg' */
+ PyTuple_SetItem( arguments, i, arg );
}
- /* Steals reference to 'arg' */
- PyTuple_SetItem( arguments, i, arg );
}
frame_before_python_call = frame;
- py_result = PyObject_CallObject( r->python_function, arguments );
- Py_DECREF( arguments );
+ py_result = PyObject_Call( r->python_function, arguments, kw );
+ Py_DECREF(arguments);
+ Py_XDECREF(kw);
if ( py_result != NULL )
{
if ( PyList_Check( py_result ) )
@@ -795,54 +874,31 @@
for ( i = 0; i < size; ++i )
{
PyObject * item = PyList_GetItem( py_result, i );
- if ( PyString_Check( item ) )
- {
- result = list_new( result,
- newstr( PyString_AsString( item ) ) );
- }
- else
- {
+ char *s = python_to_string (item);
+ if (!s) {
fprintf( stderr, "Non-string object returned by Python call.\n" );
+ } else {
+ result = list_new (result, s);
}
}
}
- else if ( PyInstance_Check( py_result ) )
- {
- static char instance_name[1000];
- static char imported_method_name[1000];
- module_t * m;
- PyObject * method;
- PyObject * method_name = PyString_FromString("foo");
- RULE * r;
-
- fprintf(stderr, "Got instance!\n");
-
- snprintf(instance_name, 1000,
- "pyinstance%d", python_instance_number);
- snprintf(imported_method_name, 1000,
- "pyinstance%d.foo", python_instance_number);
- ++python_instance_number;
-
- m = bindmodule(instance_name);
-
- /* This is expected to get bound method. */
- method = PyObject_GetAttr(py_result, method_name);
-
- r = bindrule( imported_method_name, root_module() );
-
- r->python_function = method;
-
- result = list_new(0, newstr(instance_name));
-
- Py_DECREF( method_name );
- }
else if ( py_result == Py_None )
{
result = L0;
}
- else
+ else
{
- fprintf(stderr, "Non-list object returned by Python call\n");
+ char *s = python_to_string(py_result);
+ if (s)
+ result = list_new(0, s);
+ else
+ /* We have tried all we could. Return empty list. There are
+ cases, e.g. feature.feature function that should return
+ value for the benefit of Python code and which also can be
+ called by Jam code, where no sensible value can be
+ returned. We cannot even emit a warning, since there will
+ be a pile of them. */
+ result = L0;
}
Py_DECREF( py_result );
Modified: branches/release/tools/build/v2/engine/src/jam.c
==============================================================================
--- branches/release/tools/build/v2/engine/src/jam.c (original)
+++ branches/release/tools/build/v2/engine/src/jam.c 2011-01-25 13:06:12 EST (Tue, 25 Jan 2011)
@@ -208,8 +208,11 @@
extern PyObject * bjam_define_action( PyObject * self, PyObject * args );
extern PyObject * bjam_variable ( PyObject * self, PyObject * args );
extern PyObject * bjam_backtrace ( PyObject * self, PyObject * args );
+ extern PyObject * bjam_caller ( PyObject * self, PyObject * args );
#endif
+char *saved_argv0;
+
int main( int argc, char * * argv, char * * arg_environ )
{
int n;
@@ -221,6 +224,8 @@
char * * arg_v = argv;
char const * progname = argv[0];
+ saved_argv0 = argv[0];
+
BJAM_MEM_INIT();
# ifdef OS_MAC
@@ -292,7 +297,14 @@
anyhow++;
if ( ( s = getoptval( optv, 'j', 0 ) ) )
+ {
globs.jobs = atoi( s );
+ if (globs.jobs == 0)
+ {
+ printf("Invalid value for the '-j' option.\n");
+ exit(EXITBAD);
+ }
+ }
if ( ( s = getoptval( optv, 'g', 0 ) ) )
globs.newestfirst = 1;
@@ -345,6 +357,8 @@
"Obtains a variable from bjam's global module."},
{"backtrace", bjam_backtrace, METH_VARARGS,
"Returns bjam backtrace from the last call into Python."},
+ {"caller", bjam_caller, METH_VARARGS,
+ "Returns the module from which the last call into Python is made."},
{NULL, NULL, 0, NULL}
};
@@ -562,3 +576,57 @@
return status ? EXITBAD : EXITOK;
}
+
+#if defined(_WIN32)
+#include <windows.h>
+char *executable_path(char *argv0) {
+ char buf[1024];
+ DWORD ret = GetModuleFileName(NULL, buf, sizeof(buf));
+ if (ret == 0 || ret == sizeof(buf)) return NULL;
+ return strdup (buf);
+}
+#elif defined(__APPLE__) /* Not tested */
+#include <mach-o/dyld.h>
+char *executable_path(char *argv0) {
+ char buf[1024];
+ uint32_t size = sizeof(buf);
+ int ret = _NSGetExecutablePath(buf, &size);
+ if (ret != 0) return NULL;
+ return strdup(buf);
+}
+#elif defined(sun) || defined(__sun) /* Not tested */
+#include <stdlib.h>
+
+char *executable_path(char *argv0) {
+ return strdup(getexecname());
+}
+#elif defined(__FreeBSD__)
+#include <sys/sysctl.h>
+char *executable_path(char *argv0) {
+ int mib[4];
+ mib[0] = CTL_KERN;
+ mib[1] = KERN_PROC;
+ mib[2] = KERN_PROC_PATHNAME;
+ mib[3] = -1;
+ char buf[1024];
+ size_t size = sizeof(buf);
+ sysctl(mib, 4, buf, &size, NULL, 0);
+ if (size == 0 || size == sizeof(buf)) return NULL;
+ return strndup(buf, size);
+}
+#elif defined(__linux__)
+#include <unistd.h>
+char *executable_path(char *argv0) {
+ char buf[1024];
+ ssize_t ret = readlink("/proc/self/exe", buf, sizeof(buf));
+ if (ret <= 0 || ret == (ssize_t)sizeof(buf)) return NULL; /* readlink returns -1 on error, never 0 for /proc/self/exe */
+ return strndup(buf, ret);
+}
+#else
+char *executable_path(char *argv0) {
+ /* If argv0 is absolute path, assume it's the right absolute path. */
+ if (argv0[0] == '/') /* character literal, not string literal: comparing a char to a pointer is always false */
+ return strdup(argv0);
+ return NULL;
+}
+#endif
Modified: branches/release/tools/build/v2/engine/src/jambase.c
==============================================================================
--- branches/release/tools/build/v2/engine/src/jambase.c (original)
+++ branches/release/tools/build/v2/engine/src/jambase.c 2011-01-25 13:06:12 EST (Tue, 25 Jan 2011)
@@ -68,8 +68,12 @@
"|| $(BOOST_ROOT) # A temporary measure so Jam works with Boost.Build v1.\n",
"{\n",
"local search-path = $(BOOST_BUILD_PATH) $(BOOST_ROOT) ;\n",
+"local self = [ SELF_PATH ] ;\n",
+"local boost-build-relative = ../../share/boost-build ;\n",
+"local self-based-path = [ NORMALIZE_PATH $(boost-build-relative:R=$(self)) ] ;\n",
"local boost-build-files =\n",
"[ find-to-root [ PWD ] : boost-build.jam ]\n",
+"[ GLOB $(self-based-path) : boost-build.jam ]\n",
"[ GLOB $(search-path) : boost-build.jam ] ;\n",
".boost-build-file = $(boost-build-files[1]) ;\n",
"if ! $(.boost-build-file)\n",
@@ -84,6 +88,7 @@
"ECHO ;\n",
"}\n",
"ECHO \"Attempted search from\" [ PWD ] \"up to the root\" ;\n",
+"ECHO \"at\" $(self-based-path) ;\n",
"ECHO \"and in these directories from BOOST_BUILD_PATH and BOOST_ROOT: \"$(search-path:J=\", \")\".\" ;\n",
"EXIT \"Please consult the documentation at 'http://www.boost.org'.\" ;\n",
"}\n",
Modified: branches/release/tools/build/v2/engine/src/lists.c
==============================================================================
--- branches/release/tools/build/v2/engine/src/lists.c (original)
+++ branches/release/tools/build/v2/engine/src/lists.c 2011-01-25 13:06:12 EST (Tue, 25 Jan 2011)
@@ -303,3 +303,37 @@
list_print( lol->list[ i ] );
}
}
+
+#ifdef HAVE_PYTHON
+
+PyObject *list_to_python(LIST *l)
+{
+ PyObject *result = PyList_New(0);
+
+ for (; l; l = l->next)
+ {
+ PyObject* s = PyString_FromString(l->string);
+ PyList_Append(result, s);
+ Py_DECREF(s);
+ }
+
+ return result;
+}
+
+LIST *list_from_python(PyObject *l)
+{
+ LIST * result = 0;
+
+ Py_ssize_t i, n;
+ n = PySequence_Size(l);
+ for (i = 0; i < n; ++i)
+ {
+ PyObject *v = PySequence_GetItem(l, i);
+ result = list_new (result, newstr (PyString_AsString(v)));
+ Py_DECREF(v);
+ }
+
+ return result;
+}
+
+#endif
Modified: branches/release/tools/build/v2/engine/src/lists.h
==============================================================================
--- branches/release/tools/build/v2/engine/src/lists.h (original)
+++ branches/release/tools/build/v2/engine/src/lists.h 2011-01-25 13:06:12 EST (Tue, 25 Jan 2011)
@@ -45,6 +45,10 @@
#ifndef LISTS_DWA20011022_H
# define LISTS_DWA20011022_H
+#ifdef HAVE_PYTHON
+#include <Python.h>
+#endif
+
/*
* LIST - list of strings
*/
@@ -93,5 +97,12 @@
void lol_print( LOL *lol );
void lol_build( LOL* lol, char** elements );
+#ifdef HAVE_PYTHON
+
+PyObject *list_to_python(LIST *l);
+LIST *list_from_python(PyObject *l);
+
+#endif
+
#endif
Modified: branches/release/tools/build/v2/engine/src/modules/order.c
==============================================================================
--- branches/release/tools/build/v2/engine/src/modules/order.c (original)
+++ branches/release/tools/build/v2/engine/src/modules/order.c 2011-01-25 13:06:12 EST (Tue, 25 Jan 2011)
@@ -50,8 +50,10 @@
if (colors[adjacent_vertex] == white)
do_ts(graph, adjacent_vertex, colors, result_ptr);
- else if (colors[adjacent_vertex] == gray)
- ; /* This is loop. Not sure what to do... */
+ /* The vertex is either black, in which case we don't have to do
+ anything, a gray, in which case we have a loop. If we have a loop,
+ it's not clear what useful diagnostic we can emit, so we emit
+ nothing. */
}
colors[current_vertex] = black;
**result_ptr = current_vertex;
Modified: branches/release/tools/build/v2/engine/src/pathsys.h
==============================================================================
--- branches/release/tools/build/v2/engine/src/pathsys.h (original)
+++ branches/release/tools/build/v2/engine/src/pathsys.h 2011-01-25 13:06:12 EST (Tue, 25 Jan 2011)
@@ -79,4 +79,13 @@
const char * path_tmpfile( void );
#endif
+/** Give the first argument to 'main', return a full path to
+ our executable. Returns null in the unlikely case it
+ cannot be determined. Caller is responsible for freeing
+ the string.
+
+ Implemented in jam.c
+*/
+char * executable_path (char *argv0);
+
#endif
Modified: branches/release/tools/build/v2/engine/src/rules.c
==============================================================================
--- branches/release/tools/build/v2/engine/src/rules.c (original)
+++ branches/release/tools/build/v2/engine/src/rules.c 2011-01-25 13:06:12 EST (Tue, 25 Jan 2011)
@@ -421,6 +421,7 @@
v->symbol = newstr( symbol );
v->value = value;
v->next = head;
+ v->multiple = 0;
head = v;
}
else if ( flag == VAR_APPEND )
Modified: branches/release/tools/build/v2/engine/src/rules.h
==============================================================================
--- branches/release/tools/build/v2/engine/src/rules.h (original)
+++ branches/release/tools/build/v2/engine/src/rules.h 2011-01-25 13:06:12 EST (Tue, 25 Jan 2011)
@@ -119,6 +119,7 @@
SETTINGS * next;
char * symbol; /* symbol name for var_set() */
LIST * value; /* symbol value for var_set() */
+ int multiple;
};
/* TARGETS - a chain of TARGETs. */
Modified: branches/release/tools/build/v2/engine/src/scan.c
==============================================================================
--- branches/release/tools/build/v2/engine/src/scan.c (original)
+++ branches/release/tools/build/v2/engine/src/scan.c 2011-01-25 13:06:12 EST (Tue, 25 Jan 2011)
@@ -318,6 +318,8 @@
c = '\n';
else if (c == 'r')
c = '\r';
+ else if (c == 't')
+ c = '\t';
*b++ = c;
notkeyword = 1;
}
Deleted: branches/release/tools/build/v2/example/customization/jamfile.jam
==============================================================================
--- branches/release/tools/build/v2/example/customization/jamfile.jam 2011-01-25 13:06:12 EST (Tue, 25 Jan 2011)
+++ (empty file)
@@ -1,7 +0,0 @@
-# Copyright 2003, 2004 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-
-exe codegen : codegen.cpp class.verbatim usage.verbatim
- t1.verbatim ;
Modified: branches/release/tools/build/v2/example/customization/jamroot.jam
==============================================================================
--- branches/release/tools/build/v2/example/customization/jamroot.jam (original)
+++ branches/release/tools/build/v2/example/customization/jamroot.jam 2011-01-25 13:06:12 EST (Tue, 25 Jan 2011)
@@ -2,5 +2,8 @@
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
import verbatim ;
+
+exe codegen : codegen.cpp class.verbatim usage.verbatim
+ t1.verbatim ;
+
Modified: branches/release/tools/build/v2/example/generate/jamroot.jam
==============================================================================
--- branches/release/tools/build/v2/example/generate/jamroot.jam (original)
+++ branches/release/tools/build/v2/example/generate/jamroot.jam 2011-01-25 13:06:12 EST (Tue, 25 Jan 2011)
@@ -2,30 +2,8 @@
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-import "class" : new ;
-import common ;
+import generate ;
-rule generate-example ( project name : property-set : sources * )
-{
- local result ;
- for local s in $(sources)
- {
- #local source-name = [ $(s).name ] ;
- #local source-action = [ $(s).action ] ;
- #local source-properties = [ $(source-action).properties ] ;
+import gen ;
- # Create a new action, that takes the source target and runs the
- # 'common.copy' command on it.
- local a = [ new non-scanning-action $(s) : common.copy : $(property-set)
- ] ;
-
- # Create a target to represent the action result. Uses the target name
- # passed here via the 'name' parameter and the same type and project as
- # the source.
- result += [ new file-target $(name) : [ $(s).type ] : $(project) : $(a)
- ] ;
- }
- return $(result) ;
-}
-
-generate a2 : a.cpp : <generating-rule>@generate-example ;
+generate a2 : a.cpp : <generating-rule>@gen.generate-example ;
Modified: branches/release/tools/build/v2/example/make/jamroot.jam
==============================================================================
--- branches/release/tools/build/v2/example/make/jamroot.jam (original)
+++ branches/release/tools/build/v2/example/make/jamroot.jam 2011-01-25 13:06:12 EST (Tue, 25 Jan 2011)
@@ -1,9 +1,12 @@
+import feature ;
import toolset ;
path-constant HERE : . ;
make main.cpp : main_cpp.pro : @do-something ;
-toolset.flags do-something PYTHON : <python.interpreter> ;
+feature.feature example.python.interpreter : : free ;
+
+toolset.flags do-something PYTHON : <example.python.interpreter> ;
actions do-something
{
"$(PYTHON:E=python)" "$(HERE)/foo.py" "$(>)" "$(<)"
Modified: branches/release/tools/build/v2/kernel/bootstrap.jam
==============================================================================
--- branches/release/tools/build/v2/kernel/bootstrap.jam (original)
+++ branches/release/tools/build/v2/kernel/bootstrap.jam 2011-01-25 13:06:12 EST (Tue, 25 Jan 2011)
@@ -187,11 +187,28 @@
DEPENDS all : $(targets) ;
}
+ rule call-in-module ( m : rulename : * )
+ {
+ module $(m)
+ {
+ return [ $(2) $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ] ;
+ }
+ }
+
+
rule set-update-action ( action : targets * : sources * : properties * )
{
$(action) $(targets) : $(sources) : $(properties) ;
}
+ rule set-update-action-in-module ( m : action : targets * : sources * : properties * )
+ {
+ module $(m)
+ {
+ $(2) $(3) : $(4) : $(5) ;
+ }
+ }
+
rule set-target-variable ( targets + : variable : value * : append ? )
{
if $(append)
@@ -204,19 +221,21 @@
}
}
- rule get-target-variable ( target : variable )
+ rule get-target-variable ( targets + : variable )
{
- return [ on $(target) return $($(variable)) ] ;
+ return [ on $(targets) return $($(variable)) ] ;
}
- rule import-rules-from-parent ( parent-module : this-module : user-rules )
+ rule import-rules-from-parent ( parent-module : this-module : user-rules * )
{
IMPORT $(parent-module) : $(user-rules) : $(this-module) : $(user-rules) ;
EXPORT $(this-module) : $(user-rules) ;
}
rule mark-included ( targets * : includes * ) {
- INCLUDES $(targets) : $(INCLUDES) ;
+ NOCARE $(includes) ;
+ INCLUDES $(targets) : $(includes) ;
+ ISFILE $(includes) ;
}
}
@@ -225,7 +244,11 @@
module PyBB
{
- bootstrap $(root) ;
+ local ok = [ bootstrap $(root) ] ;
+ if ! $(ok)
+ {
+ EXIT ;
+ }
}
Modified: branches/release/tools/build/v2/kernel/bootstrap.py
==============================================================================
--- branches/release/tools/build/v2/kernel/bootstrap.py (original)
+++ branches/release/tools/build/v2/kernel/bootstrap.py 2011-01-25 13:06:12 EST (Tue, 25 Jan 2011)
@@ -21,5 +21,5 @@
sys.modules["b2"] = m
import b2.build_system
- b2.build_system.main()
+ return b2.build_system.main()
Modified: branches/release/tools/build/v2/manager.py
==============================================================================
--- branches/release/tools/build/v2/manager.py (original)
+++ branches/release/tools/build/v2/manager.py 2011-01-25 13:06:12 EST (Tue, 25 Jan 2011)
@@ -38,12 +38,6 @@
self.errors_ = Errors()
self.command_line_free_features_ = property_set.empty()
- # Object Map.
- # TODO: This is a kludge: maps object names to the actual instances.
- # Sometimes, objects are stored in properties, along with some grist.
- # This map is used to store the value and return an id, which can be later on used to retriev it back.
- self.object_map_ = {}
-
global the_manager
the_manager = self
@@ -86,22 +80,6 @@
def set_command_line_free_features(self, v):
self.command_line_free_features_ = v
- def register_object (self, value):
- """ Stores an object in a map and returns a key that can be used to retrieve it.
- """
- key = 'object_registry_' + str (value)
- self.object_map_ [key] = value
- return key
-
- def get_object (self, key):
- """ Returns a previously registered object.
- """
- if not isinstance (key, str):
- # Probably it's the object itself.
- return key
-
- return self.object_map_ [key]
-
def construct (self, properties = [], targets = []):
""" Constructs the dependency graph.
properties: the build properties.
Modified: branches/release/tools/build/v2/nightly.sh
==============================================================================
--- branches/release/tools/build/v2/nightly.sh (original)
+++ branches/release/tools/build/v2/nightly.sh 2011-01-25 13:06:12 EST (Tue, 25 Jan 2011)
@@ -16,11 +16,12 @@
export LC_MESSAGES=C
export LANG=C
cd /tmp
-rm -rf boost-build
+rm -rf boost-build-nightly
+mkdir boost-build-nightly
echo "Checking out sources"
-svn co http://svn.boost.org/svn/boost/trunk/tools boost-build > /tmp/boost_build_checkout_log
-mv /tmp/boost_build_checkout_log boost-build/checkout-log
-cd boost-build/build/v2
+svn co http://svn.boost.org/svn/boost/trunk/tools/build/v2 boost-build-nightly/boost-build > /tmp/boost_build_checkout_log
+mv /tmp/boost_build_checkout_log boost-build-nightly/checkout-log
+cd boost-build-nightly/boost-build/
echo "Building packages and uploading docs"
./roll.sh > ../roll-log 2>&1
cd ..
Modified: branches/release/tools/build/v2/roll.sh
==============================================================================
--- branches/release/tools/build/v2/roll.sh (original)
+++ branches/release/tools/build/v2/roll.sh 2011-01-25 13:06:12 EST (Tue, 25 Jan 2011)
@@ -8,14 +8,8 @@
set -e
-# Do some renames/rearrangments
-cp -r ../v2 ../boost-build
-# Grab jam_src
-cp -r ../../jam/src ../boost-build/jam_src
-cd ../boost-build
-
# Capture the version
-revision=`svnversion ..`
+revision=`svnversion .`
echo "SVN Revision $revision" >> timestamp.txt
date >> timestamp.txt
@@ -23,7 +17,7 @@
rm -rf example/versioned
# Remove unnecessary top-level files
-find . -maxdepth 1 -type f | egrep -v "boost-build.jam|timestamp.txt|roll.sh|bootstrap.jam|build-system.jam|boost_build.png|index.html|hacking.txt|site-config.jam|user-config.jam" | xargs rm -f
+find . -maxdepth 1 -type f | egrep -v "boost-build.jam|timestamp.txt|roll.sh|bootstrap.jam|build-system.jam|boost_build.png|index.html|hacking.txt|site-config.jam|user-config.jam|bootstrap.sh|bootstrap.bat|Jamroot.jam" | xargs rm -f
# Build the documentation
touch doc/jamroot.jam
@@ -48,7 +42,7 @@
# Make packages
find . -name ".svn" | xargs rm -rf
rm roll.sh
-chmod a+x jam_src/build.bat
+chmod a+x engine/src/build.bat
cd .. && zip -r boost-build.zip boost-build && tar --bzip2 -cf boost-build.tar.bz2 boost-build
# Copy packages to a location where they are grabbed for beta.boost.org
cp userman.pdf boost-build.zip boost-build.tar.bz2 ~/public_html/boost_build_nightly
Modified: branches/release/tools/build/v2/test/BoostBuild.py
==============================================================================
--- branches/release/tools/build/v2/test/BoostBuild.py (original)
+++ branches/release/tools/build/v2/test/BoostBuild.py 2011-01-25 13:06:12 EST (Tue, 25 Jan 2011)
@@ -46,6 +46,9 @@
print_annotation(ann[0], ann[1], xml)
annotations = []
+def clear_annotations():
+ global annotations
+ annotations = []
defer_annotations = 0
@@ -246,10 +249,7 @@
# lying around.
dirs = [os.path.join('../engine/src', jam_build_dir + '.debug'),
os.path.join('../engine/src', jam_build_dir),
- os.path.join('../../jam_src', jam_build_dir + '.debug'),
- os.path.join('../../jam_src', jam_build_dir),
- os.path.join('../jam_src', jam_build_dir + '.debug'),
- os.path.join('../jam_src', jam_build_dir)]
+ ]
for d in dirs:
if os.path.exists(d):
jam_build_dir = d
@@ -264,7 +264,7 @@
verbosity = ['-d+2']
if boost_build_path is None:
- boost_build_path = self.original_workdir
+ boost_build_path = self.original_workdir + "/.."
program_list = []
@@ -441,7 +441,10 @@
% os.path.join(self.original_workdir, "test-config.jam"))
if ignore_toolset_requirements:
kw['program'].append("--ignore-toolset-requirements")
+ if "--python" in sys.argv:
+ kw['program'].append("--python")
kw['chdir'] = subdir
+ self.last_program_invocation = kw['program']
apply(TestCmd.TestCmd.run, [self], kw)
except:
self.dump_stdio()
@@ -525,7 +528,10 @@
return ''
def read_and_strip(self, name):
- lines = open(self.glob_file(name), "rb").readlines()
+ if not self.glob_file(name):
+ return ''
+ f = open(self.glob_file(name), "rb")
+ lines = f.readlines()
result = string.join(map(string.rstrip, lines), "\n")
if lines and lines[-1][-1] == '\n':
return result + '\n'
@@ -554,6 +560,8 @@
elif os.path.exists(path):
raise "Path " + path + " already exists and is not a directory";
shutil.copytree(self.workdir, path)
+ print "The failed command was:"
+ print ' '.join(self.last_program_invocation)
at = TestCmd.caller(traceback.extract_stack(), 0)
annotation("stacktrace", at)
@@ -666,6 +674,8 @@
self.ignore("bin/config.log")
+ self.ignore("*.pyc")
+
if not self.unexpected_difference.empty():
annotation('failure', 'Unexpected changes found')
output = StringIO.StringIO()
Modified: branches/release/tools/build/v2/test/absolute_sources.py
==============================================================================
--- branches/release/tools/build/v2/test/absolute_sources.py (original)
+++ branches/release/tools/build/v2/test/absolute_sources.py 2011-01-25 13:06:12 EST (Tue, 25 Jan 2011)
@@ -52,6 +52,21 @@
alias a : $(pwd)/a.cpp ;
""")
+t.write("standalone.py", """
+from b2.manager import get_manager
+
+# FIXME: this is ugly as death
+get_manager().projects().initialize(__name__)
+
+import os ;
+
+# This use of list as parameter is also ugly.
+project(['standalone'])
+
+pwd = os.getcwd()
+alias('a', [os.path.join(pwd, 'a.cpp')])
+""")
+
t.run_build_system()
t.expect_addition("bin/$toolset/debug/a.exe")
Modified: branches/release/tools/build/v2/test/chain.py
==============================================================================
--- branches/release/tools/build/v2/test/chain.py (original)
+++ branches/release/tools/build/v2/test/chain.py 2011-01-25 13:06:12 EST (Tue, 25 Jan 2011)
@@ -19,8 +19,8 @@
t.write("jamroot.jam", "import gcc ;")
t.write("jamfile.jam", r'''
-import modules ;
-if [ modules.peek : NT ]
+import os ;
+if [ os.name ] = NT
{
actions create
{
Modified: branches/release/tools/build/v2/test/custom_generator.py
==============================================================================
--- branches/release/tools/build/v2/test/custom_generator.py (original)
+++ branches/release/tools/build/v2/test/custom_generator.py 2011-01-25 13:06:12 EST (Tue, 25 Jan 2011)
@@ -36,6 +36,23 @@
generators.register-standard rcc.resource-compile : RCC : OBJ ;
""")
+t.write("rcc.py", """
+import b2.build.type as type
+import b2.build.generators as generators
+
+from b2.manager import get_manager
+
+# Use 'RCC' to avoid conflicts with definitions in the standard rc.jam and
+# msvc.jam
+type.register('RCC', ['rcc'])
+
+generators.register_standard("rcc.resource-compile", ["RCC"], ["OBJ"])
+
+get_manager().engine().register_action(
+ "rcc.resource-compile",
+ '@($(STDOUT):E=rc-object) > "$(<)"')
+""")
+
t.write("jamfile.jam", """
obj r : r.rcc ;
""")
Modified: branches/release/tools/build/v2/test/default_build.py
==============================================================================
--- branches/release/tools/build/v2/test/default_build.py (original)
+++ branches/release/tools/build/v2/test/default_build.py 2011-01-25 13:06:12 EST (Tue, 25 Jan 2011)
@@ -42,7 +42,8 @@
differing from previous default build <variant>debug
"""
-t.run_build_system("-n --no-error-backtrace", status=1, stdout=expected)
+t.run_build_system("-n --no-error-backtrace", status=1)
+t.fail_test(t.stdout().find("default build must be identical in all alternatives") == -1)
# Test that default-build must be identical in all alternatives. No Error case,
# empty default build.
Modified: branches/release/tools/build/v2/test/dll_path.py
==============================================================================
--- branches/release/tools/build/v2/test/dll_path.py (original)
+++ branches/release/tools/build/v2/test/dll_path.py 2011-01-25 13:06:12 EST (Tue, 25 Jan 2011)
@@ -29,10 +29,10 @@
""")
t.write("jamroot.jam", """
-using dll-paths ;
+using dll_paths ;
""")
-t.write("dll-paths.jam", """
+t.write("dll_paths.jam", """
import type ;
import generators ;
import feature ;
@@ -48,7 +48,7 @@
{
rule __init__ ( )
{
- generator.__init__ dll-paths.list : EXE : PATH_LIST ;
+ generator.__init__ dll_paths.list : EXE : PATH_LIST ;
}
rule generated-targets ( sources + : property-set : project name ? )
@@ -81,6 +81,46 @@
}
""")
+t.write("dll_paths.py", """
+import bjam
+
+import b2.build.type as type
+import b2.build.generators as generators
+
+from b2.manager import get_manager
+
+def init():
+ type.register("PATH_LIST", ["pathlist"])
+
+ class DllPathsListGenerator(generators.Generator):
+
+ def __init__(self):
+ generators.Generator.__init__(self, "dll_paths.list", False, ["EXE"], ["PATH_LIST"])
+
+ def generated_targets(self, sources, ps, project, name):
+
+ dll_paths = []
+ for s in sources:
+ a = s.action()
+ if a:
+ p = a.properties()
+ dll_paths += p.get('dll-path')
+ dll_paths.sort()
+ return generators.Generator.generated_targets(self,
+ sources, ps.add_raw(["<dll-path>" + p for p in dll_paths]),
+ project, name)
+
+ generators.register(DllPathsListGenerator())
+
+command = \"\"\"
+echo $(PATHS) > $(<[1])
+\"\"\"
+def function(target, sources, ps):
+ bjam.call('set-target-variable', target, "PATHS", ps.get('dll-path'))
+
+get_manager().engine().register_action("dll_paths.list", command, function=function)
+""")
+
t.write("a/a.cpp", """
void
#if defined(_WIN32)
Modified: branches/release/tools/build/v2/test/explicit.py
==============================================================================
--- branches/release/tools/build/v2/test/explicit.py (original)
+++ branches/release/tools/build/v2/test/explicit.py 2011-01-25 13:06:12 EST (Tue, 25 Jan 2011)
@@ -8,9 +8,7 @@
t = BoostBuild.Tester()
-t.write("jamroot.jam", "")
-
-t.write("jamfile.jam", """
+t.write("jamroot.jam", """
exe hello : hello.cpp ;
exe hello2 : hello.cpp ;
explicit hello2 ;
Modified: branches/release/tools/build/v2/test/generator_selection.py
==============================================================================
--- branches/release/tools/build/v2/test/generator_selection.py (original)
+++ branches/release/tools/build/v2/test/generator_selection.py 2011-01-25 13:06:12 EST (Tue, 25 Jan 2011)
@@ -51,6 +51,28 @@
}
""")
+ t.write("Other/mygen.py", """
+import b2.build.generators as generators
+import b2.build.type as type
+
+from b2.manager import get_manager
+
+import os
+
+
+type.register('MY_TYPE', ['extension'])
+generators.register_standard('mygen.generate-a-cpp-file', ['MY_TYPE'], ['CPP'])
+if os.name == 'nt':
+ action = 'echo void g() {} > "$(<)"'
+else:
+ action = 'echo "void g() {}" > "$(<)"'
+def f(*args):
+ print "Generating a CPP file..."
+
+get_manager().engine().register_action("mygen.generate-a-cpp-file",
+ action, function=f)
+""")
+
t.write("Other/jamfile.jam", """
import mygen ;
obj other-obj : source.extension ;
Modified: branches/release/tools/build/v2/test/loop.py
==============================================================================
--- branches/release/tools/build/v2/test/loop.py (original)
+++ branches/release/tools/build/v2/test/loop.py 2011-01-25 13:06:12 EST (Tue, 25 Jan 2011)
@@ -21,6 +21,5 @@
t.run_build_system("--no-error-backtrace", status=1)
t.fail_test(string.find(t.stdout(),
"error: Recursion in main target references") == -1)
-t.fail_test(string.find(t.stdout(), "./main ./l ./main") == -1)
t.cleanup()
Modified: branches/release/tools/build/v2/test/notfile.py
==============================================================================
--- branches/release/tools/build/v2/test/notfile.py (original)
+++ branches/release/tools/build/v2/test/notfile.py 2011-01-25 13:06:12 EST (Tue, 25 Jan 2011)
@@ -38,7 +38,7 @@
name = t.adjust_names(["bin/$toolset/debug/hello.exe"])[0]
name = apply(os.path.join, string.split(name, "/"));
-c = "valgrind " + name
+c = "valgrind *" + name
t.expect_output_line(c)
t.cleanup()
Modified: branches/release/tools/build/v2/test/project_root_constants.py
==============================================================================
--- branches/release/tools/build/v2/test/project_root_constants.py (original)
+++ branches/release/tools/build/v2/test/project_root_constants.py 2011-01-25 13:06:12 EST (Tue, 25 Jan 2011)
@@ -56,10 +56,11 @@
hello ;
""")
-t.run_build_system(subdir="d/d2", stdout="""d: foo
+t.run_build_system(subdir="d/d2")
+t.fail_test(t.stdout().find("""d: foo
d2: foo
d2: bar
Hello 10
-""")
+""") == -1)
t.cleanup()
Modified: branches/release/tools/build/v2/test/searched_lib.py
==============================================================================
--- branches/release/tools/build/v2/test/searched_lib.py (original)
+++ branches/release/tools/build/v2/test/searched_lib.py 2011-01-25 13:06:12 EST (Tue, 25 Jan 2011)
@@ -46,9 +46,6 @@
import path ;
import project ;
-local here = [ project.attribute $(__name__) location ] ;
-here = [ path.root $(here) [ path.pwd ] ] ;
-
exe main : main.cpp helper ;
lib helper : helper.cpp test_lib ;
lib test_lib : : <name>test_lib <search>lib ;
@@ -82,9 +79,6 @@
project : requirements <hardcode-dll-paths>false ;
-local here = [ project.attribute $(__name__) location ] ;
-here = [ path.root $(here) [ path.pwd ] ] ;
-
unit-test main : main.cpp helper ;
lib helper : helper.cpp test_lib ;
lib test_lib : : <name>test_lib <search>lib ;
Modified: branches/release/tools/build/v2/test/standalone.py
==============================================================================
--- branches/release/tools/build/v2/test/standalone.py (original)
+++ branches/release/tools/build/v2/test/standalone.py 2011-01-25 13:06:12 EST (Tue, 25 Jan 2011)
@@ -32,6 +32,23 @@
alias runtime : x ;
""")
+t.write("standalone.py", """
+from b2.manager import get_manager
+
+# FIXME: this is ugly as death
+get_manager().projects().initialize(__name__)
+
+import os ;
+
+# This use of list as parameter is also ugly.
+project(['standalone'])
+
+pwd = os.getcwd()
+alias('x', [os.path.join(pwd, '../a.cpp')])
+alias('runtime', ['x'])
+""")
+
+
t.write("sub/jamfile.jam", """
stage bin : /standalone//runtime ;
""")
Modified: branches/release/tools/build/v2/test/suffix.py
==============================================================================
--- branches/release/tools/build/v2/test/suffix.py (original)
+++ branches/release/tools/build/v2/test/suffix.py 2011-01-25 13:06:12 EST (Tue, 25 Jan 2011)
@@ -47,6 +47,23 @@
}
""")
+t.write("suffixes.py", """
+import b2.build.type as type
+import b2.build.generators as generators
+import b2.tools.common as common
+
+from b2.manager import get_manager
+
+type.register("First", ["first"])
+type.register("Second", [""], "First")
+
+generators.register_standard("suffixes.second", ["CPP"], ["Second"])
+
+get_manager().engine().register_action("suffixes.second",
+ "%s $(<)" % common.file_creation_command())
+
+""")
+
t.write("jamroot.jam", """
import suffixes ;
""")
Modified: branches/release/tools/build/v2/test/test_all.py
==============================================================================
--- branches/release/tools/build/v2/test/test_all.py (original)
+++ branches/release/tools/build/v2/test/test_all.py 2011-01-25 13:06:12 EST (Tue, 25 Jan 2011)
@@ -64,6 +64,10 @@
print "PASSED"
else:
print "FAILED"
+
+ if i == "regression":
+ BoostBuild.flush_annotations()
+ BoostBuild.clear_annotations()
else:
rs = "succeed"
if not passed:
Modified: branches/release/tools/build/v2/test/using.py
==============================================================================
--- branches/release/tools/build/v2/test/using.py (original)
+++ branches/release/tools/build/v2/test/using.py 2011-01-25 13:06:12 EST (Tue, 25 Jan 2011)
@@ -23,6 +23,15 @@
rule init ( ) { }
""")
+t.write("some_tool.py", """
+from b2.manager import get_manager
+
+get_manager().projects().initialize(__name__)
+
+def init():
+ pass
+""")
+
t.write("sub/jamfile.jam", """
exe a : a.cpp ;
""")
Modified: branches/release/tools/build/v2/test/wrong_project.py
==============================================================================
--- branches/release/tools/build/v2/test/wrong_project.py (original)
+++ branches/release/tools/build/v2/test/wrong_project.py 2011-01-25 13:06:12 EST (Tue, 25 Jan 2011)
@@ -26,6 +26,15 @@
rule init ( ) { }
""")
+t.write("some_tool.py", """
+from b2.manager import get_manager
+
+get_manager().projects().initialize(__name__)
+
+def init():
+ pass
+""")
+
t.run_build_system()
t.expect_addition("bin/$toolset/debug/a.exe")
Modified: branches/release/tools/build/v2/tools/boostbook.jam
==============================================================================
--- branches/release/tools/build/v2/tools/boostbook.jam (original)
+++ branches/release/tools/build/v2/tools/boostbook.jam 2011-01-25 13:06:12 EST (Tue, 25 Jan 2011)
@@ -408,7 +408,8 @@
root-project = [ $(root-project).project-module ] ;
while
[ project.attribute $(root-project) parent-module ] &&
- [ project.attribute $(root-project) parent-module ] != user-config
+ [ project.attribute $(root-project) parent-module ] != user-config &&
+ [ project.attribute $(root-project) parent-module ] != project-config
{
root-project = [ project.attribute $(root-project) parent-module ] ;
}
Modified: branches/release/tools/build/v2/tools/builtin.py
==============================================================================
--- branches/release/tools/build/v2/tools/builtin.py (original)
+++ branches/release/tools/build/v2/tools/builtin.py 2011-01-25 13:06:12 EST (Tue, 25 Jan 2011)
@@ -8,13 +8,16 @@
""" Defines standard features and rules.
"""
+import b2.build.targets as targets
+
import sys
from b2.build import feature, property, virtual_target, generators, type, property_set, scanner
from b2.util.utility import *
-from b2.util import path, regex
+from b2.util import path, regex, bjam_signature
import b2.tools.types
from b2.manager import get_manager
+
# Records explicit properties for a variant.
# The key is the variant name.
__variant_explicit_properties = {}
@@ -26,6 +29,7 @@
__variant_explicit_properties = {}
+@bjam_signature((["name"], ["parents_or_properties", "*"], ["explicit_properties", "*"]))
def variant (name, parents_or_properties, explicit_properties = []):
""" Declares a new variant.
First determines explicit properties for this variant, by
@@ -47,39 +51,35 @@
"""
parents = []
if not explicit_properties:
- if get_grist (parents_or_properties [0]):
- explicit_properties = parents_or_properties
-
- else:
- parents = parents_or_properties
-
+ explicit_properties = parents_or_properties
else:
parents = parents_or_properties
+
+ inherited = property_set.empty()
+ if parents:
- # The problem is that we have to check for conflicts
- # between base variants.
- if len (parents) > 1:
- raise BaseException ("Multiple base variants are not yet supported")
-
- inherited = []
- # Add explicitly specified properties for parents
- for p in parents:
+ # If we allow multiple parents, we'd have to check for conflicts
+ # between base variants, and there was no demand for that, so we don't bother.
+ if len (parents) > 1:
+ raise BaseException ("Multiple base variants are not yet supported")
+
+ p = parents[0]
# TODO: the check may be stricter
if not feature.is_implicit_value (p):
raise BaseException ("Invalid base varaint '%s'" % p)
- inherited += __variant_explicit_properties [p]
+ inherited = __variant_explicit_properties[p]
- property.validate (explicit_properties)
- explicit_properties = property.refine (inherited, explicit_properties)
+ explicit_properties = property_set.create_with_validation(explicit_properties)
+ explicit_properties = inherited.refine(explicit_properties)
# Record explicitly specified properties for this variant
# We do this after inheriting parents' properties, so that
# they affect other variants, derived from this one.
- __variant_explicit_properties [name] = explicit_properties
+ __variant_explicit_properties[name] = explicit_properties
feature.extend('variant', [name])
- feature.compose (replace_grist (name, '<variant>'), explicit_properties)
+ feature.compose ("<variant>" + name, explicit_properties.all())
__os_names = """
amiga aix bsd cygwin darwin dos emx freebsd hpux iphone linux netbsd
@@ -294,6 +294,11 @@
'armv5t', 'armv5te', 'armv6', 'armv6j', 'iwmmxt', 'ep9312'],
['propagated', 'optional'])
+
+ feature.feature('conditional', [], ['incidental', 'free'])
+
+ # The value of 'no' prevents building of a target.
+ feature.feature('build', ['yes', 'no'], ['optional'])
# Windows-specific features
feature.feature ('user-interface', ['console', 'gui', 'wince', 'native', 'auto'], [])
@@ -315,7 +320,7 @@
class SearchedLibTarget (virtual_target.AbstractFileTarget):
def __init__ (self, name, project, shared, real_name, search, action):
- virtual_target.AbstractFileTarget.__init__ (self, name, False, 'SEARCHED_LIB', project, action)
+ virtual_target.AbstractFileTarget.__init__ (self, name, 'SEARCHED_LIB', project, action)
self.shared_ = shared
self.real_name_ = real_name
@@ -333,7 +338,7 @@
return self.search_
def actualize_location (self, target):
- project.manager ().engine ().add_not_file_target (target)
+ bjam.call("NOTFILE", target)
def path (self):
#FIXME: several functions rely on this not being None
@@ -376,8 +381,8 @@
bjam.call("mark-included", target, all)
engine = get_manager().engine()
- engine.set_target_variable(angle, "SEARCH", self.includes_)
- engine.set_target_variable(quoted, "SEARCH", self.includes_)
+ engine.set_target_variable(angle, "SEARCH", get_value(self.includes_))
+ engine.set_target_variable(quoted, "SEARCH", [b] + get_value(self.includes_))
# Just propagate current scanner to includes, in a hope
# that includes do not change scanners.
@@ -385,6 +390,7 @@
scanner.register (CScanner, 'include')
type.set_scanner ('CPP', CScanner)
+type.set_scanner ('C', CScanner)
# Ported to trunk_at_47077
class LibGenerator (generators.Generator):
@@ -397,6 +403,7 @@
generators.Generator.__init__(self, id, composing, source_types, target_types_and_names, requirements)
def run(self, project, name, prop_set, sources):
+
# The lib generator is composing, and can be only invoked with
# explicit name. This check is present in generator.run (and so in
# builtin.LinkingGenerator), but duplicate it here to avoid doing
@@ -427,55 +434,38 @@
generators.register(LibGenerator())
-### # The implementation of the 'lib' rule. Beyond standard syntax that rule allows
-### # simplified:
-### # lib a b c ;
-### # so we need to write code to handle that syntax.
-### rule lib ( names + : sources * : requirements * : default-build *
-### : usage-requirements * )
-### {
-### local project = [ project.current ] ;
-###
-### # This is a circular module dependency, so it must be imported here
-### import targets ;
-###
-### local result ;
-### if ! $(sources) && ! $(requirements)
-### && ! $(default-build) && ! $(usage-requirements)
-### {
-### for local name in $(names)
-### {
-### result += [
-### targets.main-target-alternative
-### [ new typed-target $(name) : $(project) : LIB
-### :
-### : [ targets.main-target-requirements $(requirements) <name>$(name) :
-### $(project) ]
-### : [ targets.main-target-default-build $(default-build) : $(project) ]
-### : [ targets.main-target-usage-requirements $(usage-requirements) : $(project) ]
-### ] ] ;
-### }
-### }
-### else
-### {
-### if $(names[2])
-### {
-### errors.user-error "When several names are given to the 'lib' rule" :
-### "it's not allowed to specify sources or requirements. " ;
-### }
-###
-### local name = $(names[1]) ;
-### result = [ targets.main-target-alternative
-### [ new typed-target $(name) : $(project) : LIB
-### : [ targets.main-target-sources $(sources) : $(name) ]
-### : [ targets.main-target-requirements $(requirements) : $(project) ]
-### : [ targets.main-target-default-build $(default-build) : $(project) ]
-### : [ targets.main-target-usage-requirements $(usage-requirements) : $(project) ]
-### ] ] ;
-### }
-### return $(result) ;
-### }
-### IMPORT $(__name__) : lib : : lib ;
+def lib(names, sources=[], requirements=[], default_build=[], usage_requirements=[]):
+ """The implementation of the 'lib' rule. Beyond standard syntax that rule allows
+ simplified: 'lib a b c ;'."""
+
+ if len(names) > 1:
+ if any(r.startswith('<name>') for r in requirements):
+ get_manager().errors()("When several names are given to the 'lib' rule\n" +
+ "it is not allowed to specify the <name> feature.")
+
+ if sources:
+ get_manager().errors()("When several names are given to the 'lib' rule\n" +
+ "it is not allowed to specify sources.")
+
+ project = get_manager().projects().current()
+ result = []
+
+ for name in names:
+ r = requirements[:]
+
+ # Support " lib a ; " and " lib a b c ; " syntax.
+ if not sources and not any(r.startswith("<name>") for r in requirements) \
+ and not any(r.startswith("<file") for r in requirements):
+ r.append("<name>" + name)
+
+ result.append(targets.create_typed_metatarget(name, "LIB", sources,
+ r,
+ default_build,
+ usage_requirements))
+ return result
+
+get_manager().projects().add_rule("lib", lib)
+
# Updated to trunk_at_47077
class SearchedLibGenerator (generators.Generator):
@@ -488,6 +478,7 @@
generators.Generator.__init__ (self, id, composing, source_types, target_types_and_names, requirements)
def run(self, project, name, prop_set, sources):
+
if not name:
return None
@@ -574,19 +565,21 @@
generators.Generator.__init__ (self, id, composing, source_types, target_types_and_names, requirements)
def run (self, project, name, prop_set, sources):
+
lib_sources = prop_set.get('<library>')
- [ sources.append (project.manager().get_object(x)) for x in lib_sources ]
+ sources.extend(lib_sources)
# Add <library-path> properties for all searched libraries
extra = []
for s in sources:
if s.type () == 'SEARCHED_LIB':
search = s.search()
- extra.append(replace_grist(search, '<library-path>'))
+ extra.extend(property.Property('<library-path>', sp) for sp in search)
orig_xdll_path = []
- if prop_set.get('<hardcode-dll-paths>') == ['true'] and type.is_derived(self.target_types_ [0], 'EXE'):
+ if prop_set.get('<hardcode-dll-paths>') == ['true'] \
+ and type.is_derived(self.target_types_ [0], 'EXE'):
xdll_path = prop_set.get('<xdll-path>')
orig_xdll_path = [ replace_grist(x, '<dll-path>') for x in xdll_path ]
# It's possible that we have libraries in sources which did not come
@@ -601,7 +594,7 @@
location = path.root(s.name(), p.get('source-location'))
xdll_path.append(path.parent(location))
- extra += [ replace_grist(x, '<dll-path>') for x in xdll_path ]
+ extra.extend(property.Property('<dll-path>', sp) for sp in xdll_path)
if extra:
prop_set = prop_set.add_raw (extra)
@@ -656,36 +649,33 @@
# sources to pass to inherited rule
sources2 = []
- # properties to pass to inherited rule
- properties2 = []
# sources which are libraries
libraries = []
# Searched libraries are not passed as argument to linker
# but via some option. So, we pass them to the action
# via property.
- properties2 = prop_set.raw()
fsa = []
fst = []
for s in sources:
if type.is_derived(s.type(), 'SEARCHED_LIB'):
- name = s.real_name()
+ n = s.real_name()
if s.shared():
- fsa.append(name)
+ fsa.append(n)
else:
- fst.append(name)
+ fst.append(n)
else:
sources2.append(s)
+ add = []
if fsa:
- properties2 += [replace_grist('&&'.join(fsa), '<find-shared-library>')]
+ add.append("<find-shared-library>" + '&&'.join(fsa))
if fst:
- properties2 += [replace_grist('&&'.join(fst), '<find-static-library>')]
-
- spawn = generators.Generator.generated_targets(self, sources2, property_set.create(properties2), project, name)
-
+ add.append("<find-static-library>" + '&&'.join(fst))
+
+ spawn = generators.Generator.generated_targets(self, sources2, prop_set.add_raw(add), project, name)
return spawn
@@ -720,3 +710,9 @@
###
###
###
+
+get_manager().projects().add_rule("variant", variant)
+
+import stage
+import symlink
+import message
Modified: branches/release/tools/build/v2/tools/clang-linux.jam
==============================================================================
--- branches/release/tools/build/v2/tools/clang-linux.jam (original)
+++ branches/release/tools/build/v2/tools/clang-linux.jam 2011-01-25 13:06:12 EST (Tue, 25 Jan 2011)
@@ -71,7 +71,7 @@
SPACE = " " ;
# Declare flags and action for compilation.
-toolset.flags clang-linux.compile OPTIONS <optimization>off : -O0 ;
+toolset.flags clang-linux.compile OPTIONS <optimization>off : ;
toolset.flags clang-linux.compile OPTIONS <optimization>speed : -O3 ;
toolset.flags clang-linux.compile OPTIONS <optimization>space : -Os ;
@@ -85,7 +85,7 @@
toolset.flags clang-linux.compile OPTIONS <warnings-as-errors>on : -Werror ;
toolset.flags clang-linux.compile OPTIONS <debug-symbols>on : -g ;
-toolset.flags clang-linux.compile OPTIONS <profiling>on : -pg ;
+toolset.flags clang-linux.compile OPTIONS <profiling>on : ;
toolset.flags clang-linux.compile OPTIONS <rtti>off : -fno-rtti ;
rule compile.c++ ( targets * : sources * : properties * )
Modified: branches/release/tools/build/v2/tools/common.py
==============================================================================
--- branches/release/tools/build/v2/tools/common.py (original)
+++ branches/release/tools/build/v2/tools/common.py 2011-01-25 13:06:12 EST (Tue, 25 Jan 2011)
@@ -14,6 +14,7 @@
import bjam
import os
import os.path
+import sys
from b2.build import feature
from b2.util.utility import *
@@ -46,9 +47,31 @@
__debug_configuration = '--debug-configuration' in bjam.variable('ARGV')
__show_configuration = '--show-configuration' in bjam.variable('ARGV')
-
+
+ global __executable_path_variable
+ OS = bjam.call("peek", [], "OS")[0]
+ if OS == "NT":
+ # On Windows the case and capitalization of PATH is not always predictable, so
+ # let's find out what variable name was really set.
+ for n in sys.environ:
+ if n.lower() == "path":
+ __executable_path_variable = n
+ break
+ else:
+ __executable_path_variable = "PATH"
+
+ m = {"NT": __executable_path_variable,
+ "CYGWIN": "PATH",
+ "MACOSX": "DYLD_LIBRARY_PATH",
+ "AIX": "LIBPATH"}
+ global __shared_library_path_variable
+ __shared_library_path_variable = m.get(OS, "LD_LIBRARY_PATH")
+
reset()
+def shared_library_path_variable():
+ return __shared_library_path_variable
+
# ported from trunk_at_47174
class Configurations(object):
"""
@@ -502,9 +525,9 @@
"""
Returns a command that prepends the given paths to the named path variable on
the current platform.
- """
+ """
return path_variable_setting_command(variable,
- paths + os.environ(variable).split(os.pathsep))
+ paths + os.environ.get(variable, "").split(os.pathsep))
def file_creation_command():
"""
@@ -535,9 +558,9 @@
# Schedule the mkdir build action.
if os_name() == 'NT':
- engine.set_update_action("common.MkDir1-quick-fix-for-windows", target, [], None)
+ engine.set_update_action("common.MkDir1-quick-fix-for-windows", target, [])
else:
- engine.set_update_action("common.MkDir1-quick-fix-for-unix", target, [], None)
+ engine.set_update_action("common.MkDir1-quick-fix-for-unix", target, [])
# Prepare a Jam 'dirs' target that can be used to make the build only
# construct all the target directories.
Modified: branches/release/tools/build/v2/tools/fop.jam
==============================================================================
--- branches/release/tools/build/v2/tools/fop.jam (original)
+++ branches/release/tools/build/v2/tools/fop.jam 2011-01-25 13:06:12 EST (Tue, 25 Jan 2011)
@@ -9,7 +9,8 @@
import os ;
import generators ;
import common ;
-import errors ;
+import errors ;
+import boostbook ;
generators.register-standard fop.render.pdf : FO : PDF ;
generators.register-standard fop.render.ps : FO : PS ;
@@ -19,21 +20,23 @@
fop-command = [ common.get-invocation-command fop : fop : $(fop-command)
: [ modules.peek : FOP_DIR ] ] ;
- if $(.initialized)
+ .FOP_COMMAND = $(fop-command) ;
+ .FOP_SETUP = ;
+
+ # JAVA_HOME is the location that java was installed to.
+
+ if $(java-home)
{
- if $(.FOP_COMMAND) != $(fop-command) || $(JAVA_HOME) != $(java-home)
- || $(JAVACMD) != $(java)
- {
- errors.user-error "fop: reinitialization with different options" ;
- }
+ .FOP_SETUP += [ common.variable-setting-command JAVA_HOME : $(java-home) ] ;
}
- else
+
+ # JAVACMD is the location that of the java executable, useful for a
+ # non-standard java installation, where the executable isn't at
+ # $JAVA_HOME/bin/java.
+
+ if $(java)
{
- .initialized = true ;
- .FOP_COMMAND = $(fop-command) ;
- # What is the meaning of this logic? Needs more comments!! --DWA
- java-home ?= $(java) ;
- .FOP_SETUP = [ common.variable-setting-command JAVA_HOME : $(java-home) ] ;
+ .FOP_SETUP += [ common.variable-setting-command JAVACMD : $(java) ] ;
}
# Make sure the fop command is executed from within the directory where it's located.
Modified: branches/release/tools/build/v2/tools/gcc.jam
==============================================================================
--- branches/release/tools/build/v2/tools/gcc.jam (original)
+++ branches/release/tools/build/v2/tools/gcc.jam 2011-01-25 13:06:12 EST (Tue, 25 Jan 2011)
@@ -58,11 +58,83 @@
# Example:
# using gcc : 3.4 : : <cxxflags>foo <linkflags>bar <linker-type>sun ;
#
+# The compiler command to use is detected in a three step manner:
+# 1) If an explicit command is specified by the user, it will be used and must be available.
+# 2) If only a certain version is specified, it is enforced:
+# - either a command 'g++-VERSION' must be available
+# - or the default command 'g++' must be available and match the exact version.
+# 3) Without user-provided restrictions use default 'g++'
rule init ( version ? : command * : options * )
{
+ #1): use user-provided command
+ local tool-command = ;
+ if $(command)
+ {
+ tool-command = [ common.get-invocation-command-nodefault gcc : g++ : $(command) ] ;
+ if ! $(tool-command)
+ {
+ errors.error "toolset gcc initialization:" :
+ "provided command '$(command)' not found" :
+ "initialized from" [ errors.nearest-user-location ] ;
+ }
+ }
+ #2): enforce user-provided version
+ else if $(version)
+ {
+ tool-command = [ common.get-invocation-command-nodefault gcc : "g++-$(version[1])" ] ;
+
+ #2.1) fallback: check whether "g++" reports the requested version
+ if ! $(tool-command)
+ {
+ tool-command = [ common.get-invocation-command-nodefault gcc : g++ ] ;
+ if $(tool-command)
+ {
+ local tool-command-string = $(tool-command:J=" ") ;
+ local tool-version = [ MATCH "^([0-9.]+)" : [ SHELL "$(tool-command-string) -dumpversion" ] ] ;
+ if $(tool-version) != $(version)
+ {
+                      # Permit a match between the two-digit version specified by the user
+                      # (e.g. 4.4) and the 3-digit version reported by gcc.
+                      # Since only two digits are present in the binary name anyway,
+                      # insisting that the user specify a 3-digit version when
+                      # configuring Boost.Build, while it's not required on the
+                      # command line, would be strange.
+ local stripped = [ MATCH "^([0-9]+\.[0-9]+).*" : $(tool-version) ] ;
+ if $(stripped) != $(version)
+ {
+ errors.error "toolset gcc initialization:" :
+ "version '$(version)' requested but 'g++-$(version)' not found and version '$(tool-version)' of default '$(tool-command)' does not match" :
+ "initialized from" [ errors.nearest-user-location ] ;
+ tool-command = ;
+ }
+ # Use full 3-digit version to be compatible with the 'using gcc ;' case
+ version = $(tool-version) ;
+ }
+ }
+ else
+ {
+ errors.error "toolset gcc initialization:" :
+ "version '$(version)' requested but neither 'g++-$(version)' nor default 'g++' found" :
+ "initialized from" [ errors.nearest-user-location ] ;
+ }
+ }
+ }
+ #3) default: no command and no version specified, try using default command "g++"
+ else
+ {
+ tool-command = [ common.get-invocation-command-nodefault gcc : g++ ] ;
+ if ! $(tool-command)
+ {
+ errors.error "toolset gcc initialization:" :
+ "no command provided, default command 'g++' not found" :
+ "initialized from" [ errors.nearest-user-location ] ;
+ }
+ }
+
+
# Information about the gcc command...
# The command.
- local command = [ common.get-invocation-command gcc : g++ : $(command) ] ;
+ local command = $(tool-command) ;
# The root directory of the tool install.
local root = [ feature.get-values <root> : $(options) ] ;
# The bin directory where to find the command to execute.
@@ -359,6 +431,17 @@
option = -maix64 ;
}
}
+ else if $(os) = hpux
+ {
+ if $(model) = 32
+ {
+ option = -milp32 ;
+ }
+ else
+ {
+ option = -mlp64 ;
+ }
+ }
else
{
if $(model) = 32
Modified: branches/release/tools/build/v2/tools/gcc.py
==============================================================================
--- branches/release/tools/build/v2/tools/gcc.py (original)
+++ branches/release/tools/build/v2/tools/gcc.py 2011-01-25 13:06:12 EST (Tue, 25 Jan 2011)
@@ -287,7 +287,6 @@
# In that case we'll just add another parameter to 'init' and move this logic
# inside 'init'.
if not os_name () in ['CYGWIN', 'NT']:
- print "osname:", os_name()
# This logic will add -fPIC for all compilations:
#
# lib a : a.cpp b ;
@@ -379,7 +378,7 @@
property while creating or using shared library, since it's not supported by
gcc/libc.
"""
- def run(self, project, name, prop_set, sources):
+ def run(self, project, name, ps, sources):
# TODO: Replace this with the use of a target-os property.
no_static_link = False
@@ -393,10 +392,9 @@
## }
## }
- properties = prop_set.raw()
reason = None
- if no_static_link and '<runtime-link>static' in properties:
- if '<link>shared' in properties:
+ if no_static_link and ps.get('runtime-link') == 'static':
+ if ps.get('link') == 'shared':
reason = "On gcc, DLL can't be build with '<runtime-link>static'."
elif type.is_derived(self.target_types[0], 'EXE'):
for s in sources:
@@ -412,7 +410,7 @@
return
else:
generated_targets = unix.UnixLinkingGenerator.run(self, project,
- name, prop_set, sources)
+ name, ps, sources)
return generated_targets
if on_windows():
@@ -626,7 +624,7 @@
engine.set_target_variable('LOCATE', clean, bjam.call('get-target-variable', targets, 'LOCATE'))
engine.add_dependency(clean, sources)
engine.add_dependency(targets, clean)
- engine.set_update_action('common.RmTemps', clean, targets, None)
+ engine.set_update_action('common.RmTemps', clean, targets)
# Declare action for creating static libraries.
# The letter 'r' means to add files to the archive with replacement. Since we
@@ -643,6 +641,8 @@
engine = get_manager().engine()
engine.set_target_variable(targets, 'SPACE', ' ')
engine.set_target_variable(targets, 'JAM_SEMAPHORE', '<s>gcc-link-semaphore')
+ engine.set_target_variable(targets, "HAVE_SONAME", HAVE_SONAME)
+ engine.set_target_variable(targets, "SONAME_OPTION", SONAME_OPTION)
engine.register_action(
'gcc.link.dll',
Modified: branches/release/tools/build/v2/tools/make.py
==============================================================================
--- branches/release/tools/build/v2/tools/make.py (original)
+++ branches/release/tools/build/v2/tools/make.py 2011-01-25 13:06:12 EST (Tue, 25 Jan 2011)
@@ -1,4 +1,5 @@
-# Status: being ported by Vladimir Prus
+# Status: ported.
+# Base revision: 64068
# Copyright 2003 Dave Abrahams
# Copyright 2003 Douglas Gregor
@@ -15,16 +16,15 @@
from b2.manager import get_manager
import b2.build.property_set
+
class MakeTarget(BasicTarget):
def construct(self, name, source_targets, property_set):
- action_name = property_set.get("<action>")[0]
-
- action = Action(get_manager(), source_targets, action_name, property_set)
- # FIXME: type.type uses global data.
- target = FileTarget(self.name(), 1, type.type(self.name()),
- self.project(), action)
+ action_name = property_set.get("<action>")[0]
+ action = Action(get_manager(), source_targets, action_name[1:], property_set)
+ target = FileTarget(self.name(), type.type(self.name()),
+ self.project(), action, exact=True)
return [ b2.build.property_set.empty(),
[self.project().manager().virtual_targets().register(target)]]
@@ -33,11 +33,15 @@
target_name = target_name[0]
generating_rule = generating_rule[0]
+ if generating_rule[0] != '@':
+ generating_rule = '@' + generating_rule
if not requirements:
requirements = []
+
requirements.append("<action>%s" % generating_rule)
+
m = get_manager()
targets = m.targets()
project = m.projects().current()
Modified: branches/release/tools/build/v2/tools/msvc.jam
==============================================================================
--- branches/release/tools/build/v2/tools/msvc.jam (original)
+++ branches/release/tools/build/v2/tools/msvc.jam 2011-01-25 13:06:12 EST (Tue, 25 Jan 2011)
@@ -905,8 +905,9 @@
mc-compiler = [ feature.get-values <mc-compiler> : $(options) ] ;
mc-compiler ?= mc ;
- manifest-tool = mt ;
-
+ manifest-tool = [ feature.get-values <manifest-tool> : $(options) ] ;
+ manifest-tool ?= mt ;
+
local cc-filter = [ feature.get-values <compiler-filter> : $(options) ] ;
for local c in $(cpu)
Modified: branches/release/tools/build/v2/tools/package.jam
==============================================================================
--- branches/release/tools/build/v2/tools/package.jam (original)
+++ branches/release/tools/build/v2/tools/package.jam 2011-01-25 13:06:12 EST (Tue, 25 Jan 2011)
@@ -33,13 +33,17 @@
import "class" : new ;
import option ;
import project ;
+import feature ;
import property ;
import stage ;
import targets ;
import modules ;
-rule install ( name : requirements * : binaries * : libraries * : headers * )
+feature.feature install-default-prefix : : free incidental ;
+
+rule install ( name package-name ? : requirements * : binaries * : libraries * : headers * )
{
+ package-name ?= $(name) ;
if [ MATCH --prefix=(.*) : [ modules.peek : ARGV ] ]
{
# If --prefix is explicitly specified on the command line,
@@ -67,17 +71,7 @@
# First, figure out all locations. Use the default if no prefix option
# given.
- local prefix = [ option.get prefix : [ property.select
- <install-default-prefix> : $(requirements) ] ] ;
- prefix = $(prefix:G=) ;
- requirements = [ property.change $(requirements) : <install-default-prefix>
- ] ;
- # Or some likely defaults if neither is given.
- if ! $(prefix)
- {
- if [ modules.peek : NT ] { prefix = C:\\$(name) ; }
- else if [ modules.peek : UNIX ] { prefix = /usr/local ; }
- }
+ local prefix = [ get-prefix $(name) : $(requirements) ] ;
# Architecture dependent files.
local exec-locate = [ option.get exec-prefix : $(prefix) ] ;
@@ -126,3 +120,46 @@
$(1)-lib-shared-universe $(1)-lib-shared-cygwin ;
}
}
+
+rule install-data ( target-name : package-name : data * : requirements * )
+{
+ package-name ?= target-name ;
+ if [ MATCH --prefix=(.*) : [ modules.peek : ARGV ] ]
+ {
+ # If --prefix is explicitly specified on the command line,
+ # then we need wipe away any settings of datarootdir
+ option.set datarootdir : ;
+ }
+
+ local prefix = [ get-prefix $(package-name) : $(requirements) ] ;
+ local datadir = [ option.get datarootdir : $(prefix)/share ] ;
+
+ stage.install $(target-name)
+ : $(data)
+ : $(requirements) <location>$(datadir)/$(package-name)
+ ;
+
+ local c = [ project.current ] ;
+ local project-module = [ $(c).project-module ] ;
+ module $(project-module)
+ {
+ explicit $(1) ;
+ }
+}
+
+local rule get-prefix ( package-name : requirements * )
+{
+ local prefix = [ option.get prefix : [ property.select
+ <install-default-prefix> : $(requirements) ] ] ;
+ prefix = $(prefix:G=) ;
+ requirements = [ property.change $(requirements) : <install-default-prefix>
+ ] ;
+ # Or some likely defaults if neither is given.
+ if ! $(prefix)
+ {
+ if [ modules.peek : NT ] { prefix = C:\\$(package-name) ; }
+ else if [ modules.peek : UNIX ] { prefix = /usr/local ; }
+ }
+ return $(prefix) ;
+}
+
Modified: branches/release/tools/build/v2/tools/pathscale.jam
==============================================================================
--- branches/release/tools/build/v2/tools/pathscale.jam (original)
+++ branches/release/tools/build/v2/tools/pathscale.jam 2011-01-25 13:06:12 EST (Tue, 25 Jan 2011)
@@ -12,7 +12,7 @@
import fortran ;
feature.extend toolset : pathscale ;
-toolset.inherit pathscale : unix ;
+toolset.inherit pathscale : unix ;
generators.override pathscale.prebuilt : builtin.prebuilt ;
generators.override pathscale.searched-lib-generator : searched-lib-generator ;
@@ -63,41 +63,46 @@
generators.register-fortran90-compiler pathscale.compile.fortran90 : FORTRAN90 : OBJ : <toolset>pathscale ;
# Declare flags and actions for compilation
-flags pathscale.compile OPTIONS <debug-symbols>on : -g ;
-flags pathscale.compile OPTIONS <profiling>on : -pg ;
-flags pathscale.compile OPTIONS <link>shared : -fPIC ;
+flags pathscale.compile OPTIONS <optimization>off : -O0 ;
flags pathscale.compile OPTIONS <optimization>speed : -O3 ;
flags pathscale.compile OPTIONS <optimization>space : -Os ;
+
+flags pathscale.compile OPTIONS <inlining>off : -noinline ;
+flags pathscale.compile OPTIONS <inlining>on : -inline ;
+flags pathscale.compile OPTIONS <inlining>full : -inline ;
+
+flags pathscale.compile OPTIONS <warnings>off : -woffall ;
+flags pathscale.compile OPTIONS <warnings>on : -Wall ;
+flags pathscale.compile OPTIONS <warnings>all : -Wall -pedantic ;
+flags pathscale.compile OPTIONS <warnings-as-errors>on : -Werror ;
+
+flags pathscale.compile OPTIONS <debug-symbols>on : -ggdb ;
+flags pathscale.compile OPTIONS <profiling>on : -pg ;
+flags pathscale.compile OPTIONS <link>shared : -fPIC ;
flags pathscale.compile OPTIONS <address-model>32 : -m32 ;
flags pathscale.compile OPTIONS <address-model>64 : -m64 ;
-flags pathscale.compile OPTIONS <warnings>off : -woffall -Wno-uninitialized ;
-flags pathscale.compile OPTIONS <warnings>on : -Wall ;
-flags pathscale.compile OPTIONS <warnings>all : -Wall ;
-
-flags pathscale.compile.c++ OPTIONS <inlining>off : -noinline ;
-
-flags pathscale.compile OPTIONS <cflags> ;
-flags pathscale.compile.c++ OPTIONS <cxxflags> ;
+flags pathscale.compile USER_OPTIONS <cflags> ;
+flags pathscale.compile.c++ USER_OPTIONS <cxxflags> ;
flags pathscale.compile DEFINES <define> ;
flags pathscale.compile INCLUDES <include> ;
-flags pathscale.compile.fortran OPTIONS <fflags> ;
-flags pathscale.compile.fortran90 OPTIONS <fflags> ;
+flags pathscale.compile.fortran USER_OPTIONS <fflags> ;
+flags pathscale.compile.fortran90 USER_OPTIONS <fflags> ;
actions compile.c
{
- "$(CONFIG_C_COMMAND)" $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+ "$(CONFIG_C_COMMAND)" $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
}
actions compile.c++
{
- "$(CONFIG_COMMAND)" $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+ "$(CONFIG_COMMAND)" $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
}
actions compile.fortran
{
- "$(CONFIG_F_COMMAND)" $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+ "$(CONFIG_F_COMMAND)" $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
}
rule compile.fortran90 ( targets * : sources * : properties * )
@@ -112,15 +117,15 @@
actions compile.fortran90
{
- "$(CONFIG_F90_COMMAND)" $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -module $(<[1]:D) -c -o "$(<)" "$(>)"
+ "$(CONFIG_F90_COMMAND)" $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -module $(<[1]:D) -c -o "$(<)" "$(>)"
}
# Declare flags and actions for linking
-flags pathscale.link OPTIONS <debug-symbols>on : -g ;
+flags pathscale.link OPTIONS <debug-symbols>on : -ggdb -rdynamic ;
# Strip the binary when no debugging is needed
flags pathscale.link OPTIONS <debug-symbols>off : -g0 ;
flags pathscale.link OPTIONS <profiling>on : -pg ;
-flags pathscale.link OPTIONS <linkflags> ;
+flags pathscale.link USER_OPTIONS <linkflags> ;
flags pathscale.link LINKPATH <library-path> ;
flags pathscale.link FINDLIBS-ST <find-static-library> ;
flags pathscale.link FINDLIBS-SA <find-shared-library> ;
@@ -141,7 +146,7 @@
actions link bind LIBRARIES
{
- "$(CONFIG_COMMAND)" $(OPTIONS) -L"$(LINKPATH)" -Wl,$(RPATH_OPTION:E=-R)$(SPACE)-Wl,"$(RPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST)
+ "$(CONFIG_COMMAND)" $(OPTIONS) $(USER_OPTIONS) -L"$(LINKPATH)" -Wl,$(RPATH_OPTION:E=-R)$(SPACE)-Wl,"$(RPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST)
}
# Slight mods for dlls
@@ -152,7 +157,7 @@
actions link.dll bind LIBRARIES
{
- "$(CONFIG_COMMAND)" $(OPTIONS) -L"$(LINKPATH)" -Wl,$(RPATH_OPTION:E=-R)$(SPACE)-Wl,"$(RPATH)" -o "$(<)" -Wl,-soname$(SPACE)-Wl,$(<[1]:D=) -shared "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST)
+ "$(CONFIG_COMMAND)" $(OPTIONS) $(USER_OPTIONS) -L"$(LINKPATH)" -Wl,$(RPATH_OPTION:E=-R)$(SPACE)-Wl,"$(RPATH)" -o "$(<)" -Wl,-soname$(SPACE)-Wl,$(<[1]:D=) -shared "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST)
}
# Declare action for creating static libraries
Modified: branches/release/tools/build/v2/tools/pgi.jam
==============================================================================
--- branches/release/tools/build/v2/tools/pgi.jam (original)
+++ branches/release/tools/build/v2/tools/pgi.jam 2011-01-25 13:06:12 EST (Tue, 25 Jan 2011)
@@ -57,7 +57,7 @@
# Declare flags and actions for compilation
flags pgi.compile OPTIONS : -Kieee ;
-flags pgi.compile OPTIONS <link>shared : -fpic ;
+flags pgi.compile OPTIONS <link>shared : -fpic -fPIC ;
flags pgi.compile OPTIONS <debug-symbols>on : -gopt ;
flags pgi.compile OPTIONS <profiling>on : -xprofile=tcov ;
flags pgi.compile OPTIONS <optimization>speed : -fast -Mx,8,0x10000000 ;
@@ -97,7 +97,7 @@
flags pgi.link OPTIONS <debug-symbols>off : -s ;
flags pgi.link OPTIONS <profiling>on : -xprofile=tcov ;
flags pgi.link OPTIONS <linkflags> ;
-flags pgi.link OPTIONS <link>shared : -fpic ;
+flags pgi.link OPTIONS <link>shared : -fpic -fPIC ;
flags pgi.link LINKPATH <library-path> ;
flags pgi.link FINDLIBS-ST <find-static-library> ;
flags pgi.link FINDLIBS-SA <find-shared-library> ;
Modified: branches/release/tools/build/v2/tools/qcc.jam
==============================================================================
--- branches/release/tools/build/v2/tools/qcc.jam (original)
+++ branches/release/tools/build/v2/tools/qcc.jam 2011-01-25 13:06:12 EST (Tue, 25 Jan 2011)
@@ -25,12 +25,6 @@
toolset.inherit-flags qcc : unix ;
toolset.inherit-rules qcc : unix ;
-
-# Set typed target suffixes used by the qcc toolset.
-type.set-generated-target-suffix OBJ : <toolset>qcc : o ;
-type.set-generated-target-suffix STATIC_LIB : <toolset>qcc : a ;
-
-
# Initializes the qcc toolset for the given version. If necessary, command may
# be used to specify where the compiler is located. The parameter 'options' is a
# space-delimited list of options, each one being specified as
Modified: branches/release/tools/build/v2/tools/quickbook.jam
==============================================================================
--- branches/release/tools/build/v2/tools/quickbook.jam (original)
+++ branches/release/tools/build/v2/tools/quickbook.jam 2011-01-25 13:06:12 EST (Tue, 25 Jan 2011)
@@ -121,6 +121,9 @@
# <quickbook-binary-dependencies> targets to build QuickBook from sources.
feature.feature <quickbook-binary> : : free ;
feature.feature <quickbook-binary-dependencies> : : free dependency ;
+feature.feature <quickbook-define> : : free ;
+feature.feature <quickbook-indent> : : free ;
+feature.feature <quickbook-line-width> : : free ;
# quickbook-binary-generator handles generation of the QuickBook executable, by
@@ -297,6 +300,9 @@
toolset.flags quickbook.quickbook-to-boostbook QB-COMMAND <quickbook-binary> ;
toolset.flags quickbook.quickbook-to-boostbook QB-DEPENDENCIES <quickbook-binary-dependencies> ;
toolset.flags quickbook.quickbook-to-boostbook INCLUDES <include> ;
+toolset.flags quickbook.quickbook-to-boostbook QB-DEFINES <quickbook-define> ;
+toolset.flags quickbook.quickbook-to-boostbook QB-INDENT <quickbook-indent> ;
+toolset.flags quickbook.quickbook-to-boostbook QB-LINE-WIDTH <quickbook-line-width> ;
rule quickbook-to-boostbook ( target : source : properties * )
@@ -309,7 +315,7 @@
actions quickbook-to-boostbook
{
- "$(QB-COMMAND)" -I"$(INCLUDES)" --output-file="$(1)" "$(2)"
+ "$(QB-COMMAND)" -I"$(INCLUDES)" -D"$(QB-DEFINES)" --indent="$(QB-INDENT)" --linewidth="$(QB-LINE-WIDTH)" --output-file="$(1)" "$(2)"
}
Modified: branches/release/tools/build/v2/tools/types/lib.py
==============================================================================
--- branches/release/tools/build/v2/tools/types/lib.py (original)
+++ branches/release/tools/build/v2/tools/types/lib.py 2011-01-25 13:06:12 EST (Tue, 25 Jan 2011)
@@ -1,23 +1,77 @@
-# Copyright David Abrahams 2004. Distributed under the Boost
+# Status: ported
+# Base revision: 64456.
+# Copyright David Abrahams 2004.
+# Copyright Vladimir Prus 2010.
+# Distributed under the Boost
# Software License, Version 1.0. (See accompanying
# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
-from b2.build import type
+import b2.build.type as type
-def register ():
-
- if not type.registered ('LIB'):
- type.register ('LIB')
-
- type.register_type ('STATIC_LIB', ['lib', 'a'], 'LIB', ['NT', 'CYGWIN'])
- type.register_type ('STATIC_LIB', ['a'], 'LIB')
-
- type.register_type ('IMPORT_LIB', [], 'STATIC_LIB')
- type.set_generated_target_suffix ('IMPORT_LIB', [], 'lib')
-
- type.register_type ('SHARED_LIB', ['dll'], 'LIB', ['NT', 'CYGWIN'])
- type.register_type ('SHARED_LIB', ['so'], 'LIB')
-
- type.register_type ('SEARCHED_LIB', [], 'LIB')
+# The following naming scheme is used for libraries.
+#
+# On *nix:
+# libxxx.a static library
+# libxxx.so shared library
+#
+# On windows (msvc)
+# libxxx.lib static library
+# xxx.dll DLL
+# xxx.lib import library
+#
+# On windows (mingw):
+# libxxx.a static library
+# libxxx.dll DLL
+# libxxx.dll.a import library
+#
+# On cygwin i.e. <target-os>cygwin
+# libxxx.a static library
+# cygxxx.dll DLL
+# libxxx.dll.a import library
+#
-register ()
+type.register('LIB')
+
+# FIXME: should not register both extensions on both platforms.
+type.register('STATIC_LIB', ['a', 'lib'], 'LIB')
+
+# The 'lib' prefix is used everywhere
+type.set_generated_target_prefix('STATIC_LIB', [], 'lib')
+
+# Use '.lib' suffix for windows
+type.set_generated_target_suffix('STATIC_LIB', ['<target-os>windows'], 'lib')
+
+# Except with gcc.
+type.set_generated_target_suffix('STATIC_LIB', ['<toolset>gcc', '<target-os>windows'], 'a')
+
+# Use xxx.lib for import libs
+type.register('IMPORT_LIB', [], 'STATIC_LIB')
+type.set_generated_target_prefix('IMPORT_LIB', [], '')
+type.set_generated_target_suffix('IMPORT_LIB', [], 'lib')
+
+# Except with gcc (mingw or cygwin), where use libxxx.dll.a
+type.set_generated_target_prefix('IMPORT_LIB', ['<toolset>gcc'], 'lib')
+type.set_generated_target_suffix('IMPORT_LIB', ['<toolset>gcc'], 'dll.a')
+
+type.register('SHARED_LIB', ['so', 'dll', 'dylib'], 'LIB')
+
+# Both mingw and cygwin use libxxx.dll naming scheme.
+# On Linux, use "lib" prefix
+type.set_generated_target_prefix('SHARED_LIB', [], 'lib')
+# But don't use it on windows
+type.set_generated_target_prefix('SHARED_LIB', ['<target-os>windows'], '')
+# But use it again on mingw
+type.set_generated_target_prefix('SHARED_LIB', ['<toolset>gcc', '<target-os>windows'], 'lib')
+# And use 'cyg' on cygwin
+type.set_generated_target_prefix('SHARED_LIB', ['<target-os>cygwin'], 'cyg')
+
+
+type.set_generated_target_suffix('SHARED_LIB', ['<target-os>windows'], 'dll')
+type.set_generated_target_suffix('SHARED_LIB', ['<target-os>cygwin'], 'dll')
+type.set_generated_target_suffix('SHARED_LIB', ['<target-os>darwin'], 'dylib')
+
+type.register('SEARCHED_LIB', [], 'LIB')
+# This is needed so that when we create a target of SEARCHED_LIB
+# type, there's no prefix or suffix automatically added.
+type.set_generated_target_prefix('SEARCHED_LIB', [], '')
+type.set_generated_target_suffix('SEARCHED_LIB', [], '')
Modified: branches/release/tools/build/v2/tools/unix.py
==============================================================================
--- branches/release/tools/build/v2/tools/unix.py (original)
+++ branches/release/tools/build/v2/tools/unix.py 2011-01-25 13:06:12 EST (Tue, 25 Jan 2011)
@@ -129,9 +129,9 @@
def set_library_order (manager, sources, prop_set, result):
used_libraries = []
deps = prop_set.dependency ()
-
- [ sources.append (manager.get_object (get_value (x))) for x in deps ]
- sources = sequence.unique (sources)
+
+ sources.extend(d.value() for d in deps)
+ sources = sequence.unique(sources)
for l in sources:
if l.type () and type.is_derived (l.type (), 'LIB'):
Modified: branches/release/tools/build/v2/util/__init__.py
==============================================================================
--- branches/release/tools/build/v2/util/__init__.py (original)
+++ branches/release/tools/build/v2/util/__init__.py 2011-01-25 13:06:12 EST (Tue, 25 Jan 2011)
@@ -0,0 +1,136 @@
+
+import bjam
+import re
+import types
+
# Decorator that specifies the bjam-side prototype for a Python function.
def bjam_signature(s):
    """Return a decorator that stores *s* on the decorated function.

    The signature is attached as the 'bjam_signature' attribute and is
    read by the bjam/Python bridge; the function itself is unchanged.
    """
    def wrap(f):
        f.bjam_signature = s
        return f
    return wrap
+
def metatarget(f):
    """Mark *f* as a metatarget rule by attaching the standard
    metatarget bjam signature, then return it unchanged."""
    f.bjam_signature = (
        ["name"],
        ["sources", "*"],
        ["requirements", "*"],
        ["default_build", "*"],
        ["usage_requirements", "*"],
    )
    return f
+
class cached(object):
    """Memoizing wrapper: caches results of the wrapped callable by its
    positional arguments. Usable as a decorator on functions and, via the
    descriptor protocol, on methods."""

    def __init__(self, function):
        # The wrapped callable and its result cache, keyed by args tuple.
        self.function = function
        self.cache = {}

    def __call__(self, *args):
        if args in self.cache:
            return self.cache[args]
        result = self.function(*args)
        self.cache[args] = result
        return result

    def __get__(self, instance, type):
        # Bind like an ordinary method so instance methods can be cached.
        return types.MethodType(self, instance, type)
+
def unquote(s):
    """Strip surrounding double quotes from *s*, if present.

    A non-empty string that both starts and ends with '"' has its first
    and last characters removed (so a lone '"' yields ''); anything else
    is returned unchanged.
    """
    if not s:
        return s
    if s.startswith('"') and s.endswith('"'):
        return s[1:-1]
    return s
+
# Matches action names already qualified with a Jamfile module, e.g.
# "Jamfile</some/path>%rule-name".
_extract_jamfile_and_rule = re.compile("(Jamfile<.*>)%(.*)")

def qualify_jam_action(action_name, context_module):
    """Return *action_name* qualified with *context_module*.

    Names exported from Python ("###"-prefixed) and names already in the
    indirect "Jamfile<...>%rule" form are returned untouched. A name of
    the form "<context_module>.rule" collapses to "<context_module>%rule";
    everything else is prefixed with "<context_module>%".
    """
    if action_name.startswith("###"):
        # Callable exported from Python. Don't touch.
        return action_name
    if _extract_jamfile_and_rule.match(action_name):
        # Already in indirect format.
        return action_name

    dot = action_name.find('.')
    if dot != -1 and action_name[:dot] == context_module:
        return context_module + '%' + action_name[dot + 1:]
    return context_module + '%' + action_name
+
+
def set_jam_action(name, *args):
    """Register an update action for *name* on the bjam side.

    Names in the indirect "Jamfile<...>%rule" form are registered inside
    that Jamfile module; plain names are registered globally.
    """
    m = _extract_jamfile_and_rule.match(name)
    if m:
        call = ("set-update-action-in-module", m.group(1), m.group(2))
    else:
        call = ("set-update-action", name)

    return bjam.call(*(call + args))
+
+
def call_jam_function(name, *args):
    """Invoke the Jam rule *name* with *args* via bjam.

    A name in the indirect "Jamfile<...>%rule" form is called inside the
    named Jamfile module; any other name is called directly.
    """
    m = _extract_jamfile_and_rule.match(name)
    if not m:
        return bjam.call(*((name,) + args))
    return bjam.call(*(("call-in-module", m.group(1), m.group(2)) + args))
+
# Counter used to mint unique Jam-side tokens for Python values.
__value_id = 0
# Two-way mapping between Python values and the tokens exported for them.
__python_to_jam = {}
__jam_to_python = {}

def value_to_jam(value, methods=False):
    """Makes a token to refer to a Python value inside Jam language code.

    The token is merely a string that can be passed around in Jam code and
    eventually passed back. For example, we might want to pass a PropertySet
    instance to a tag function, and it might eventually call back into
    virtual_target.add_suffix_and_prefix, passing the same instance.

    For values that are classes, we'll also make class methods callable
    from Jam.

    Note that this is necessary to make a bit more of existing Jamfiles work.
    This trick should not be used too much, or else the performance benefits
    of the Python port will be eaten.
    """
    global __value_id

    if value in __python_to_jam:
        return __python_to_jam[value]

    token = '###_' + str(__value_id)
    __value_id += 1
    __python_to_jam[value] = token
    __jam_to_python[token] = value

    # For old-style class instances, export every public method to bjam so
    # Jam code can call "<token>.method".
    if methods and type(value) == types.InstanceType:
        for field_name in dir(value):
            field = getattr(value, field_name)
            if callable(field) and not field_name.startswith("__"):
                bjam.import_rule("", token + "." + field_name, field)

    return token

def record_jam_to_value_mapping(jam_value, python_value):
    """Associate an externally chosen Jam token with a Python value."""
    __jam_to_python[jam_value] = python_value

def jam_to_value_maybe(jam_value):
    """If *jam_value* is a string token previously exported via
    value_to_jam/record_jam_to_value_mapping, return the associated Python
    value; otherwise return *jam_value* unchanged."""
    if type(jam_value) == type(""):
        return __jam_to_python.get(jam_value, jam_value)
    else:
        return jam_value
+
def stem(filename):
    """Return *filename* truncated at its first dot.

    Everything from the first '.' onward is dropped; a name with no dot
    is returned unchanged (so "a.b.c" -> "a", ".x" -> "").
    """
    return filename.split('.', 1)[0]
Modified: branches/release/tools/build/v2/util/logger.py
==============================================================================
--- branches/release/tools/build/v2/util/logger.py (original)
+++ branches/release/tools/build/v2/util/logger.py 2011-01-25 13:06:12 EST (Tue, 25 Jan 2011)
@@ -30,7 +30,7 @@
return False
def on (self):
- return False
+ return True
class TextLogger (NullLogger):
def __init__ (self):
Modified: branches/release/tools/build/v2/util/path.py
==============================================================================
--- branches/release/tools/build/v2/util/path.py (original)
+++ branches/release/tools/build/v2/util/path.py 2011-01-25 13:06:12 EST (Tue, 25 Jan 2011)
@@ -22,6 +22,9 @@
from utility import to_seq
from glob import glob as builtin_glob
+from b2.util import bjam_signature
+
+@bjam_signature((["path", "root"],))
def root (path, root):
""" If 'path' is relative, it is rooted at 'root'. Otherwise, it's unchanged.
"""
@@ -30,6 +33,7 @@
else:
return os.path.join (root, path)
+@bjam_signature((["native"],))
def make (native):
""" Converts the native path into normalized form.
"""
@@ -43,6 +47,7 @@
return os.path.normpath (native)
+@bjam_signature((["path"],))
def native (path):
""" Builds a native representation of the path.
"""
@@ -303,70 +308,47 @@
result.extend (glob.glob (p))
return result
-# #
-# # Returns true is the specified file exists.
-# #
-# rule exists ( file )
-# {
-# return [ path.glob $(file:D) : $(file:D=) ] ;
-# }
-# NATIVE_RULE path : exists ;
-#
-#
-#
-# #
-# # Find out the absolute name of path and returns the list of all the parents,
-# # starting with the immediate one. Parents are returned as relative names.
-# # If 'upper_limit' is specified, directories above it will be pruned.
-# #
-# rule all-parents ( path : upper_limit ? : cwd ? )
-# {
-# cwd ?= [ pwd ] ;
-# local path_ele = [ regex.split [ root $(path) $(cwd) ] "/" ] ;
-#
-# if ! $(upper_limit) {
-# upper_limit = / ;
-# }
-# local upper_ele = [ regex.split [ root $(upper_limit) $(cwd) ] "/" ] ;
-#
-# # Leave only elements in 'path_ele' below 'upper_ele'
-# while $(path_ele) && $(upper_ele[1]) = $(path_ele[1]) {
-# upper_ele = $(upper_ele[2-]) ;
-# path_ele = $(path_ele[2-]) ;
-# }
-#
-# # All upper elements removed ?
-# if ! $(upper_ele) {
-# # Create the relative paths to parents, number of elements in 'path_ele'
-# local result ;
-# for local i in $(path_ele) {
-# path = [ parent $(path) ] ;
-# result += $(path) ;
-# }
-# return $(result) ;
-# }
-# else {
-# error "$(upper_limit) is not prefix of $(path)" ;
-# }
-# }
-#
-#
-# #
-# # Search for 'pattern' in parent directories of 'dir', up till and including
-# # 'upper_limit', if it is specified, or till the filesystem root otherwise.
-# #
-# rule glob-in-parents ( dir : patterns + : upper-limit ? )
-# {
-# local result ;
-# local parent-dirs = [ all-parents $(dir) : $(upper-limit) ] ;
-#
-# while $(parent-dirs) && ! $(result)
-# {
-# result = [ glob $(parent-dirs[1]) : $(patterns) ] ;
-# parent-dirs = $(parent-dirs[2-]) ;
-# }
-# return $(result) ;
-# }
#
# Find out the absolute name of path and returns the list of all the parents,
# starting with the immediate one. Parents are returned as relative names.
# If 'upper_limit' is specified, directories above it will be pruned.
#
def all_parents(path, upper_limit=None, cwd=None):
    """Return the parents of 'path' as relative names.

    Parents are returned nearest-first as 'path/..', 'path/../..', etc.
    'path' is made absolute against 'cwd' (default: the process cwd).
    If 'upper_limit' is given, it is the last parent returned; a
    ValueError is raised when it is not actually a prefix of 'path'.
    """
    if not cwd:
        cwd = os.getcwd()

    # os.path.join returns 'path' unchanged when it is already absolute.
    path_abs = os.path.normpath(os.path.join(cwd, path))

    if upper_limit:
        upper_limit = os.path.normpath(os.path.join(cwd, upper_limit))

    result = []
    while path_abs and path_abs != upper_limit:
        # Walk the *absolute* path upwards; the original code split the
        # relative 'path' instead, so 'path_abs' could never reach an
        # absolute 'upper_limit' and absolute inputs looped forever.
        head = os.path.dirname(path_abs)
        if head == path_abs:
            # The filesystem root is its own parent; stop here.
            break
        path = os.path.join(path, "..")
        result.append(path)
        path_abs = head

    if upper_limit and path_abs != upper_limit:
        # ValueError is a subclass of the BaseException raised before, so
        # existing handlers still catch it.
        raise ValueError("'%s' is not a prefix of '%s'" % (upper_limit, path))

    return result
+
# Search for 'pattern' in parent directories of 'dir', up till and including
# 'upper_limit', if it is specified, or till the filesystem root otherwise.
#
def glob_in_parents(dir, patterns, upper_limit=None):
    """Return the matches from the nearest parent of 'dir' that has any,
    or an empty list if no parent directory matches."""
    for parent_dir in all_parents(dir, upper_limit):
        found = glob(parent_dir, patterns)
        if found:
            return found
    return []
+
#
# #
# # Assuming 'child' is a subdirectory of 'parent', return the relative
@@ -866,7 +848,7 @@
exclude_patterns = []
result = glob(roots, patterns, exclude_patterns)
- subdirs = [s for s in result if s != "." and s != ".." and os.path.isdir(s)]
+ subdirs = [s for s in glob(roots, ["*"]) if s != "." and s != ".." and os.path.isdir(s)]
if subdirs:
result.extend(glob_tree(subdirs, patterns, exclude_patterns))
Modified: branches/release/tools/build/v2/util/sequence.py
==============================================================================
--- branches/release/tools/build/v2/util/sequence.py (original)
+++ branches/release/tools/build/v2/util/sequence.py 2011-01-25 13:06:12 EST (Tue, 25 Jan 2011)
@@ -5,19 +5,17 @@
import operator
-def unique (values):
- # TODO: is this the most efficient way?
- # consider using a set from Python 2.4.
- return list(set(values))
-# cache = {}
-# result = []
-# for v in values:
-# if not cache.has_key(v):
-# cache[v] = None
-# result.append(v)
-# return result
-
-
def unique(values, stable=False):
    """Remove duplicate elements from 'values'.

    With stable=True the first occurrence of each element is kept in its
    original order; otherwise the result order is unspecified.
    """
    if not stable:
        return list(set(values))

    seen = set()
    result = []
    for item in values:
        if item not in seen:
            seen.add(item)
            result.append(item)
    return result
def max_element (elements, ordered = None):
""" Returns the maximum number in 'elements'. Uses 'ordered' for comparisons,
Boost-Commit list run by bdawes at acm.org, david.abrahams at rcn.com, gregod at cs.rpi.edu, cpdaniel at pacbell.net, john at johnmaddock.co.uk