
Boost-Commit :

From: ghost_at_[hidden]
Date: 2008-05-18 05:05:43


Author: vladimir_prus
Date: 2008-05-18 05:05:42 EDT (Sun, 18 May 2008)
New Revision: 45480
URL: http://svn.boost.org/trac/boost/changeset/45480

Log:
Progress. Hello world now builds with gcc.

Text files modified:
   branches/build/python_port/python/boost/build/build/errors.py | 12 ++
   branches/build/python_port/python/boost/build/build/generators.py | 124 ++++++++++++++++++++-------------------
   branches/build/python_port/python/boost/build/build/targets.py | 1
   branches/build/python_port/python/boost/build/build/toolset.py | 22 +++---
   branches/build/python_port/python/boost/build/build/virtual_target.py | 2
   branches/build/python_port/python/boost/build/manager.py | 15 ++--
   branches/build/python_port/python/boost/build/tools/builtin.py | 23 +++----
   branches/build/python_port/python/boost/build/tools/common.py | 61 ++++++++-----------
   branches/build/python_port/python/boost/build/tools/gcc.py | 98 +++++++++++++++++--------------
   branches/build/python_port/python/boost/build/tools/unix.py | 13 ++-
   branches/build/python_port/python/boost/build/util/utility.py | 2
   11 files changed, 192 insertions(+), 181 deletions(-)

Modified: branches/build/python_port/python/boost/build/build/errors.py
==============================================================================
--- branches/build/python_port/python/boost/build/build/errors.py (original)
+++ branches/build/python_port/python/boost/build/build/errors.py 2008-05-18 05:05:42 EDT (Sun, 18 May 2008)
@@ -50,11 +50,12 @@
 class ExceptionWithUserContext(Exception):
 
     def __init__(self, message, context,
-                 original_exception=None, original_tb=None):
+                 original_exception=None, original_tb=None, stack=None):
         Exception.__init__(self, message)
         self.context_ = context
         self.original_exception_ = original_exception
         self.original_tb_ = original_tb
+        self.stack_ = stack
 
     def report(self):
         print "error:", self.message
@@ -66,7 +67,11 @@
             c.report()
         print
         if "--stacktrace" in bjam.variable("ARGV"):
-            traceback.print_tb(self.original_tb_)
+            if self.original_tb_:
+                traceback.print_tb(self.original_tb_)
+            elif self.stack_:
+                for l in traceback.format_list(self.stack_):
+                    print l,
         else:
             print " use the '--stacktrace' option to get Python stacktrace"
         print
@@ -109,7 +114,8 @@
         raise ExceptionWithUserContext("unexpected exception", self.contexts_[:],
                                        e, sys.exc_info()[2])
     def __call__(self, message):
-        raise ExceptionWithUserContext(message, self.contexts_[:])
+        raise ExceptionWithUserContext(message, self.contexts_[:],
+                                       stack=traceback.extract_stack())
 
         
 

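For reference, the errors.py change above records the Python call stack at the point a user-level error is raised (traceback.extract_stack()) and prints it from report() only when --stacktrace is given. A minimal standalone sketch of that capture-then-report pattern, with illustrative names rather than the module's real classes:

import traceback

class ReportedError(Exception):
    def __init__(self, message, stack=None):
        Exception.__init__(self, message)
        self.stack_ = stack

    def report(self, show_stacktrace=False):
        print("error: " + str(self))
        if show_stacktrace and self.stack_:
            # format_list turns the extracted stack back into readable lines.
            for line in traceback.format_list(self.stack_):
                print(line.rstrip())

def error(message):
    # Capture the stack where the error is raised, not where it is reported.
    raise ReportedError(message, stack=traceback.extract_stack())

try:
    error("no generators can make this target")
except ReportedError as e:
    e.report(show_stacktrace=True)
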
Modified: branches/build/python_port/python/boost/build/build/generators.py
==============================================================================
--- branches/build/python_port/python/boost/build/build/generators.py (original)
+++ branches/build/python_port/python/boost/build/build/generators.py 2008-05-18 05:05:42 EDT (Sun, 18 May 2008)
@@ -51,12 +51,14 @@
 import cStringIO
 import os.path
 
-import type, virtual_target, property_set, property
+from virtual_target import Subvariant
+import virtual_target, type, property_set, property
 from boost.build.util.logger import *
 from boost.build.util.utility import *
 from boost.build.util import set
 from boost.build.util.sequence import unique
 import boost.build.util.sequence as sequence
+from boost.build.manager import get_manager
 
 def reset ():
     """ Clear the module state. This is mainly for testing purposes.
@@ -83,11 +85,12 @@
 
 
 __debug = None
+__indent = ""
 
 def debug():
     global __debug
     if __debug is None:
-        __debug = "--debug-building" in bjam.variable("ARGV")
+        __debug = "--debug-generators" in bjam.variable("ARGV")
     return __debug
 
 def increase_indent(self):
@@ -96,9 +99,9 @@
 def decrease_indent(self):
     __indent = __indent[0:-4]
 
-def dout(self, message):
-    if __debug:
-        print ___indent + message
+def dout(message):
+    if debug():
+        print __indent + message
 
 def normalize_target_list (targets):
     """ Takes a vector of 'virtual-target' instances and makes a normalized
@@ -193,13 +196,16 @@
         for t in self.target_types_and_names_:
             m = _re_match_type.match(t)
             assert m
-
-            if m == base:
-                target_types.append(t + m.group(2))
+
+            if m.group(1) == base:
+                if m.group(2):
+                    target_types.append(type + m.group(2))
+                else:
+                    target_types.append(type)
             else:
                 target_types.append(t)
 
-        return self.__class__(self.id_, self.compositing_,
+        return self.__class__(self.id_, self.composing_,
                               self.source_types_,
                               target_types,
                               self.requirements_)
@@ -268,14 +274,10 @@
         
         if project.manager ().logger ().on ():
             project.manager ().logger ().log (__name__, " generator '%s'" % self.id_)
-            project.manager ().logger ().log (__name__, " multiple: '%s'" % multiple)
             project.manager ().logger ().log (__name__, " composing: '%s'" % self.composing_)
         
         if not self.composing_ and len (sources) > 1 and len (self.source_types_) > 1:
             raise BaseException ("Unsupported source/source_type combination")
-
-        if len (self.source_types_) > 1:
-            multiple = False
                 
         # We don't run composing generators if no name is specified. The reason
         # is that composing generator combines several targets, which can have
@@ -288,20 +290,19 @@
         # transformation CPP -> OBJ -> STATIC_LIB -> RSP -> EXE won't be allowed
         # (the OBJ -> STATIC_LIB generator is composing)
         if not self.composing_ or name:
-            return self.run_really (project, name, prop_set, sources, multiple)
+            return self.run_really (project, name, prop_set, sources)
         else:
             return []
 
-    def run_really (self, project, name, prop_set, sources, multiple):
+    def run_really (self, project, name, prop_set, sources):
 
         # consumed: Targets that this generator will consume directly.
         # bypassed: Targets that can't be consumed and will be returned as-is.
         
         if self.composing_:
-            (consumed, bypassed) = self.convert_multiple_sources_to_consumable_types (project, prop_set, sources, multiple)
-
+            (consumed, bypassed) = self.convert_multiple_sources_to_consumable_types (project, prop_set, sources)
         else:
-            (consumed, bypassed) = self.convert_to_consumable_types (project, name, prop_set, sources, multiple, False)
+            (consumed, bypassed) = self.convert_to_consumable_types (project, name, prop_set, sources)
                 
         result = []
         if consumed:
@@ -356,7 +357,7 @@
         # In the first case, we want to take the part till the last
         # dot. In the second case -- no sure, but for now take
         # the part till the last dot too.
-        name = os.path.splitext(sources[0].name())
+        name = os.path.splitext(sources[0].name())[0]
                         
         for s in sources[1:]:
             n2 = os.path.splitext(s.name())
@@ -365,7 +366,7 @@
                     "%s: source targets have different names: cannot determine target name"
                     % (self.id_))
                         
- # Names of sources might include directory. We should strip it.
+ # Names of sources might include directory. We should strip it.
         return os.path.basename(name)
         
         
@@ -392,7 +393,7 @@
             source.
         """
         if not name:
-            name = self.determine_output_name(self, sources)
+            name = self.determine_output_name(sources)
         
         # Assign an action for each target
         action = self.action_class()
@@ -411,7 +412,7 @@
         
         return [ project.manager().virtual_targets().register(t) for t in targets ]
 
-    def convert_to_consumable_types (self, project, name, prop_set, sources, only_one):
+    def convert_to_consumable_types (self, project, name, prop_set, sources, only_one=False):
         """ Attempts to convert 'source' to the types that this generator can
             handle. The intention is to produce the set of targets can should be
             used when generator is run.
@@ -449,7 +450,7 @@
         # be done by 'construct_types'.
                     
         if missing_types:
-            transformed = construct_types (project, name, missing_types, multiple, prop_set, sources)
+            transformed = construct_types (project, name, missing_types, prop_set, sources)
                                 
             # Add targets of right type to 'consumed'. Add others to
             # 'bypassed'. The 'generators.construct' rule has done
@@ -487,12 +488,9 @@
         return (consumed, bypassed)
     
 
-    def convert_multiple_sources_to_consumable_types (self, project, prop_set, sources, multiple):
+    def convert_multiple_sources_to_consumable_types (self, project, prop_set, sources):
         """ Converts several files to consumable types.
-        """
-        if not multiple:
-            multiple = '*'
-
+        """
         consumed = []
         bypassed = []
 
@@ -500,7 +498,7 @@
         # a usable type.
         for s in sources:
             # TODO: need to check for failure on each source.
-            (c, b) = self.convert_to_consumable_types (project, None, prop_set, [s], multiple, True)
+            (c, b) = self.convert_to_consumable_types (project, None, prop_set, [s], True)
             if not c:
                 project.manager ().logger ().log (__name__, " failed to convert ", s)
 
@@ -553,7 +551,7 @@
     # in .generators.$(t) in that case, otherwise, it will
     # be tried twice and we'll get false ambiguity.
     for t in sequence.unique(g.target_types()):
-        __type_to_generators.get(t, []).append(g)
+        __type_to_generators.setdefault(t, []).append(g)
 
     # Update the set of generators for toolset
 
@@ -691,15 +689,15 @@
     
     return __viable_source_types_cache [key]
 
-def try_one_generator_really (project, name, generator, multiple, target_type, properties, sources):
+def try_one_generator_really (project, name, generator, target_type, properties, sources):
     """ Returns usage requirements + list of created targets.
     """
-    targets = generator.run (project, name, properties, sources, multiple)
+    targets = generator.run (project, name, properties, sources)
 
     usage_requirements = []
     success = False
 
-    dout("returned " + targets)
+    dout("returned " + str(targets))
 
     if targets:
         success = True;
@@ -719,11 +717,11 @@
 # }
 
     if success:
-        return (usage_requirements, target)
+        return (usage_requirements, targets)
     else:
         return None
 
-def try_one_generator (project, name, generator, multiple, target_type, properties, sources):
+def try_one_generator (project, name, generator, target_type, properties, sources):
     """ Checks if generator invocation can be pruned, because it's guaranteed
         to fail. If so, quickly returns empty list. Otherwise, calls
         try_one_generator_really.
@@ -746,10 +744,10 @@
         return []
 
     else:
-        return try_one_generator_really (project, name, generator, multiple, target_type, properties, sources)
+        return try_one_generator_really (project, name, generator, target_type, properties, sources)
 
 
-def construct_types (project, name, target_types, multiple, prop_set, sources):
+def construct_types (project, name, target_types, prop_set, sources):
     
     result = []
     usage_requirements = property_set.empty()
@@ -784,7 +782,7 @@
         if not t.type ():
             raise BaseException ("target '%s' has no type" % str (t))
 
-def find_viable_generators_aux (logger, target_type, prop_set):
+def find_viable_generators_aux (target_type, prop_set):
     """ Returns generators which can be used to construct target of specified type
         with specified properties. Uses the following algorithm:
        - iterates over requested target_type and all its bases (in the order returned by
@@ -798,21 +796,19 @@
     """
     # Select generators that can create the required target type.
     viable_generators = []
+    initial_generators = []
 
     import type
 
     # Try all-type generators first. Assume they have
     # quite specific requirements.
     all_bases = type.all_bases(target_type)
-
-    logger.log (__name__, "find_viable_generators target_type = '%s' property_set = '%s'" % (target_type, prop_set.as_path ()))
-
+
     for t in all_bases:
-        logger.log (__name__, "trying type ", t [0])
         
-        generators_for_this_type = __type_to_generators.get(t, [])
-
-        if generators_for_this_type:
+        initial_generators = __type_to_generators.get(t, [])
+
+        if initial_generators:
             dout("there are generators for this type")
             if t != target_type:
                 # We're here, when no generators for target-type are found,
@@ -821,34 +817,36 @@
                 # base type, not of 'target-type'. So, we clone the generators
                 # and modify the list of target types.
                 generators2 = []
-                for g in generators:
+                for g in initial_generators[:]:
                     # generators.register adds generator to the list of generators
                     # for toolsets, which is a bit strange, but should work.
                     # That list is only used when inheriting toolset, which
                     # should have being done before generators are run.
-                    generators2.append(g.clone_and_change_target_type(
-                        t, target_type))
-                    generators.register(generators2[-1])
+                    ng = g.clone_and_change_target_type(t, target_type)
+                    generators2.append(ng)
+                    register(ng)
                     
-                generators = generators2
+                initial_generators = generators2
+            break
     
-    for g in generators:
-        dout("trying generator" + g.id() + "(" + g.source_types() + "->" + g.target_types() + ")")
+    for g in initial_generators:
+        dout("trying generator " + g.id()
+             + "(" + str(g.source_types()) + "->" + str(g.target_types()) + ")")
         
         m = g.match_rank(prop_set)
         if m:
             dout(" is viable")
-            viable-generators.append(g)
+            viable_generators.append(g)
                             
     return viable_generators
 
-def find_viable_generators (logger, target_type, prop_set):
+def find_viable_generators (target_type, prop_set):
     key = target_type + '.' + str (prop_set)
 
     l = __viable_generators_cache.get (key, None)
 
     if not l:
-        l = find_viable_generators_aux (logger, target_type, prop_set)
+        l = find_viable_generators_aux (target_type, prop_set)
 
         __viable_generators_cache [key] = l
 
@@ -885,19 +883,21 @@
         
     return result
     
-def __construct_really (project, name, target_type, multiple, prop_set, sources):
+def __construct_really (project, name, target_type, prop_set, sources):
     """ Attempts to construct target by finding viable generators, running them
         and selecting the dependency graph.
     """
-    viable_generators = find_viable_generators (project.manager ().logger (), target_type, prop_set)
+    viable_generators = find_viable_generators (target_type, prop_set)
                     
     result = []
     
     project.manager ().logger ().log (__name__, "*** %d viable generators" % len (viable_generators))
+
+    generators_that_succeeded = []
     
     for g in viable_generators:
         __active_generators.append(g)
-        r = try_one_generator (project, name, g, multiple, target_type, prop_set, sources)
+        r = try_one_generator (project, name, g, target_type, prop_set, sources)
         del __active_generators[-1]
         
         if r:
@@ -913,10 +913,10 @@
                     print >>output, " - " + g.id()
                 print >>output, "First generator produced: "
                 for t in result[1:]:
-                    print >>output, " - " + t.str()
+                    print >>output, " - " + str(t)
                 print >>output, "Second generator produced:"
                 for t in r[1:]:
-                    print >>output, " - " + t.str()
+                    print >>output, " - " + str(t)
                 get_manager().errors()(output.getvalue())
             else:
                 result = r;
@@ -953,9 +953,11 @@
 
         project.manager ().logger ().log (__name__, " properties: ", prop_set.raw ())
              
-        result = __construct_really (project, name, target_type, multiple, prop_set, sources)
+        result = __construct_really (project, name, target_type, prop_set, sources)
 
     project.manager ().logger ().decrease_indent ()
         
     __construct_stack = __construct_stack [1:]
+
+    return result
     

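A small but important fix in generators.py above is the generator registry: dict.get(key, []) returns a fresh list that is never stored back, so every registration was silently dropped, whereas dict.setdefault(key, []) stores the list on first use and returns it. A sketch with a plain dict (illustrative values, not the module's actual state):

type_to_generators = {}

# Broken variant: the temporary list from get() is discarded.
type_to_generators.get('OBJ', []).append('gcc.compile.c++')
print(type_to_generators)   # {}

# Fixed variant: setdefault() stores the list under the key and returns it.
type_to_generators.setdefault('OBJ', []).append('gcc.compile.c++')
print(type_to_generators)   # {'OBJ': ['gcc.compile.c++']}
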
Modified: branches/build/python_port/python/boost/build/build/targets.py
==============================================================================
--- branches/build/python_port/python/boost/build/build/targets.py (original)
+++ branches/build/python_port/python/boost/build/build/targets.py 2008-05-18 05:05:42 EDT (Sun, 18 May 2008)
@@ -74,6 +74,7 @@
 
 import re
 import os.path
+import sys
 
 from boost.build.util.utility import *
 import property, project, virtual_target, property_set, feature, generators

Modified: branches/build/python_port/python/boost/build/build/toolset.py
==============================================================================
--- branches/build/python_port/python/boost/build/build/toolset.py (original)
+++ branches/build/python_port/python/boost/build/build/toolset.py 2008-05-18 05:05:42 EDT (Sun, 18 May 2008)
@@ -251,21 +251,23 @@
             __add_flag (new_rule_or_module, f.variable_name, f.condition, f.values)
 
 def inherit_rules (toolset, base):
-    base_generators = generators.generators_for_toolset (base)
+    pass
+    # FIXME: do something about this.
+# base_generators = generators.generators_for_toolset (base)
 
-    import action
+# import action
 
-    ids = []
-    for g in base_generators:
-        (old_toolset, id) = split_action_id (g.id ())
-        ids.append (id) ;
+# ids = []
+# for g in base_generators:
+# (old_toolset, id) = split_action_id (g.id ())
+# ids.append (id) ;
 
-    new_actions = []
+# new_actions = []
 
-    engine = get_manager().engine()
+# engine = get_manager().engine()
     # FIXME: do this!
-    for action in engine.action.values():
-        pass
+# for action in engine.action.values():
+# pass
 # (old_toolset, id) = split_action_id(action.action_name)
 #
 # if old_toolset == base:

Modified: branches/build/python_port/python/boost/build/build/virtual_target.py
==============================================================================
--- branches/build/python_port/python/boost/build/build/virtual_target.py (original)
+++ branches/build/python_port/python/boost/build/build/virtual_target.py 2008-05-18 05:05:42 EDT (Sun, 18 May 2008)
@@ -577,7 +577,7 @@
     # can create "_d.so" extensions, for example.
     if get_grist(suffix):
         suffix = ungrist(suffix)
-    else:
+    elif suffix:
         suffix = "." + suffix
 
     prefix = boost.build.build.type.generated_target_prefix(type, property_set)

Modified: branches/build/python_port/python/boost/build/manager.py
==============================================================================
--- branches/build/python_port/python/boost/build/manager.py (original)
+++ branches/build/python_port/python/boost/build/manager.py 2008-05-18 05:05:42 EDT (Sun, 18 May 2008)
@@ -2,13 +2,6 @@
 # Software License, Version 1.0. (See accompanying
 # file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
 
-from build.virtual_target import VirtualTargetRegistry
-from build.targets import TargetRegistry
-from build.project import ProjectRegistry
-from build.scanner import ScannerRegistry
-from build.errors import Errors
-from boost.build.util.logger import NullLogger
-from build import build_request, property_set, feature
 import bjam
 
 # To simplify implementation of tools level, we'll
@@ -26,6 +19,14 @@
         """ Constructor.
             engine: the build engine that will actually construct the targets.
         """
+        from build.virtual_target import VirtualTargetRegistry
+        from build.targets import TargetRegistry
+        from build.project import ProjectRegistry
+        from build.scanner import ScannerRegistry
+        from build.errors import Errors
+        from boost.build.util.logger import NullLogger
+        from build import build_request, property_set, feature
+
         self.engine_ = engine
         self.virtual_targets_ = VirtualTargetRegistry (self)
         self.projects_ = ProjectRegistry (self, global_build_dir)

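The manager.py change moves the registry imports from module scope into Manager.__init__, so importing manager.py no longer requires the build.* modules to be importable first; the imports are resolved when the first Manager is constructed, which sidesteps the circular-import failure. A minimal sketch of the pattern (the imported module here is a stand-in, not one of the real registries):

class Manager(object):
    def __init__(self, engine):
        # Deferred import: resolved at construction time rather than at
        # import time, so a cycle between this module and the imported one
        # does not break module loading.
        from collections import OrderedDict
        self.engine_ = engine
        self.registries_ = OrderedDict()

m = Manager(engine=None)
print(m.registries_)
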
Modified: branches/build/python_port/python/boost/build/tools/builtin.py
==============================================================================
--- branches/build/python_port/python/boost/build/tools/builtin.py (original)
+++ branches/build/python_port/python/boost/build/tools/builtin.py 2008-05-18 05:05:42 EDT (Sun, 18 May 2008)
@@ -75,7 +75,7 @@
     # they affect other variants, derived from this one.
     __variant_explicit_properties [name] = explicit_properties
            
-    feature.extend_feature ('variant', [name])
+    feature.extend('variant', [name])
     feature.compose (replace_grist (name, '<variant>'), explicit_properties)
 
 def register_globals ():
@@ -267,7 +267,7 @@
     def __init__ (self, id = 'LibGenerator', composing = True, source_types = [], target_types_and_names = ['LIB'], requirements = []):
         generators.Generator.__init__ (self, id, composing, source_types, target_types_and_names, requirements)
     
-    def run (self, project, name, prop_set, sources, multiple):
+    def run (self, project, name, prop_set, sources):
         # The lib generator is composing, and can be only invoked with
         # explicit name. This check is present in generator.run (and so in
         # builtin.LinkingGenerator), but duplicate it here to avoid doing
@@ -356,9 +356,9 @@
         # search.
         generators.Generator.__init__ (self, id, composing, source_types, target_types_and_names, requirements)
     
-    def run (self, project, name, prop_set, sources, multiple):
+    def run (self, project, name, prop_set, sources):
         if not name:
-            return ([], [])
+            return None
 
         # If name is empty, it means we're called not from top-level.
         # In this case, we just fail immediately, because SearchedLibGenerator
@@ -437,7 +437,7 @@
     def __init__ (self, id, composing, source_types, target_types_and_names, requirements):
         generators.Generator.__init__ (self, id, composing, source_types, target_types_and_names, requirements)
         
-    def run (self, project, name, prop_set, sources, multiple):
+    def run (self, project, name, prop_set, sources):
         lib_sources = prop_set.get ('<library>')
         [ sources.append (project.manager ().get_object (x)) for x in lib_sources ]
         
@@ -467,7 +467,7 @@
         if extra:
             prop_set = prop_set.add_raw (extra)
                         
-        result = generators.Generator.run (self, project, name, prop_set, sources, multiple)
+        result = generators.Generator.run (self, project, name, prop_set, sources)
         
         return (self.extra_usage_requirements (result, prop_set), result)
     
@@ -541,13 +541,10 @@
         
         return spawn
 
-### rule register-linker ( id composing ? : source_types + : target_types + :
-### requirements * )
-### {
-### local g = [ new LinkingGenerator $(id) $(composing) : $(source_types)
-### : $(target_types) : $(requirements) ] ;
-### generators.register $(g) ;
-### }
+
+def register_linker (id, source_types, target_types, requirements):
+    g = LinkingGenerator(id, 1, source_types, target_types, requirements)
+    generators.register(g)
 
 class ArchiveGenerator (generators.Generator):
     """ The generator class for handling STATIC_LIB creation.

Modified: branches/build/python_port/python/boost/build/tools/common.py
==============================================================================
--- branches/build/python_port/python/boost/build/tools/common.py (original)
+++ branches/build/python_port/python/boost/build/tools/common.py 2008-05-18 05:05:42 EDT (Sun, 18 May 2008)
@@ -111,8 +111,9 @@
         raise BaseException (message)
 
     __all_signatures [sig] = True
-
-    return condition
+
+    # FIXME: we actually don't add any subfeatures to the condition
+    return [condition]
 
 
 def get_invocation_command (toolset, tool, user_provided_command = None, additional_paths = None, path_last = None):
@@ -138,6 +139,7 @@
         command = check_tool (user_provided_command)
 
         if not command:
+            print "User-provided command not found"
             # It's possible, in theory, that user-provided command is OK, but we're
             # not smart enough to understand that.
 
@@ -178,7 +180,7 @@
 
 
 
-def find_tool (name, additional_paths = [], path_last = False):
+def find_tool (name, additional_paths = None, path_last = False):
     """ Attempts to find tool (binary) named 'name' in PATH and in 'additiona-paths'.
         If found in path, returns 'name'.
         If found in additional paths, returns full name. If there are several possibilities,
@@ -186,6 +188,8 @@
         Otherwise, returns empty string.
         If 'path_last' is specified, path is checked after 'additional_paths'.
     """
+    if not additional_paths:
+        additional_paths = []
     programs = path.programs_path ()
     match = path.glob (programs, [name, name + '.exe'])
     additional_match = path.glob (additional_paths, [name, name + '.exe'])
@@ -206,39 +210,24 @@
     if result:
         return path.native (result [0])
 
-### # Checks if 'command' can be found either in path
-### # or is a full name to an existing file.
-### rule check_tool-aux ( command )
-### {
-### if $(command:D)
-### {
-### if [ path.exists $(command) ]
-### {
-### return $(command) ;
-### }
-### }
-### else
-### {
-### if [ GLOB [ modules.peek : PATH Path path ] : $(command) ]
-### {
-### return $(command) ;
-### }
-### }
-### }
-###
-###
-### # Checks that a tool can be invoked by 'command'.
-### # If command is not an absolute path, checks if it can be found in 'path'.
-### # If comand is absolute path, check that it exists. Returns 'command'
-### # if ok and empty string otherwise.
-### rule check_tool ( xcommand + )
-### {
-### if [ check_tool-aux $(xcommand[1]) ]
-### || [ check_tool-aux $(xcommand[-1]) ]
-### {
-### return $(xcommand) ;
-### }
-### }
+# Checks if 'command' can be found either in path
+# or is a full name to an existing file.
+def check_tool_aux(command):
+    dirname = os.path.dirname(command)
+    if dirname:
+        return os.path.exists(command)
+    else:
+        paths = bjam.variable("PATH") + bjam.variable("Path") + bjam.variable("path")
+        if path.glob(paths, [command]):
+            return command
+
+def check_tool(command):
+    # Checks that a tool can be invoked by 'command'.
+    # If command is not an absolute path, checks if it can be found in 'path'.
+    # If command is an absolute path, check that it exists. Returns 'command'
+    # if ok and empty string otherwise.
+    if check_tool_aux(command[0]) or check_tool_aux(command[-1]):
+        return command
 
 def handle_options (tool, condition, command, options):
     """ Handle common options for toolset, specifically sets the following

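The new check_tool_aux above treats a command with a directory part as a path that must exist on disk, and a bare name as something to search for along the path (the real code reads PATH/Path/path through bjam.variable and path.glob). A standalone approximation using os.environ, illustrative only:

import os
import os.path

def check_tool_aux(command):
    # A command with a directory component must exist at that location.
    if os.path.dirname(command):
        return command if os.path.exists(command) else None
    # A bare name is looked up along the PATH directories.
    for d in os.environ.get("PATH", "").split(os.pathsep):
        if os.path.exists(os.path.join(d, command)):
            return command
    return None

print(check_tool_aux("gcc"))          # 'gcc' if found on PATH, else None
print(check_tool_aux("/usr/bin/env")) # the path itself if it exists
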
Modified: branches/build/python_port/python/boost/build/tools/gcc.py
==============================================================================
--- branches/build/python_port/python/boost/build/tools/gcc.py (original)
+++ branches/build/python_port/python/boost/build/tools/gcc.py 2008-05-18 05:05:42 EDT (Sun, 18 May 2008)
@@ -6,11 +6,14 @@
 # License Version 1.0. (See accompanying file LICENSE_1_0.txt or
 # http://www.boost.org/LICENSE_1_0.txt)
 
-import unix, builtin, common
-from boost.build.build import feature, toolset, type, action, generators
+import os.path
+from boost.build.tools import common, builtin, unix
+from boost.build.build import feature, toolset, type, generators
 from boost.build.util.utility import *
+from boost.build.manager import get_manager
 
-feature.extend_feature ('toolset', ['gcc'])
+
+feature.extend ('toolset', ['gcc'])
 
 toolset.inherit_generators ('gcc', [], 'unix', ['unix_link', 'unix_link_dll'])
 toolset.inherit_flags ('gcc', 'unix')
@@ -48,7 +51,7 @@
     init_link_flags ('gcc', linker, condition)
 
 
-def gcc_compile_cpp (manager, targets, sources, properties):
+def gcc_compile_cpp(targets, sources, properties):
     # Some extensions are compiled as C++ by default. For others, we need
     # to pass -x c++.
     # We could always pass -x c++ but distcc does not work with it.
@@ -56,9 +59,12 @@
     lang = ''
     if not extension in ['.cc', '.cp', '.cxx', '.cpp', '.c++', '.C']:
         lang = '-x c++'
-    manager.engine ().set_target_variable (targets, 'LANG', lang)
+    get_manager().engine ().set_target_variable (targets, 'LANG', lang)
 
-action.register ('gcc.compile.c++', gcc_compile_cpp, ['"$(CONFIG_COMMAND)" $(LANG) -Wall -ftemplate-depth-100 $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"'])
+engine = get_manager().engine()
+engine.register_action ('gcc.compile.c++',
+ '"$(CONFIG_COMMAND)" $(LANG) -Wall -ftemplate-depth-100 $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"',
+ function=gcc_compile_cpp)
 
 builtin.register_c_compiler ('gcc.compile.c++', ['CPP'], ['OBJ'], ['<toolset>gcc'])
 
@@ -69,22 +75,24 @@
     # by allowing the user to specify both C and C++ compiler names.
     manager.engine ().set_target_variable (targets, 'LANG', '-x c')
 
-action.register ('gcc.compile.c', gcc_compile_c, ['"$(CONFIG_COMMAND)" $(LANG) -Wall $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"'])
+engine.register_action ('gcc.compile.c',
+ '"$(CONFIG_COMMAND)" $(LANG) -Wall $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"',
+ function=gcc_compile_c)
 
 builtin.register_c_compiler ('gcc.compile.c', ['C'], ['OBJ'], ['<toolset>gcc'])
 
 
 # Declare flags and action for compilation
-toolset.flags ('gcc.compile', 'OPTIONS', '<optimization>off', ['-O0'])
-toolset.flags ('gcc.compile', 'OPTIONS', '<optimization>speed', ['-O3'])
-toolset.flags ('gcc.compile', 'OPTIONS', '<optimization>space', ['-Os'])
-
-toolset.flags ('gcc.compile', 'OPTIONS', '<inlining>off', ['-fno-inline'])
-toolset.flags ('gcc.compile', 'OPTIONS', '<inlining>on', ['-Wno-inline'])
-toolset.flags ('gcc.compile', 'OPTIONS', '<inlining>full', ['-finline-functions', '-Wno-inline'])
+toolset.flags ('gcc.compile', 'OPTIONS', ['<optimization>off'], ['-O0'])
+toolset.flags ('gcc.compile', 'OPTIONS', ['<optimization>speed'], ['-O3'])
+toolset.flags ('gcc.compile', 'OPTIONS', ['<optimization>space'], ['-Os'])
+
+toolset.flags ('gcc.compile', 'OPTIONS', ['<inlining>off'], ['-fno-inline'])
+toolset.flags ('gcc.compile', 'OPTIONS', ['<inlining>on'], ['-Wno-inline'])
+toolset.flags ('gcc.compile', 'OPTIONS', ['<inlining>full'], ['-finline-functions', '-Wno-inline'])
 
-toolset.flags ('gcc.compile', 'OPTIONS', '<debug-symbols>on', ['-g'])
-toolset.flags ('gcc.compile', 'OPTIONS', '<profiling>on', ['-pg'])
+toolset.flags ('gcc.compile', 'OPTIONS', ['<debug-symbols>on'], ['-g'])
+toolset.flags ('gcc.compile', 'OPTIONS', ['<profiling>on'], ['-pg'])
 
 # On cygwin and mingw, gcc generates position independent code by default,
 # and warns if -fPIC is specified. This might not be the right way
@@ -94,19 +102,20 @@
 # we'll just add another parameter to 'init' and move this logic
 # inside 'init'.
 if not os_name () in ['CYGWIN', 'NT']:
-    toolset.flags ('gcc.compile', 'OPTIONS', '<link>shared/<main-target-type>LIB', ['-fPIC'])
+    toolset.flags ('gcc.compile', 'OPTIONS', ['<link>shared', '<main-target-type>LIB'], ['-fPIC'])
 
 if os_name () != 'NT':
     HAVE_SONAME = True
 else:
     HAVE_SONAME = False
 
-toolset.flags ('gcc.compile', 'OPTIONS', '<cflags>')
-toolset.flags ('gcc.compile.c++', 'OPTIONS', '<cxxflags>')
-toolset.flags ('gcc.compile', 'DEFINES', '<define>')
-toolset.flags ('gcc.compile', 'INCLUDES', '<include>')
+toolset.flags ('gcc.compile', 'OPTIONS', [], ['<cflags>'])
+toolset.flags ('gcc.compile.c++', 'OPTIONS', [], ['<cxxflags>'])
+toolset.flags ('gcc.compile', 'DEFINES', [], ['<define>'])
+toolset.flags ('gcc.compile', 'INCLUDES', [], ['<include>'])
 
 class GccLinkingGenerator (unix.UnixLinkingGenerator):
+
     """ The class which check that we don't try to use
         the <link-runtime>static property while creating or using shared library,
         since it's not supported by gcc/libc.
@@ -124,14 +133,15 @@
 
             if m:
                 raise UserError (m + " It's suggested to use <link-runtime>static together with the <link>static")
-
+
         return unix.UnixLinkingGenerator.generated_targets (self, sources, prop_set, project, name)
 
-def gcc_link (manager, targets, sources, properties):
-    manager.engine ().set_target_variable (targets, 'SPACE', " ")
+def gcc_link (targets, sources, properties):
+    get_manager().engine ().set_target_variable (targets, 'SPACE', " ")
 
-# TODO: how to set 'bind LIBRARIES'?
-action.register ('gcc.link', gcc_link, ['"$(CONFIG_COMMAND)" -L"$(LINKPATH)" -Wl,-R$(SPACE)-Wl,"$(RPATH)" -Wl,-rpath-link$(SPACE)-Wl,"$(RPATH_LINK)" -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-ST) -l$(FINDLIBS-SA) $(OPTIONS)'])
+engine.register_action('gcc.link', '"$(CONFIG_COMMAND)" -L"$(LINKPATH)" -Wl,-R$(SPACE)-Wl,"$(RPATH)" -Wl,-rpath-link$(SPACE)-Wl,"$(RPATH_LINK)" -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-ST) -l$(FINDLIBS-SA) $(OPTIONS)',
+ bound_list=['LIBRARIES'],
+ function=gcc_link)
 
 generators.register (GccLinkingGenerator ('gcc.link', True, ['LIB', 'OBJ'], ['EXE'], ['<toolset>gcc']))
 
@@ -140,7 +150,9 @@
     manager.engine ().set_target_variable (target, 'SPACE', " ")
 
 # TODO: how to set 'bind LIBRARIES'?
-action.register ('gcc.link.dll', gcc_link_dll, ['"$(CONFIG_COMMAND)" -L"$(LINKPATH)" -Wl,-R$(SPACE)-Wl,"$(RPATH)" -o "$(<)" $(HAVE_SONAME)-Wl,-h$(SPACE)-Wl,$(<[1]:D=) -shared "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-ST) -l$(FINDLIBS-SA) $(OPTIONS)'])
+engine.register_action('gcc.link.dll', '"$(CONFIG_COMMAND)" -L"$(LINKPATH)" -Wl,-R$(SPACE)-Wl,"$(RPATH)" -o "$(<)" $(HAVE_SONAME)-Wl,-h$(SPACE)-Wl,$(<[1]:D=) -shared "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-ST) -l$(FINDLIBS-SA) $(OPTIONS)',
+ bound_list=['LIBRARIES'],
+ function=gcc_link_dll)
 
 generators.register (GccLinkingGenerator ('gcc.link.dll', True, ['LIB', 'OBJ'], ['SHARED_LIB'], ['<toolset>gcc']))
 
@@ -150,17 +162,17 @@
 
 # Declare flags for linking
 # First, the common flags
-toolset.flags ('gcc.link', 'OPTIONS', '<debug-symbols>on', ['-g'])
-toolset.flags ('gcc.link', 'OPTIONS', '<profiling>on', ['-pg'])
-toolset.flags ('gcc.link', 'OPTIONS', '<linkflags>')
-toolset.flags ('gcc.link', 'LINKPATH', '<library-path>')
-toolset.flags ('gcc.link', 'FINDLIBS-ST', '<find-static-library>')
-toolset.flags ('gcc.link', 'FINDLIBS-SA', '<find-shared-library>')
-toolset.flags ('gcc.link', 'LIBRARIES', '<library-file>')
+toolset.flags ('gcc.link', 'OPTIONS', ['<debug-symbols>on'], ['-g'])
+toolset.flags ('gcc.link', 'OPTIONS', ['<profiling>on'], ['-pg'])
+toolset.flags ('gcc.link', 'OPTIONS', [], ['<linkflags>'])
+toolset.flags ('gcc.link', 'LINKPATH', [], ['<library-path>'])
+toolset.flags ('gcc.link', 'FINDLIBS-ST', [], ['<find-static-library>'])
+toolset.flags ('gcc.link', 'FINDLIBS-SA', [], ['<find-shared-library>'])
+toolset.flags ('gcc.link', 'LIBRARIES', [], ['<library-file>'])
 
 ### # For <link-runtime>static we made sure there are no dynamic libraries
 ### # in the link
-toolset.flags ('gcc.link', 'OPTIONS', '<link-runtime>static', ['-static'])
+toolset.flags ('gcc.link', 'OPTIONS', ['<link-runtime>static'], ['-static'])
 
 def init_link_flags (tool, linker, condition):
     """ Sets the vendor specific flags.
@@ -170,7 +182,7 @@
         # We use --strip-all flag as opposed to -s since icc
         # (intel's compiler) is generally option-compatible with
         # and inherits from gcc toolset, but does not support -s
-        toolset.flags (tool + '_link', 'OPTIONS', condition + '/<debug-symbols>off', ['-Wl,--strip-all'])
+        toolset.flags (tool + '_link', 'OPTIONS', condition + ['<debug-symbols>off'], ['-Wl,--strip-all'])
         toolset.flags (tool + '_link', 'RPATH', condition, ['<dll-path>'])
         toolset.flags (tool + '_link', 'RPATH_LINK', condition, ['<xdll-path>'])
 
@@ -178,12 +190,12 @@
         # we can't pass -s to ld unless we also pass -static
         # so we removed -s completely from OPTIONS and add it
         # to ST_OPTIONS
-        toolset.flags (tool + '_link', 'ST_OPTIONS', condition + '/<debug-symbols>off', ['-s'])
+        toolset.flags (tool + '_link', 'ST_OPTIONS', condition + ['<debug-symbols>off'], ['-s'])
         toolset.flags (tool + '_link', 'RPATH', condition, ['<dll-path>'])
         toolset.flags (tool + '_link', 'RPATH_LINK', condition, ['<xdll-path>'])
 
     elif linker == 'sun':
-        toolset.flags (tool + '_link', 'OPTIONS', condition + '/<debug-symbols>off', ['-Wl,-s'])
+        toolset.flags (tool + '_link', 'OPTIONS', condition + ['<debug-symbols>off'], ['-Wl,-s'])
         toolset.flags (tool + '_link', 'RPATH', condition, ['<dll-path>'])
         # Solaris linker does not have a separate -rpath-link, but
         # allows to use -L for the same purpose.
@@ -195,7 +207,7 @@
         # is a separate question.
         # AH, 2004/10/16: it is still necessary because some tests link
         # against static libraries that were compiled without PIC.
-        toolset.flags (tool + '_link', 'OPTIONS', condition + '/<link>shared', ['-mimpure-text'])
+        toolset.flags (tool + '_link', 'OPTIONS', condition + ['<link>shared'], ['-mimpure-text'])
 
     else:
             raise UserError ("'%s' initialization: invalid linker '%s'\n" \
@@ -217,12 +229,10 @@
 ### # The 'c' letter means suppresses warning in case the archive
 ### # does not exists yet. That warning is produced only on
 ### # some platforms, for whatever reasons.
-def gcc_archive (manager, targets, sources, properties):
-    pass
-
-action.register ('gcc.archive', gcc_archive, ['ar ruc "$(<)" "$(>)"'])
+#def gcc_archive (manager, targets, sources, properties):
+# pass
 
-builtin.register_c_compiler ('gcc.compile.c++', ['CPP'], ['OBJ'], ['<toolset>gcc'])
+engine.register_action('gcc.archive', 'ar ruc "$(<)" "$(>)"')
 
 ### # Set up threading support. It's somewhat contrived, so perform it at the end,
 ### # to avoid cluttering other code.

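gcc.py now registers actions through engine.register_action(name, command, bound_list=..., function=...) instead of the old action.register call. A rough sketch of the registry shape that implies (illustrative only; the real engine wires these entries into bjam rather than storing them in a dict):

class Engine(object):
    def __init__(self):
        self.actions = {}

    def register_action(self, name, command, bound_list=None, function=None):
        # Each action keeps its command template, the variables to bind,
        # and an optional Python callback run when the action fires.
        self.actions[name] = {
            "command": command,
            "bound_list": bound_list or [],
            "function": function,
        }

engine = Engine()
engine.register_action('gcc.archive', 'ar ruc "$(<)" "$(>)"')
print(engine.actions['gcc.archive']['command'])
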
Modified: branches/build/python_port/python/boost/build/tools/unix.py
==============================================================================
--- branches/build/python_port/python/boost/build/tools/unix.py (original)
+++ branches/build/python_port/python/boost/build/tools/unix.py 2008-05-18 05:05:42 EDT (Sun, 18 May 2008)
@@ -10,7 +10,7 @@
 """
 
 import builtin
-from boost.build.build import action, generators, type
+from boost.build.build import generators, type
 from boost.build.util.utility import *
 from boost.build.util import set, sequence
 
@@ -19,8 +19,8 @@
     def __init__ (self, id, composing, source_types, target_types, requirements):
         builtin.LinkingGenerator.__init__ (self, id, composing, source_types, target_types, requirements)
     
-    def run (self, project, name, prop_set, sources, multiple):
-        result = builtin.LinkingGenerator.run (self, project, name, prop_set, sources, multiple)
+    def run (self, project, name, prop_set, sources):
+        result = builtin.LinkingGenerator.run (self, project, name, prop_set, sources)
         set_library_order (project.manager (), sources, prop_set, result [1])
                                 
         return result
@@ -74,7 +74,9 @@
         return (f, sources)
 
 ### # The derived toolset must specify their own rules and actions.
-action.register ('unix.prebuilt', None, None)
+# FIXME: restore?
+# action.register ('unix.prebuilt', None, None)
+
 
 generators.register (UnixPrebuiltLibGenerator ('unix.prebuilt', False, [], ['LIB'], ['<file>', '<toolset>unix']))
 
@@ -104,7 +106,8 @@
 def unix_archive (manager, targets, sources, properties):
     pass
 
-action.register ('unix.archive', unix_archive, [''])
+# FIXME: restore?
+#action.register ('unix.archive', unix_archive, [''])
 
 ### actions searched-lib-generator {
 ### }

Modified: branches/build/python_port/python/boost/build/util/utility.py
==============================================================================
--- branches/build/python_port/python/boost/build/util/utility.py (original)
+++ branches/build/python_port/python/boost/build/util/utility.py 2008-05-18 05:05:42 EDT (Sun, 18 May 2008)
@@ -90,7 +90,7 @@
         return [ get_grist_one (v) for v in value ]
 
 def ungrist (value):
-    """ Returns the grist of the value.
+    """ Returns the value without grist.
         If value is a sequence, does it for every value and returns the result as a sequence.
     """
     def ungrist_one (value):

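To go with the corrected ungrist docstring: in Boost.Build properties, the "grist" is the <feature> part wrapped in angle brackets, and ungrist strips it. A simplified sketch (the real helpers also accept sequences):

def get_grist(value):
    # Leading "<feature>" part of a property, or "" if there is none.
    if value.startswith("<") and ">" in value:
        return value[:value.index(">") + 1]
    return ""

def ungrist(value):
    # The value without its grist: "<toolset>" -> "toolset".
    if value.startswith("<") and value.endswith(">"):
        return value[1:-1]
    return value

print(get_grist("<toolset>gcc"))   # <toolset>
print(ungrist("<toolset>"))        # toolset
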

Boost-Commit list run by bdawes at acm.org, david.abrahams at rcn.com, gregod at cs.rpi.edu, cpdaniel at pacbell.net, john at johnmaddock.co.uk