From: steven_at_[hidden]
Date: 2008-07-07 00:30:50
Author: steven_watanabe
Date: 2008-07-07 00:30:47 EDT (Mon, 07 Jul 2008)
New Revision: 47165
URL: http://svn.boost.org/trac/boost/changeset/47165
Log:
Port gcc and dependencies.  Hello world builds.  Further functionality is untested
Added:
   branches/build/python_port/python/boost/build/tools/pch.py   (contents, props changed)
   branches/build/python_port/python/boost/build/tools/types/asm.py   (contents, props changed)
Text files modified: 
   branches/build/python_port/python/boost/build/build/generators.py     |    20                                         
   branches/build/python_port/python/boost/build/build/virtual_target.py |     6                                         
   branches/build/python_port/python/boost/build/tools/builtin.py        |   223 +++++++-                                
   branches/build/python_port/python/boost/build/tools/common.py         |    79 +-                                      
   branches/build/python_port/python/boost/build/tools/gcc.py            |  1021 ++++++++++++++++++++++++++++++--------- 
   branches/build/python_port/python/boost/build/tools/rc.py             |     5                                         
   branches/build/python_port/python/boost/build/tools/types/__init__.py |     3                                         
   branches/build/python_port/python/boost/build/tools/unix.py           |     3                                         
   branches/build/python_port/python/boost/build/util/path.py            |    11                                         
   branches/build/python_port/python/boost/build/util/utility.py         |     4                                         
   10 files changed, 1041 insertions(+), 334 deletions(-)
Modified: branches/build/python_port/python/boost/build/build/generators.py
==============================================================================
--- branches/build/python_port/python/boost/build/build/generators.py	(original)
+++ branches/build/python_port/python/boost/build/build/generators.py	2008-07-07 00:30:47 EDT (Mon, 07 Jul 2008)
@@ -93,10 +93,12 @@
         __debug = "--debug-generators" in bjam.variable("ARGV")        
     return __debug
 
-def increase_indent(self):
+def increase_indent():
+    global __indent
     __indent += "    "
 
-def decrease_indent(self):
+def decrease_indent():
+    global __indent
     __indent = __indent[0:-4]
 
 def dout(message):
@@ -137,6 +139,8 @@
             NOTE: all subclasses must have a similar signature for clone to work!
     """
     def __init__ (self, id, composing, source_types, target_types_and_names, requirements):
+        assert(not isinstance(source_types, str))
+        assert(not isinstance(target_types_and_names, str))
         self.id_ = id
         self.composing_ = composing
         self.source_types_ = source_types
@@ -943,19 +947,19 @@
         
     __construct_stack.append (1)
 
-    if project.manager ().logger ().on ():
+    if project.manager().logger().on():
         increase_indent ()
         
-        dout( "*** construct ", target_type, m)
+        dout( "*** construct " + target_type)
         
         for s in sources:
-            dout("    from ", s)
+            dout("    from " + str(s))
 
-        project.manager ().logger ().log (__name__, "    properties: ", prop_set.raw ())
+        project.manager().logger().log (__name__, "    properties: ", prop_set.raw ())
              
-    result = __construct_really (project, name, target_type, prop_set, sources)
+    result = __construct_really(project, name, target_type, prop_set, sources)
 
-    project.manager ().logger ().decrease_indent ()
+    project.manager().logger().decrease_indent()
         
     __construct_stack = __construct_stack [1:]
 
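[Editor's note] The generators.py hunk above drops the stray 'self' parameters and adds 'global __indent' declarations. In Python, a function that rebinds a module-level name must declare it global; otherwise '__indent += "    "' treats __indent as a new local and raises UnboundLocalError. A minimal standalone sketch of the pattern (illustrative only, not the module's full logging API):

__indent = ""

def increase_indent():
    # Without 'global', the augmented assignment below would make __indent a
    # local name and fail with UnboundLocalError on first use.
    global __indent
    __indent += "    "

def decrease_indent():
    global __indent
    __indent = __indent[0:-4]

def dout(message):
    print(__indent + message)

increase_indent()
dout("*** construct EXE")   # printed with one level of indentation
decrease_indent()
dout("done")                # printed flush left again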
Modified: branches/build/python_port/python/boost/build/build/virtual_target.py
==============================================================================
--- branches/build/python_port/python/boost/build/build/virtual_target.py	(original)
+++ branches/build/python_port/python/boost/build/build/virtual_target.py	2008-07-07 00:30:47 EDT (Mon, 07 Jul 2008)
@@ -842,7 +842,7 @@
         actions which create them.
     """
     def __init__ (self, manager, prop_set):
-        Action.__init__ (self, manager, None, prop_set)
+        Action.__init__ (self, manager, None, None, prop_set)
         
     def actualize (self):
         if not self.actualized_:
@@ -856,7 +856,9 @@
     are not scanned for dependencies."""
 
     def __init__(self, sources, action_name, property_set):
-        Action.__init__(sources, action_name, property_set)
+        #FIXME: should the manager parameter of Action.__init__
+        #be removed? -- Steven Watanabe
+        Action.__init__(self, boost.build.manager.get_manager(), sources, action_name, property_set)
 
     def actualize_source_type(self, sources, property_set):
         
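[Editor's note] The virtual_target.py changes adjust two calls to Action.__init__. Because the base __init__ is invoked unbound, 'self' plus every positional parameter has to be spelled out, or the remaining arguments silently shift by one. A tiny self-contained sketch of that pitfall, using illustrative names rather than the real Action signature:

class Action:
    def __init__(self, manager, sources, action_name, prop_set):
        self.manager_ = manager
        self.sources_ = sources
        self.action_name_ = action_name
        self.prop_set_ = prop_set

class NullAction(Action):
    """An action with no sources and no action name."""
    def __init__(self, manager, prop_set):
        # Unbound base-class call: 'self' and both placeholder arguments must
        # be passed explicitly, mirroring the NullAction hunk above.
        Action.__init__(self, manager, None, None, prop_set)

a = NullAction("some-manager", "some-property-set")
assert a.sources_ is None and a.action_name_ is None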
Modified: branches/build/python_port/python/boost/build/tools/builtin.py
==============================================================================
--- branches/build/python_port/python/boost/build/tools/builtin.py	(original)
+++ branches/build/python_port/python/boost/build/tools/builtin.py	2008-07-07 00:30:47 EDT (Mon, 07 Jul 2008)
@@ -1,3 +1,5 @@
+# Status: minor updates by Steven Watanabe to make gcc work
+#
 #  Copyright (C) Vladimir Prus 2002. Permission to copy, use, modify, sell and
 #  distribute this software is granted provided this copyright notice appears in
 #  all copies. This software is provided "as is" without express or implied
@@ -79,6 +81,31 @@
     feature.extend('variant', [name])
     feature.compose (replace_grist (name, '<variant>'), explicit_properties)
 
+__os_names = """
+    amiga aix bsd cygwin darwin dos emx freebsd hpux iphone linux netbsd
+    openbsd osf qnx qnxnto sgi solaris sun sunos svr4 sysv ultrix unix unixware
+    vms windows
+""".split()
+
+# Translates from bjam current OS to the os tags used in host-os and target-os,
+# i.e. returns the running host-os.
+#
+def default_host_os():
+    host_os = os_name()
+    if host_os not in (x.upper() for x in __os_names):
+        if host_os == 'NT': host_os = 'windows'
+        elif host_os == 'AS400': host_os = 'unix'
+        elif host_os == 'MINGW': host_os = 'windows'
+        elif host_os == 'BSDI': host_os = 'bsd'
+        elif host_os == 'COHERENT': host_os = 'unix'
+        elif host_os == 'DRAGONFLYBSD': host_os = 'bsd'
+        elif host_os == 'IRIX': host_os = 'sgi'
+        elif host_os == 'MACOSX': host_os = 'darwin'
+        elif host_os == 'KFREEBSD': host_os = 'freebsd'
+        elif host_os == 'LINUX': host_os = 'linux'
+        else: host_os = 'unix'
+    return host_os.lower()
+
 def register_globals ():
     """ Registers all features and variants declared by this module.
     """
@@ -88,13 +115,25 @@
     # TODO: check this. Compatibility with bjam names? Subfeature for version?
     os = sys.platform
     feature.feature ('os', [os], ['propagated', 'link-incompatible'])
+
+
+    # The two OS features define a known set of abstract OS names. The host-os is
+    # the OS under which bjam is running. Even though this should really be a fixed
+    # property we need to list all the values to prevent unknown value errors. Both
+    # set the default value to the current OS to account for the default use case of
+    # building on the target OS.
+    feature.feature('host-os', __os_names)
+    feature.set_default('host-os', default_host_os())
+
+    feature.feature('target-os', __os_names, ['propagated', 'link-incompatible'])
+    feature.set_default('target-os', default_host_os())
     
     feature.feature ('toolset', [], ['implicit', 'propagated' ,'symmetric'])
     
     feature.feature ('stdlib', ['native'], ['propagated', 'composite'])
     
     feature.feature ('link', ['shared', 'static'], ['propagated'])
-    feature.feature ('link-runtime', ['shared', 'static'], ['propagated'])
+    feature.feature ('runtime-link', ['shared', 'static'], ['propagated'])
     feature.feature ('runtime-debugging', ['on', 'off'], ['propagated'])
     
     
@@ -136,6 +175,17 @@
     feature.feature ('use', [], ['free', 'dependency', 'incidental'])
     feature.feature ('dependency', [], ['free', 'dependency', 'incidental'])
     feature.feature ('implicit-dependency', [], ['free', 'dependency', 'incidental'])
+
+    feature.feature('warnings', [
+        'on',         # Enable default/"reasonable" warning level for the tool.
+        'all',        # Enable all possible warnings issued by the tool.
+        'off'],       # Disable all warnings issued by the tool.
+        ['incidental', 'propagated'])
+
+    feature.feature('warnings-as-errors', [
+        'off',        # Do not fail the compilation if there are warnings.
+        'on'],        # Fail the compilation if there are warnings.
+        ['incidental', 'propagated'])
     
     feature.feature ('source', [], ['free', 'dependency', 'incidental'])
     feature.feature ('library', [], ['free', 'dependency', 'incidental'])
@@ -170,6 +220,81 @@
     # of the library.
     feature.feature ('allow', [], ['free'])
     
+    # The addressing model to generate code for. Currently a limited set only
+    # specifying the bit size of pointers.
+    feature.feature('address-model', ['16', '32', '64'], ['propagated', 'optional'])
+
+    # Type of CPU architecture to compile for.
+    feature.feature('architecture', [
+        # x86 and x86-64
+        'x86',
+
+        # ia64
+        'ia64',
+
+        # Sparc
+        'sparc',
+
+        # RS/6000 & PowerPC
+        'power',
+
+        # MIPS/SGI
+        'mips1', 'mips2', 'mips3', 'mips4', 'mips32', 'mips32r2', 'mips64',
+
+        # HP/PA-RISC
+        'parisc',
+        
+        # Advanced RISC Machines
+        'arm',
+
+        # Combined architectures for platforms/toolsets that support building for
+        # multiple architectures at once. "combined" would be the default multi-arch
+        # for the toolset.
+        'combined',
+        'combined-x86-power'],
+
+        ['propagated', 'optional'])
+
+    # The specific instruction set in an architecture to compile.
+    feature.feature('instruction-set', [
+        # x86 and x86-64
+        'i386', 'i486', 'i586', 'i686', 'pentium', 'pentium-mmx', 'pentiumpro', 'pentium2', 'pentium3',
+        'pentium3m', 'pentium-m', 'pentium4', 'pentium4m', 'prescott', 'nocona', 'conroe', 'conroe-xe',
+        'conroe-l', 'allendale', 'mermon', 'mermon-xe', 'kentsfield', 'kentsfield-xe', 'penryn', 'wolfdale',
+        'yorksfield', 'nehalem', 'k6', 'k6-2', 'k6-3', 'athlon', 'athlon-tbird', 'athlon-4', 'athlon-xp',
+        'athlon-mp', 'k8', 'opteron', 'athlon64', 'athlon-fx', 'winchip-c6', 'winchip2', 'c3', 'c3-2',
+
+        # ia64
+        'itanium', 'itanium1', 'merced', 'itanium2', 'mckinley',
+
+        # Sparc
+        'v7', 'cypress', 'v8', 'supersparc', 'sparclite', 'hypersparc', 'sparclite86x', 'f930', 'f934',
+        'sparclet', 'tsc701', 'v9', 'ultrasparc', 'ultrasparc3',
+
+        # RS/6000 & PowerPC
+        '401', '403', '405', '405fp', '440', '440fp', '505', '601', '602',
+        '603', '603e', '604', '604e', '620', '630', '740', '7400',
+        '7450', '750', '801', '821', '823', '860', '970', '8540',
+        'power-common', 'ec603e', 'g3', 'g4', 'g5', 'power', 'power2',
+        'power3', 'power4', 'power5', 'powerpc', 'powerpc64', 'rios',
+        'rios1', 'rsc', 'rios2', 'rs64a',
+
+        # MIPS
+        '4kc', '4kp', '5kc', '20kc', 'm4k', 'r2000', 'r3000', 'r3900', 'r4000',
+        'r4100', 'r4300', 'r4400', 'r4600', 'r4650',
+        'r6000', 'r8000', 'rm7000', 'rm9000', 'orion', 'sb1', 'vr4100',
+        'vr4111', 'vr4120', 'vr4130', 'vr4300',
+        'vr5000', 'vr5400', 'vr5500',
+
+        # HP/PA-RISC
+        '700', '7100', '7100lc', '7200', '7300', '8000',
+        
+        # Advanced RISC Machines
+        'armv2', 'armv2a', 'armv3', 'armv3m', 'armv4', 'armv4t', 'armv5',
+        'armv5t', 'armv5te', 'armv6', 'armv6j', 'iwmmxt', 'ep9312'],
+
+        ['propagated', 'optional'])
+    
     # Windows-specific features
     feature.feature ('user-interface', ['console', 'gui', 'wince', 'native', 'auto'], [])
     feature.feature ('variant', [], ['implicit', 'composite', 'propagated', 'symmetric'])
@@ -190,7 +315,7 @@
 
 class SearchedLibTarget (virtual_target.AbstractFileTarget):
     def __init__ (self, name, project, shared, real_name, search, action):
-        AbstractFileTarget.__init__ (self, name, False, 'SEARCHED_LIB', project, action)
+        virtual_target.AbstractFileTarget.__init__ (self, name, False, 'SEARCHED_LIB', project, action)
         
         self.shared_ = shared
         self.real_name_ = real_name
@@ -211,7 +336,8 @@
         project.manager ().engine ().add_not_file_target (target)
     
     def path (self):
-        pass
+        #FIXME: several functions rely on this not being None
+        return ""
 
 
 class CScanner (scanner.Scanner):
@@ -349,6 +475,7 @@
 ### }
 ### IMPORT $(__name__) : lib : : lib ;
 
+# Updated to trunk@47077
 class SearchedLibGenerator (generators.Generator):
     def __init__ (self, id = 'SearchedLibGenerator', composing = False, source_types = [], target_types_and_names = ['SEARCHED_LIB'], requirements = []):
         # TODO: the comment below looks strange. There are no requirements!
@@ -358,7 +485,7 @@
         # search.
         generators.Generator.__init__ (self, id, composing, source_types, target_types_and_names, requirements)
     
-    def run (self, project, name, prop_set, sources):
+    def run(self, project, name, prop_set, sources):
         if not name:
             return None
 
@@ -369,17 +496,22 @@
         properties = prop_set.raw ()
         shared = '<link>shared' in properties
 
-        a = NullAction (project.manager (), prop_set)
+        a = virtual_target.NullAction (project.manager(), prop_set)
         
-        real_name = feature.get_values ('<name>', properties) [0]
-        search = feature.get_values ('<search>', properties) [0]
-        t = SearchedLibTarget (name, project, shared, real_name, search, a)
+        real_name = feature.get_values ('<name>', properties)
+        if real_name:
+            real_name = real_name[0]
+        else:
+            real_name = name
+        search = feature.get_values('<search>', properties)
+        usage_requirements = property_set.create(['<xdll-path>' + p for p in search])
+        t = SearchedLibTarget(name, project, shared, real_name, search, a)
 
         # We return sources for a simple reason. If there's
         #    lib png : z : <name>png ; 
         # the 'z' target should be returned, so that apps linking to
         # 'png' will link to 'z', too.
-        return (virtual_target.register (t), sources)
+        return(usage_requirements, [boost.build.manager.get_manager().virtual_targets().register(t)] + sources)
 
 generators.register (SearchedLibGenerator ())
 
@@ -440,18 +572,21 @@
         generators.Generator.__init__ (self, id, composing, source_types, target_types_and_names, requirements)
         
     def run (self, project, name, prop_set, sources):
-        lib_sources = prop_set.get ('<library>')
-        [ sources.append (project.manager ().get_object (x)) for x in lib_sources ]
+        lib_sources = prop_set.get('<library>')
+        [ sources.append (project.manager().get_object(x)) for x in lib_sources ]
         
         # Add <library-path> properties for all searched libraries
         extra = []
         for s in sources:
             if s.type () == 'SEARCHED_LIB':
-                search = s.search ()
-                extra.append (replace_grist (search, '<library-path>'))
+                search = s.search()
+                extra.append(replace_grist(search, '<library-path>'))
+
+        orig_xdll_path = []
                    
-        if prop_set.get ('<hardcode-dll-paths>') == ['true'] and type.is_derived (self.target_types_ [0], 'EXE'):
-            xdll_path = prop_set.get ('<xdll-path>')
+        if prop_set.get('<hardcode-dll-paths>') == ['true'] and type.is_derived(self.target_types_ [0], 'EXE'):
+            xdll_path = prop_set.get('<xdll-path>')
+            orig_xdll_path = [ replace_grist(x, '<dll-path>') for x in xdll_path ]
             # It's possible that we have libraries in sources which did not came
             # from 'lib' target. For example, libraries which are specified
             # just as filenames as sources. We don't have xdll-path properties
@@ -460,18 +595,24 @@
                 if type.is_derived (s.type (), 'SHARED_LIB') and not s.action ():
                     # Unfortunately, we don't have a good way to find the path
                     # to a file, so use this nasty approach.
-                    p = s.project ()
-                    location = path.root (s.name (), p.get ('source-location'))
-                    xdll_path.append (path.parent (location))
+                    p = s.project()
+                    location = path.root(s.name(), p.get('source-location'))
+                    xdll_path.append(path.parent(location))
                           
-            extra += [ replace_grist (x, '<dll-path>') for x in xdll_path ]
+            extra += [ replace_grist(x, '<dll-path>') for x in xdll_path ]
         
         if extra:
             prop_set = prop_set.add_raw (extra)
                         
-        result = generators.Generator.run (self, project, name, prop_set, sources)
+        result = generators.Generator.run(self, project, name, prop_set, sources)
+
+        if result:
+            ur = self.extra_usage_requirements(result, prop_set)
+            ur = ur.add(property_set.create(orig_xdll_path))
+        else:
+            return None
         
-        return (self.extra_usage_requirements (result, prop_set), result)
+        return(ur, result)
     
     def extra_usage_requirements (self, created_targets, prop_set):
         
@@ -485,13 +626,13 @@
             
             # TODO: is it safe to use the current directory? I think we should use 
             # another mechanism to allow this to be run from anywhere.
-            pwd = os.getcwd ()
+            pwd = os.getcwd()
             
             for t in created_targets:
-                if type.is_derived (t.type (), 'SHARED_LIB'):
-                    paths.append (path.root (path.make (t.path ()), pwd))
+                if type.is_derived(t.type(), 'SHARED_LIB'):
+                    paths.append(path.root(path.make(t.path()), pwd))
 
-            extra += replace_grist (paths, '<xdll-path>')
+            extra += replace_grist(paths, '<xdll-path>')
         
         # We need to pass <xdll-path> features that we've got from sources,
         # because if shared library is built, exe which uses it must know paths
@@ -501,11 +642,11 @@
         # Just pass all features in property_set, it's theorically possible
         # that we'll propagate <xdll-path> features explicitly specified by
         # the user, but then the user's to blaim for using internal feature.                
-        values = prop_set.get ('<xdll-path>')
-        extra += replace_grist (values, '<xdll-path>')
+        values = prop_set.get('<xdll-path>')
+        extra += replace_grist(values, '<xdll-path>')
         
         if extra:
-            result = property_set.create (extra)
+            result = property_set.create(extra)
 
         return result
 
@@ -521,31 +662,33 @@
         # Searched libraries are not passed as argument to linker
         # but via some option. So, we pass them to the action
         # via property. 
-        properties2 = prop_set.raw ()
+        properties2 = prop_set.raw()
         fsa = []
         fst = []
         for s in sources:
-            if type.is_derived (s.type (), 'SEARCHED_LIB'):
-                name = s.real_name ()
-                if s.shared ():
-                    fsa.append (name)
+            if type.is_derived(s.type(), 'SEARCHED_LIB'):
+                name = s.real_name()
+                if s.shared():
+                    fsa.append(name)
 
                 else:
-                    fst.append (name)
+                    fst.append(name)
 
             else:
-                sources2.append (s)
+                sources2.append(s)
 
-        properties2 += '&&'.join (replace_grist (fsa, '<find-shared-library>'))
-        properties2 += '&&'.join (replace_grist (fst, '<find-static-library>'))
+        if fsa:
+            properties2 += [replace_grist('&&'.join(fsa), '<find-shared-library>')]
+        if fst:
+            properties2 += [replace_grist('&&'.join(fst), '<find-static-library>')]
                 
-        spawn = generators.Generator.generated_targets (self, sources2, property_set.create (properties2), project, name)
+        spawn = generators.Generator.generated_targets(self, sources2, property_set.create(properties2), project, name)
         
         return spawn
 
 
-def register_linker (id, source_types, target_types, requirements):
-    g = LinkingGenerator(id, 1, source_types, target_types, requirements)
+def register_linker(id, source_types, target_types, requirements):
+    g = LinkingGenerator(id, True, source_types, target_types, requirements)
     generators.register(g)
 
 class ArchiveGenerator (generators.Generator):
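[Editor's note] builtin.py gains the host-os/target-os features plus a default_host_os() helper that maps bjam's uppercase OS names onto the abstract os tags. A standalone sketch of that normalization, with the OS string passed in explicitly instead of read from bjam (normalize_host_os is an illustrative stand-in, not the module function; it assumes the same mapping table as the hunk above):

__os_names = """
    amiga aix bsd cygwin darwin dos emx freebsd hpux iphone linux netbsd
    openbsd osf qnx qnxnto sgi solaris sun sunos svr4 sysv ultrix unix unixware
    vms windows
""".split()

def normalize_host_os(host_os):
    # bjam reports uppercase names such as NT, MACOSX or LINUX; anything not
    # in the known list is mapped onto one of the abstract os tags.
    if host_os not in [x.upper() for x in __os_names]:
        mapping = {'NT': 'windows', 'AS400': 'unix', 'MINGW': 'windows',
                   'BSDI': 'bsd', 'COHERENT': 'unix', 'DRAGONFLYBSD': 'bsd',
                   'IRIX': 'sgi', 'MACOSX': 'darwin', 'KFREEBSD': 'freebsd'}
        host_os = mapping.get(host_os, 'unix')
    return host_os.lower()

assert normalize_host_os('NT') == 'windows'
assert normalize_host_os('MACOSX') == 'darwin'
assert normalize_host_os('LINUX') == 'linux'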
Modified: branches/build/python_port/python/boost/build/tools/common.py
==============================================================================
--- branches/build/python_port/python/boost/build/tools/common.py	(original)
+++ branches/build/python_port/python/boost/build/tools/common.py	2008-07-07 00:30:47 EDT (Mon, 07 Jul 2008)
@@ -3,12 +3,13 @@
 #  all copies. This software is provided "as is" without express or implied
 #  warranty, and with no claim as to its suitability for any purpose.
 
-""" Provides actions common to all toolsets, for as making directoies and
+""" Provides actions common to all toolsets, for as making directories and
     removing files.
 """
 
 import re
 import bjam
+import os
 
 from boost.build.build import feature
 from boost.build.util.utility import *
@@ -20,7 +21,7 @@
     """ Clear the module state. This is mainly for testing purposes.
         Note that this must be called _after_ resetting the module 'feature'.
     """    
-    global __had_unspecified_value, __had_value, __declared_subfeature, __all_signatures
+    global __had_unspecified_value, __had_value, __declared_subfeature, __all_signatures, __debug_configuration
     
     # Stores toolsets without specified initialization values.
     __had_unspecified_value = {}
@@ -33,6 +34,8 @@
     
     # Stores all signatures of the toolsets.
     __all_signatures = {}
+
+    __debug_configuration = '--debug-configuration' in bjam.variable('ARGV')
     
 reset ()
 
@@ -115,6 +118,34 @@
     # FIXME: we actually don't add any subfeatures to the condition
     return [condition]
 
+# Ported from trunk@47077
+def get_invocation_command_nodefault(
+    toolset, tool, user_provided_command, additional_paths=[], path_last=None):
+    """
+        A helper rule to get the command to invoke some tool. If
+        'user-provided-command' is not given, tries to find binary named 'tool' in
+        PATH and in the passed 'additional-path'. Otherwise, verifies that the first
+        element of 'user-provided-command' is an existing program.
+        
+        This rule returns the command to be used when invoking the tool. If we can't
+        find the tool, a warning is issued. If 'path-last' is specified, PATH is
+        checked after 'additional-paths' when searching for 'tool'.
+    """
+    if not user_provided_command:
+        command = find_tool(tool, additional_paths, path_last) 
+        if not command and __debug_configuration:
+            print "warning: toolset", toolset, "initialization: can't find tool, tool"
+            #FIXME
+            #print "warning: initialized from" [ errors.nearest-user-location ] ;
+    else:
+        command = check_tool(user_provided_command)
+        if not command and __debug_configuration:
+            print "warning: toolset", toolset, "initialization:"
+            print "warning: can't find user-provided command", user_provided_command
+            #FIXME
+            #ECHO "warning: initialized from" [ errors.nearest-user-location ]
+            
+    return command
 
 def get_invocation_command (toolset, tool, user_provided_command = None, additional_paths = None, path_last = None):
     """ A helper rule to get the command to invoke some tool. The rule is either passed
@@ -157,28 +188,20 @@
 
     return command
 
-
-###################################################################
-# Still to port.
-# Original lines are prefixed with "### "
-#
-### # Given an invocation command,
-### # return the absolute path to the command. This works even if commnad
-### # has not path element and is present in PATH.
-### rule get-absolute-tool-path ( command )
-### {
-###     if $(command:D)
-###     {
-###         return $(command:D) ;
-###     }
-###     else
-###     {
-###         local m = [ GLOB [ modules.peek : PATH Path path ] : $(command) $(command).exe ] ;
-###         return $(m[1]:D) ;
-###     }    
-### }
-
-
+def get_absolute_tool_path(command):
+    """
+        Given an invocation command,
+        return the absolute path to the command. This works even if commnad
+        has not path element and is present in PATH.
+    """
+    if os.path.dirname(command):
+        return os.path.dirname(command)
+    else:
+        programs = path.programs_path()
+        m = path.glob(programs, [command, command + '.exe' ])
+        if not len(m):
+            print "Could not find:", command, "in", programs
+        return os.path.dirname(m[0])
 
 def find_tool (name, additional_paths = None, path_last = False):
     """ Attempts to find tool (binary) named 'name' in PATH and in 'additiona-paths'.
@@ -242,10 +265,10 @@
 
     assert (command)
     toolset.flags (tool, 'CONFIG_COMMAND', condition, [command])
-    toolset.flags (tool + '_compile', 'OPTIONS', condition, feature.get_values ('<compileflags>', options))
-    toolset.flags (tool + '_compile_c', 'OPTIONS', condition, feature.get_values ('<cflags>', options))
-    toolset.flags (tool + '_compile_c++', 'OPTIONS', condition, feature.get_values ('<cxxflags>', options))
-    toolset.flags (tool + '_link', 'OPTIONS', condition, feature.get_values ('<linkflags>', options))
+    toolset.flags (tool + '.compile', 'OPTIONS', condition, feature.get_values ('<compileflags>', options))
+    toolset.flags (tool + '.compile.c', 'OPTIONS', condition, feature.get_values ('<cflags>', options))
+    toolset.flags (tool + '.compile.c++', 'OPTIONS', condition, feature.get_values ('<cxxflags>', options))
+    toolset.flags (tool + '.link', 'OPTIONS', condition, feature.get_values ('<linkflags>', options))
 
 
 ### # returns the location of the "program files" directory on a windows
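[Editor's note] common.py now ports get_absolute_tool_path(), which resolves the directory of a tool either from the command itself or by searching the program path. A rough standard-library-only equivalent for illustration (absolute_tool_path and its PATH handling are assumptions, not the Boost.Build API):

import os

def absolute_tool_path(command, search_path=None):
    # A command given with a directory component answers itself.
    if os.path.dirname(command):
        return os.path.dirname(command)
    # Otherwise search PATH for 'command' (and 'command.exe', for Windows).
    if search_path is None:
        search_path = os.environ.get('PATH', '').split(os.pathsep)
    for d in search_path:
        for candidate in (command, command + '.exe'):
            if os.path.isfile(os.path.join(d, candidate)):
                return d
    return None

# absolute_tool_path('/usr/bin/g++') -> '/usr/bin'
# absolute_tool_path('g++')          -> first PATH directory containing g++, or None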
Modified: branches/build/python_port/python/boost/build/tools/gcc.py
==============================================================================
--- branches/build/python_port/python/boost/build/tools/gcc.py	(original)
+++ branches/build/python_port/python/boost/build/tools/gcc.py	2008-07-07 00:30:47 EDT (Mon, 07 Jul 2008)
@@ -1,286 +1,813 @@
-#  Copyright (c) 2001 David Abrahams.
-#  Copyright (c) 2002-2003 Rene Rivera.
-#  Copyright (c) 2002-2003 Vladimir Prus.
+# Status: being ported by Steven Watanabe
+# Base revision: 47077
+# TODO: common.jam needs to be ported
+# TODO: generators.jam needs to have register_c_compiler.
 #
-#  Use, modification and distribution is subject to the Boost Software
-#  License Version 1.0. (See accompanying file LICENSE_1_0.txt or
-#  http://www.boost.org/LICENSE_1_0.txt)
-
-import os.path
-from boost.build.tools import common, builtin, unix
-from boost.build.build import feature, toolset, type, generators
-from boost.build.util.utility import *
+# Copyright 2001 David Abrahams.
+# Copyright 2002-2006 Rene Rivera.
+# Copyright 2002-2003 Vladimir Prus.
+#  Copyright (c) 2005 Reece H. Dunn.
+# Copyright 2006 Ilya Sokolov.
+# Copyright 2007 Roland Schwarz
+# Copyright 2007 Boris Gubenko.
+# Copyright 2008 Steven Watanabe
+#
+# Distributed under the Boost Software License, Version 1.0.
+#    (See accompanying file LICENSE_1_0.txt or copy at
+#          http://www.boost.org/LICENSE_1_0.txt)
+
+###import toolset : flags ;
+###import property ;
+###import generators ;
+###import os ;
+###import type ;
+###import feature ;
+###import "class" : new ;
+###import set ;
+###import common ;
+###import errors ;
+###import property-set ;
+###import pch ;
+###import regex ;
+
+import os
+import subprocess
+import re
+
+import bjam
+
+from boost.build.tools import unix, common, rc, pch, builtin
+from boost.build.build import feature, type, toolset, generators
+from boost.build.util.utility import os_name, on_windows
 from boost.build.manager import get_manager
+from boost.build.build.generators import Generator
+from boost.build.build.toolset import flags
+from boost.build.util.utility import to_seq
 
+__debug = None
 
-feature.extend ('toolset', ['gcc'])
+def debug():
+    global __debug
+    if __debug is None:
+        __debug = "--debug-generators" in bjam.variable("ARGV")        
+    return __debug
 
-toolset.inherit_generators ('gcc', [], 'unix', ['unix_link', 'unix_link_dll'])
-toolset.inherit_flags ('gcc', 'unix')
-toolset.inherit_rules ('gcc', 'unix')
+feature.extend('toolset', ['gcc'])
 
-# Make the "o" suffix used for gcc toolset on all
-# platforms
-type.set_generated_target_suffix ('OBJ', ['<toolset>gcc'], 'o')
-type.set_generated_target_suffix ('STATIC_LIB', ['<toolset>gcc'], 'a')
 
+toolset.inherit_generators('gcc', [], 'unix', ['unix.link', 'unix.link.dll'])
+toolset.inherit_flags('gcc', 'unix')
+toolset.inherit_rules('gcc', 'unix')
 
-def init (version = None, command = None, options = None):
-    """ Initializes the gcc toolset for the given version.
-        If necessary, command may be used to specify where the compiler
-        is located.
-        The parameter 'options' is a space-delimited list of options, each
-        one being specified as <option-name>option-value. Valid option names
-        are: cxxflags, linkflags and linker-type. Accepted values for linker-type
-        are gnu and sun, gnu being the default.
-        Example:
-          using gcc : 3.4 : : <cxxflags>foo <linkflags>bar <linker-type>sun ;
-    """
-    options = to_seq (options)
+generators.override('gcc.prebuilt', 'builtin.prebuilt')
+generators.override('gcc.searched-lib-generator', 'searched-lib-generator')
 
-    condition = common.check_init_parameters ('gcc', ('version', version))
-    
-    command = common.get_invocation_command ('gcc', 'g++', command)
-
-    common.handle_options ('gcc', condition, command, options)
-    
-    linker = feature.get_values ('<linker-type>', options)
-    if not linker:
-        linker = 'gnu'
+# Target naming is determined by types/lib.jam and the settings below this
+# comment.
+#
+# On *nix:
+#     libxxx.a     static library
+#     libxxx.so    shared library
+#
+# On windows (mingw):
+#     libxxx.lib   static library
+#     xxx.dll      DLL
+#     xxx.lib      import library
+#
+# On windows (cygwin) i.e. <target-os>cygwin
+#     libxxx.a     static library
+#     xxx.dll      DLL
+#     libxxx.dll.a import library
+#
+# Note: user can always override by using the <tag>@rule
+#       These settings have been chosen so that mingw
+#       is in line with msvc naming conventions. For
+#       cygwin the cygwin naming convention has been chosen.
 
-    init_link_flags ('gcc', linker, condition)
+# Make the "o" suffix used for gcc toolset on all
+# platforms
+type.set_generated_target_suffix('OBJ', ['<toolset>gcc'], 'o')
+type.set_generated_target_suffix('STATIC_LIB', ['<toolset>gcc', '<target-os>cygwin'], 'a')
 
+type.set_generated_target_suffix('IMPORT_LIB', ['<toolset>gcc', '<target-os>cygwin'], 'dll.a')
+type.set_generated_target_prefix('IMPORT_LIB', ['<toolset>gcc', '<target-os>cygwin'], 'lib')
 
-def gcc_compile_cpp(targets, sources, properties):
-    # Some extensions are compiled as C++ by default. For others, we need
-    # to pass -x c++.
-    # We could always pass -x c++ but distcc does not work with it.
-    extension = os.path.splitext (sources [0]) [1]
-    lang = ''
-    if not extension in ['.cc', '.cp', '.cxx', '.cpp', '.c++', '.C']:
-        lang = '-x c++'
-    get_manager().engine ().set_target_variable (targets, 'LANG', lang)
+__machine_match = re.compile('^([^ ]+)')
+__version_match = re.compile('^([0-9.]+)')
 
-engine = get_manager().engine()
-engine.register_action ('gcc.compile.c++',
-                        '"$(CONFIG_COMMAND)" $(LANG) -Wall -ftemplate-depth-100 $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"',
-                        function=gcc_compile_cpp)
+def init(version = None, command = None, options = None):
+    """
+        Initializes the gcc toolset for the given version. If necessary, command may
+        be used to specify where the compiler is located. The parameter 'options' is a
+        space-delimited list of options, each one specified as
+        <option-name>option-value. Valid option names are: cxxflags, linkflags and
+        linker-type. Accepted linker-type values are gnu, darwin, osf, hpux or sun
+        and the default value will be selected based on the current OS.
+        Example:
+          using gcc : 3.4 : : <cxxflags>foo <linkflags>bar <linker-type>sun ;
+    """
 
-builtin.register_c_compiler ('gcc.compile.c++', ['CPP'], ['OBJ'], ['<toolset>gcc'])
+    options = to_seq(options)
 
+    # Information about the gcc command...
+    #   The command.
+    command = to_seq(common.get_invocation_command('gcc', 'g++', command))
+    #   The root directory of the tool install.
+    root = feature.get_values('<root>', options)
+    #   The bin directory where to find the command to execute.
+    bin = None
+    #   The flavor of compiler.
+    flavor = feature.get_values('<flavor>', options)
+    #   Autodetect the root and bin dir if not given.
+    if command:
+        if not bin:
+            bin = common.get_absolute_tool_path(command[-1])
+        if not root:
+            root = os.path.dirname(bin)
+    #   Autodetect the version and flavor if not given.
+    if command:
+        machine_info = subprocess.Popen(command + ['-dumpmachine'], stdout=subprocess.PIPE).communicate()[0]
+        machine = __machine_match.search(machine_info).group(1)
+
+        version_info = subprocess.Popen(command + ['-dumpversion'], stdout=subprocess.PIPE).communicate()[0]
+        version = __version_match.search(version_info).group(1)
+        if not flavor and machine.find('mingw') != -1:
+            flavor = 'mingw'
+
+    condition = None
+    if flavor:
+        condition = common.check_init_parameters('gcc',
+            ('version', version),
+            ('flavor', flavor))
+    else:
+        condition = common.check_init_parameters('gcc',
+            ('version', version))
 
-def gcc_compile_c (manager, targets, sources, properties):
-    # If we use the name g++ then default file suffix -> language mapping
-    # does not work. So have to pass -x option. Maybe, we can work around this
-    # by allowing the user to specify both C and C++ compiler names.
-    manager.engine ().set_target_variable (targets, 'LANG', '-x c')
+    if command:
+        command = command[0]
 
-engine.register_action ('gcc.compile.c',
-                        '"$(CONFIG_COMMAND)" $(LANG) -Wall $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"',
-                        function=gcc_compile_c)
+    common.handle_options('gcc', condition, command, options)
 
-builtin.register_c_compiler ('gcc.compile.c', ['C'], ['OBJ'], ['<toolset>gcc'])
+    linker = feature.get_values('<linker-type>', options)
+    if not linker:
+        if os_name() == 'OSF':
+            linker = 'osf'
+        elif os_name() == 'HPUX':
+            linker = 'hpux'
+        else:
+            linker = 'gnu'
+    ## NOTE: this comes from later in this module
+    init_link_flags('gcc', linker, condition)
+
+    # If gcc is installed in non-standard location, we'd need to add
+    # LD_LIBRARY_PATH when running programs created with it (for unit-test/run
+    # rules).
+    if command:
+        # On multilib 64-bit boxes, there are both 32-bit and 64-bit libraries
+        # and all must be added to LD_LIBRARY_PATH. The linker will pick the
+        # right ones. Note that we don't provide a clean way to build a 32-bit
+        # binary with a 64-bit compiler, but the user can always pass -m32 manually.
+        lib_path = [os.path.join(root, 'bin'),
+                    os.path.join(root, 'lib'),
+                    os.path.join(root, 'lib32'),
+                    os.path.join(root, 'lib64')]
+        if debug():
+            print 'notice: using gcc libraries ::', condition, '::', lib_path
+        toolset.flags('gcc.link', 'RUN_PATH', condition, lib_path)
+
+    # If it's not a system gcc install we should adjust the various programs as
+    # needed to prefer using the install specific versions. This is essential
+    # for correct use of MinGW and for cross-compiling.
+
+    # - The archive builder.
+    ##TODO: what is search-path?
+    archiver = common.get_invocation_command('gcc',
+            'ar', feature.get_values('<archiver>', options), bin, ['search-path'])
+    toolset.flags('gcc.archive', '.AR', condition, archiver[0])
+    if debug():
+        print 'notice: using gcc archiver ::', condition, '::', archiver[0]
+
+    # - The resource compiler.
+    rc_command = common.get_invocation_command_nodefault('gcc',
+            'windres', feature.get_values('<rc>', options), bin, ['search-path'])
+    rc_type = feature.get_values('<rc-type>', options)
+
+    if not rc_type:
+        rc_type = 'windres'
+
+    if not rc_command:
+        # If we can't find an RC compiler we fallback to a null RC compiler that
+        # creates empty object files. This allows the same Jamfiles to work
+        # across the board. The null RC uses the assembler to create the empty
+        # objects, so configure that.
+        rc_command = common.get_invocation_command('gcc', 'as', [], bin, ['search-path'])
+        rc_type = 'null'
+    rc.configure(rc_command, condition, '<rc-type>' + rc_type)
+
+###if [ os.name ] = NT
+###{
+###    # This causes single-line command invocation to not go through .bat files,
+###    # thus avoiding command-line length limitations.
+###    JAMSHELL = % ;
+###}
+
+#FIXME: when register_c_compiler is moved to
+# generators, these should be updated
+builtin.register_c_compiler('gcc.compile.c++', ['CPP'], ['OBJ'], ['<toolset>gcc'])
+builtin.register_c_compiler('gcc.compile.c', ['C'], ['OBJ'], ['<toolset>gcc'])
+builtin.register_c_compiler('gcc.compile.asm', ['ASM'], ['OBJ'], ['<toolset>gcc'])
+
+# pch support
+
+# The compiler looks for a precompiled header in each directory just before it
+# looks for the include file in that directory. The name searched for is the
+# name specified in the #include directive with ".gch" suffix appended. The
+# logic in gcc-pch-generator will make sure that BASE_PCH suffix is appended to
+# full name of the header.
+
+type.set_generated_target_suffix('PCH', ['<toolset>gcc'], 'gch')
+
+# GCC-specific pch generator.
+class GccPchGenerator(pch.PchGenerator):
+
+    # Inherit the __init__ method
+
+    def run_pch(self, project, name, prop_set, sources):
+        # Find the header in sources. Ignore any CPP sources.
+        header = None
+        for s in sources:
+            if type.is_derived(s.type, 'H'):
+                header = s
+
+        # Error handling: Base header file name should be the same as the base
+        # precompiled header name.
+        header_name = header.name
+        header_basename = os.path.basename(header_name).rsplit('.', 1)[0]
+        if header_basename != name:
+            location = project.project_module
+            ###FIXME:
+            raise Exception()
+            ### errors.user-error "in" $(location)": pch target name `"$(name)"' should be the same as the base name of header file `"$(header-name)"'" ;
+
+        pch_file = Generator.run(self, project, name, prop_set, [header])
+
+        # return result of base class and pch-file property as usage-requirements
+        # FIXME: what about multiple results from generator.run?
+        return (property_set.create('<pch-file>' + pch_file[0], '<cflags>-Winvalid-pch'),
+                pch_file)
+
+    # Calls the base version specifying source's name as the name of the created
+    # target. As a result, the PCH will be named whatever.hpp.gch, and not
+    # whatever.gch.
+    def generated_targets(self, sources, prop_set, project, name = None):
+        name = sources[0].name
+        return Generator.generated_targets(self, sources,
+            prop_set, project, name)
+
+# Note: the 'H' source type will catch both '.h' header and '.hpp' header. The
+# latter have HPP type, but HPP type is derived from H. The type of compilation
+# is determined entirely by the destination type.
+generators.register(GccPchGenerator('gcc.compile.c.pch', False, ['H'], ['C_PCH'], ['<pch>on', '<toolset>gcc' ]))
+generators.register(GccPchGenerator('gcc.compile.c++.pch', False, ['H'], ['CPP_PCH'], ['<pch>on', '<toolset>gcc' ]))
+
+# Override default do-nothing generators.
+generators.override('gcc.compile.c.pch', 'pch.default-c-pch-generator')
+generators.override('gcc.compile.c++.pch', 'pch.default-cpp-pch-generator')
 
+flags('gcc.compile', 'PCH_FILE', ['<pch>on'], ['<pch-file>'])
 
 # Declare flags and action for compilation
-toolset.flags ('gcc.compile', 'OPTIONS', ['<optimization>off'], ['-O0'])
-toolset.flags ('gcc.compile', 'OPTIONS', ['<optimization>speed'], ['-O3'])
-toolset.flags ('gcc.compile', 'OPTIONS', ['<optimization>space'], ['-Os'])
-
-toolset.flags ('gcc.compile', 'OPTIONS', ['<inlining>off'], ['-fno-inline'])
-toolset.flags ('gcc.compile', 'OPTIONS', ['<inlining>on'], ['-Wno-inline'])
-toolset.flags ('gcc.compile', 'OPTIONS', ['<inlining>full'], ['-finline-functions', '-Wno-inline'])
-
-toolset.flags ('gcc.compile', 'OPTIONS', ['<debug-symbols>on'], ['-g'])
-toolset.flags ('gcc.compile', 'OPTIONS', ['<profiling>on'], ['-pg'])
-
-# On cygwin and mingw, gcc generates position independent code by default,
-# and warns if -fPIC is specified. This might not be the right way
-# of checking if we're using cygwin. For example, it's possible 
-# to run cygwin gcc from NT shell, or using crosscompiling.
-# But we'll solve that problem when it's time. In that case
-# we'll just add another parameter to 'init' and move this login
+flags('gcc.compile', 'OPTIONS', ['<optimization>off'], ['-O0'])
+flags('gcc.compile', 'OPTIONS', ['<optimization>speed'], ['-O3'])
+flags('gcc.compile', 'OPTIONS', ['<optimization>space'], ['-Os'])
+
+flags('gcc.compile', 'OPTIONS', ['<inlining>off'], ['-fno-inline'])
+flags('gcc.compile', 'OPTIONS', ['<inlining>on'], ['-Wno-inline'])
+flags('gcc.compile', 'OPTIONS', ['<inlining>full'], ['-finline-functions', '-Wno-inline'])
+
+flags('gcc.compile', 'OPTIONS', ['<warnings>off'], ['-w'])
+flags('gcc.compile', 'OPTIONS', ['<warnings>on'], ['-Wall'])
+flags('gcc.compile', 'OPTIONS', ['<warnings>all'], ['-Wall', '-pedantic'])
+flags('gcc.compile', 'OPTIONS', ['<warnings-as-errors>on'], ['-Werror'])
+
+flags('gcc.compile', 'OPTIONS', ['<debug-symbols>on'], ['-g'])
+flags('gcc.compile', 'OPTIONS', ['<profiling>on'], ['-pg'])
+flags('gcc.compile', 'OPTIONS', ['<rtti>off'], ['-fno-rtti'])
+
+# On cygwin and mingw, gcc generates position independent code by default, and
+# warns if -fPIC is specified. This might not be the right way of checking if
+# we're using cygwin. For example, it's possible to run cygwin gcc from NT
+# shell, or when cross-compiling. But we'll solve that problem when it's time.
+# In that case we'll just add another parameter to 'init' and move this logic
 # inside 'init'.
 if not os_name () in ['CYGWIN', 'NT']:
-    toolset.flags ('gcc.compile', 'OPTIONS', ['<link>shared', '<main-target-type>LIB'], ['-fPIC'])
+    print "osname:", os_name()
+    # This logic will add -fPIC for all compilations:
+    #
+    # lib a : a.cpp b ;
+    # obj b : b.cpp ;
+    # exe c : c.cpp a d ;
+    # obj d : d.cpp ;
+    #
+    # This all is fine, except that 'd' will be compiled with -fPIC even though
+    # it's not needed, as 'd' is used only in exe. However, it's hard to detect
+    # where a target is going to be used. Alternatively, we could set -fPIC only
+    # when the main target type is LIB, but then 'b' will be compiled without -fPIC.
+    # In x86-64 that will lead to link errors. So, compile everything with
+    # -fPIC.
+    #
+    # Yet another alternative would be to create propagated <sharedable>
+    # feature, and set it when building shared libraries, but that's hard to
+    # implement and will increase target path length even more.
+    flags('gcc.compile', 'OPTIONS', ['<link>shared'], ['-fPIC'])
+
+if os_name() != 'NT' and os_name() != 'OSF' and os_name() != 'HPUX':
+    # OSF does have an option called -soname but it doesn't seem to work as
+    # expected, therefore it has been disabled.
+    HAVE_SONAME   = ''
+    SONAME_OPTION = '-h'
+
+
+flags('gcc.compile', 'USER_OPTIONS', [], ['<cflags>'])
+flags('gcc.compile.c++', 'USER_OPTIONS',[], ['<cxxflags>'])
+flags('gcc.compile', 'DEFINES', [], ['<define>'])
+flags('gcc.compile', 'INCLUDES', [], ['<include>'])
 
-if os_name () != 'NT':
-    HAVE_SONAME = True
-else:
-    HAVE_SONAME = False
-
-toolset.flags ('gcc.compile', 'OPTIONS', [], ['<cflags>'])
-toolset.flags ('gcc.compile.c++', 'OPTIONS', [], ['<cxxflags>'])
-toolset.flags ('gcc.compile', 'DEFINES', [], ['<define>'])
-toolset.flags ('gcc.compile', 'INCLUDES', [], ['<include>'])
-
-class GccLinkingGenerator (unix.UnixLinkingGenerator):
-    
-    """ The class which check that we don't try to use
-        the <link-runtime>static property while creating or using shared library,
-        since it's not supported by gcc/libc.
-    """
-    def generated_targets (self, sources, prop_set, project, name):
-        if '<link-runtime>static' in prop_set.raw ():
-            m = ''
-            if self.id () == "gcc.link.dll":
-                m = "on gcc, DLLs can't be build with <link-runtime>static."
-            else:
-                for s in sources:
-                    source_type = s.type ()
-                    if source_type and  type.is_derived (source_type, 'SHARED_LIB'):
-                        m = "on gcc, using DLLs together with the <link-runtime>static options is not possible."
-
-            if m:
-                raise UserError (m + " It's suggested to use <link-runtime>static together with the <link>static")
-
-        return unix.UnixLinkingGenerator.generated_targets (self, sources, prop_set, project, name)
-
-def gcc_link (targets, sources, properties):
-    get_manager().engine ().set_target_variable (targets, 'SPACE', " ")
-
-engine.register_action('gcc.link', '"$(CONFIG_COMMAND)" -L"$(LINKPATH)" -Wl,-R$(SPACE)-Wl,"$(RPATH)" -Wl,-rpath-link$(SPACE)-Wl,"$(RPATH_LINK)" -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-ST) -l$(FINDLIBS-SA) $(OPTIONS)',
-                       bound_list=['LIBRARIES'],
-                       function=gcc_link)
-
-generators.register (GccLinkingGenerator ('gcc.link', True, ['LIB', 'OBJ'], ['EXE'], ['<toolset>gcc']))
-
-
-def gcc_link_dll (manager, target, sources, properties):
-    manager.engine ().set_target_variable (target, 'SPACE', " ")
+engine = get_manager().engine()
 
-# TODO: how to set 'bind LIBRARIES'?
-engine.register_action('gcc.link.dll', '"$(CONFIG_COMMAND)" -L"$(LINKPATH)" -Wl,-R$(SPACE)-Wl,"$(RPATH)" -o "$(<)" $(HAVE_SONAME)-Wl,-h$(SPACE)-Wl,$(<[1]:D=) -shared "$(>)"  "$(LIBRARIES)" -l$(FINDLIBS-ST) -l$(FINDLIBS-SA) $(OPTIONS)',
-                       bound_list=['LIBRARIES'],
-                       function=gcc_link_dll)
+engine.register_action('gcc.compile.c++.pch', 
+    '"$(CONFIG_COMMAND)" -x c++-header $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"')
 
-generators.register (GccLinkingGenerator ('gcc.link.dll', True, ['LIB', 'OBJ'], ['SHARED_LIB'], ['<toolset>gcc']))
+engine.register_action('gcc.compile.c.pch',
+    '"$(CONFIG_COMMAND)" -x c-header $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"')
 
-### generators.override gcc.prebuilt : builtin.prebuilt ;
-### generators.override gcc.searched-lib-generator : searched-lib-generator ;
 
+def gcc_compile_cpp(targets, sources, properties):
+    # Some extensions are compiled as C++ by default. For others, we need to
+    # pass -x c++. We could always pass -x c++ but distcc does not work with it.
+    extension = os.path.splitext (sources [0]) [1]
+    lang = ''
+    if not extension in ['.cc', '.cp', '.cxx', '.cpp', '.c++', '.C']:
+        lang = '-x c++'
+    get_manager().engine().set_target_variable (targets, 'LANG', lang)
+    # FIXME: don't know how to translate this
+    ##DEPENDS $(<) : [ on $(<) return $(PCH_FILE) ]
+
+def gcc_compile_c(targets, sources, properties):
+    # If we use the name g++ then default file suffix -> language mapping does
+    # not work. So have to pass -x option. Maybe, we can work around this by
+    # allowing the user to specify both C and C++ compiler names.
+    #if $(>:S) != .c
+    #{
+        get_manager().engine().set_target_variable (targets, 'LANG', '-x c')
+    #}
+    #FIXME: don't know how to translate this
+    ##DEPENDS $(<) : [ on $(<) return $(PCH_FILE) ] ;
+
+engine.register_action(
+    'gcc.compile.c++',
+    '"$(CONFIG_COMMAND)" $(LANG) -ftemplate-depth-128 $(OPTIONS) ' +
+        '$(USER_OPTIONS) -D$(DEFINES) -I"$(PCH_FILE:D)" -I"$(INCLUDES)" ' +
+        '-c -o "$(<:W)" "$(>:W)"',
+    function=gcc_compile_cpp,
+    bound_list=['PCH_FILE'])
+
+engine.register_action(
+    'gcc.compile.c',
+    '"$(CONFIG_COMMAND)" $(LANG) $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) ' +
+        '-I"$(PCH_FILE:D)" -I"$(INCLUDES)" -c -o "$(<)" "$(>)"',
+    function=gcc_compile_c,
+    bound_list=['PCH_FILE'])
+
+def gcc_compile_asm(targets, sources, properties):
+    get_manager().engine().set_target_variable(targets, 'LANG', '-x assembler-with-cpp')
+
+engine.register_action(
+    'gcc.compile.asm',
+    '"$(CONFIG_COMMAND)" $(LANG) $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"',
+    function=gcc_compile_asm)
 
-# Declare flags for linking
-# First, the common flags
-toolset.flags ('gcc.link', 'OPTIONS', ['<debug-symbols>on'], ['-g'])
-toolset.flags ('gcc.link', 'OPTIONS', ['<profiling>on'], ['-pg'])
-toolset.flags ('gcc.link', 'OPTIONS', [], ['<linkflags>'])
-toolset.flags ('gcc.link', 'LINKPATH', [], ['<library-path>'])
-toolset.flags ('gcc.link', 'FINDLIBS-ST', [], ['<find-static-library>'])
-toolset.flags ('gcc.link', 'FINDLIBS-SA', [], ['<find-shared-library>'])
-toolset.flags ('gcc.link', 'LIBRARIES', [], ['<library-file>'])
 
-### # For <link-runtime>static we made sure there are no dynamic libraries 
-### # in the link
-toolset.flags ('gcc.link', 'OPTIONS', ['<link-runtime>static'], ['-static'])
+class GccLinkingGenerator(unix.UnixLinkingGenerator):
+    """
+        The class which checks that we don't try to use the <runtime-link>static
+        property while creating or using a shared library, since it's not supported by
+        gcc/libc.
+    """
+    def run(self, project, name, prop_set, sources):
+        # TODO: Replace this with the use of a target-os property.
 
-def init_link_flags (tool, linker, condition):
-    """ Sets the vendor specific flags.
+        no_static_link = False
+        if bjam.variable('UNIX'):
+            no_static_link = True
+        ##FIXME: what does this mean?
+##        {
+##            switch [ modules.peek : JAMUNAME ]
+##            {
+##                case * : no-static-link = true ;
+##            }
+##        }
+
+        properties = prop_set.raw()
+        reason = None
+        if no_static_link and '<runtime-link>static' in properties:
+            if '<link>shared' in properties:
+                reason = "On gcc, DLL can't be build with '<runtime-link>static'."
+            elif type.is_derived(self.target_types[0], 'EXE'):
+                for s in sources:
+                    source_type = s.type()
+                    if source_type and type.is_derived(source_type, 'SHARED_LIB'):
+                        reason = "On gcc, using DLLS together with the " +\
+                                 "<runtime-link>static options is not possible "
+        if reason:
+            print 'warning:', reason
+            print 'warning:',\
+                "It is suggested to use '<runtime-link>static' together",\
+                "with '<link>static'." ;
+            return
+        else:
+            generated_targets = unix.UnixLinkingGenerator.run(self, project,
+                name, prop_set, sources)
+            return generated_targets
+
+__IMPLIB_COMMAND = None
+if on_windows():
+    __IMPLIB_COMMAND = '-Wl,--out-implib,'
+    generators.register(
+        GccLinkingGenerator('gcc.link', True,
+            ['OBJ', 'SEARCHED_LIB', 'STATIC_LIB', 'IMPORT_LIB'],
+            [ 'EXE' ],
+            [ '<toolset>gcc' ]))
+    generators.register(
+        GccLinkingGenerator('gcc.link.dll', True,
+            ['OBJ', 'SEARCHED_LIB', 'STATIC_LIB', 'IMPORT_LIB'],
+            ['IMPORT_LIB', 'SHARED_LIB'],
+            ['<toolset>gcc']))
+else:
+    generators.register(
+        GccLinkingGenerator('gcc.link', True,
+            ['LIB', 'OBJ'],
+            ['EXE'],
+            ['<toolset>gcc']))
+    generators.register(
+        GccLinkingGenerator('gcc.link.dll', True,
+            ['LIB', 'OBJ'],
+            ['SHARED_LIB'],
+            ['<toolset>gcc']))
+
+# Declare flags for linking.
+# First, the common flags.
+flags('gcc.link', 'OPTIONS', ['<debug-symbols>on'], ['-g'])
+flags('gcc.link', 'OPTIONS', ['<profiling>on'], ['-pg'])
+flags('gcc.link', 'USER_OPTIONS', [], ['<linkflags>'])
+flags('gcc.link', 'LINKPATH', [], ['<library-path>'])
+flags('gcc.link', 'FINDLIBS-ST', [], ['<find-static-library>'])
+flags('gcc.link', 'FINDLIBS-SA', [], ['<find-shared-library>'])
+flags('gcc.link', 'LIBRARIES', [], ['<library-file>'])
+
+# For <runtime-link>static we made sure there are no dynamic libraries in the
+# link. On HP-UX not all system libraries exist as archived libraries (for
+# example, there is no libunwind.a), so, on this platform, the -static option
+# cannot be specified.
+if os_name() != 'HPUX':
+    flags('gcc.link', 'OPTIONS', ['<runtime-link>static'], ['-static'])
+
+# Now, the vendor specific flags.
+# The parameter linker can be either gnu, darwin, osf, hpux or sun.
+def init_link_flags(toolset, linker, condition):
+    """
+        Sets the vendor specific link flags.
+        The parameter linker can be either gnu, darwin, osf, hpux or sun.
     """
+    toolset_link = toolset + '.link'
     if linker == 'gnu':
-        # Strip the binary when no debugging is needed.
-        # We use --strip-all flag as opposed to -s since icc
-        # (intel's compiler) is generally option-compatible with
-        # and inherits from gcc toolset, but does not support -s
-        toolset.flags (tool + '_link', 'OPTIONS', condition + ['<debug-symbols>off'], ['-Wl,--strip-all'])
-        toolset.flags (tool + '_link', 'RPATH', condition, ['<dll-path>'])
-        toolset.flags (tool + '_link', 'RPATH_LINK', condition, ['<xdll-path>'])
+        # Strip the binary when no debugging is needed. We use --strip-all flag
+        # as opposed to -s since icc (intel's compiler) is generally
+        # option-compatible with and inherits from the gcc toolset, but does not
+        # support -s.
+
+        # FIXME: what does unchecked translate to?
+        flags(toolset_link, 'OPTIONS', map(lambda x: x + '/<debug-symbols>off', condition), ['-Wl,--strip-all'])  # : unchecked ;
+        flags(toolset_link, 'RPATH',       condition,                      ['<dll-path>'])       # : unchecked ;
+        flags(toolset_link, 'RPATH_LINK',  condition,                      ['<xdll-path>'])      # : unchecked ;
+        flags(toolset_link, 'START-GROUP', condition,                      ['-Wl,--start-group'])# : unchecked ;
+        flags(toolset_link, 'END-GROUP',   condition,                      ['-Wl,--end-group'])  # : unchecked ;
+
+        # GNU ld can change the search behaviour for libraries referenced by
+        # the -l switch. The modifiers are -Bstatic and -Bdynamic, and they
+        # change the search for the -l switches that follow them. The list
+        # below shows the variants tried; the search stops at the first
+        # variant that matches.
+        # *nix: -Bstatic -lxxx
+        #    libxxx.a
+        #
+        # *nix: -Bdynamic -lxxx
+        #    libxxx.so
+        #    libxxx.a
+        #
+        # windows (mingw,cygwin) -Bstatic -lxxx
+        #    libxxx.a
+        #    xxx.lib
+        #
+        # windows (mingw,cygwin) -Bdynamic -lxxx
+        #    libxxx.dll.a
+        #    xxx.dll.a
+        #    libxxx.a
+        #    xxx.lib
+        #    cygxxx.dll (*)
+        #    libxxx.dll
+        #    xxx.dll
+        #    libxxx.a
+        #
+        # (*) This is for cygwin
+        # Please note that -Bstatic and -Bdynamic do not guarantee that a
+        # static or dynamic library actually gets linked in. The switches only
+        # change the search patterns!
+
+        # On *nix mixing shared libs with static runtime is not a good idea.
+        flags(toolset_link, 'FINDLIBS-ST-PFX',
+              map(lambda x: x + '/<runtime-link>shared', condition),
+            ['-Wl,-Bstatic']) # : unchecked ;
+        flags(toolset_link, 'FINDLIBS-SA-PFX',
+              map(lambda x: x + '/<runtime-link>shared', condition),
+            ['-Wl,-Bdynamic']) # : unchecked ;
+
+        # On windows allow mixing of static and dynamic libs with static
+        # runtime.
+        flags(toolset_link, 'FINDLIBS-ST-PFX',
+              map(lambda x: x + '/<runtime-link>static/<target-os>windows', condition),
+              ['-Wl,-Bstatic']) # : unchecked ;
+        flags(toolset_link, 'FINDLIBS-SA-PFX',
+              map(lambda x: x + '/<runtime-link>static/<target-os>windows', condition),
+              ['-Wl,-Bdynamic']) # : unchecked ;
+        flags(toolset_link, 'OPTIONS',
+              map(lambda x: x + '/<runtime-link>static/<target-os>windows', condition),
+              ['-Wl,-Bstatic']) # : unchecked ;
 
     elif linker == 'darwin':
-        # we can't pass -s to ld unless we also pass -static
-        # so we removed -s completly from OPTIONS and add it
-        # to ST_OPTIONS            
-        toolset.flags (tool + '_link', 'ST_OPTIONS', condition + ['<debug-symbols>off'], ['-s'])
-        toolset.flags (tool + '_link', 'RPATH', condition, ['<dll-path>'])
-        toolset.flags (tool + '_link', 'RPATH_LINK', condition, ['<xdll-path>'])
+        # On Darwin, the -s option to ld does not work unless we also pass
+        # -static, and passing -static unconditionally is a bad idea. So, don't
+        # pass -s at all; darwin.jam will use a separate 'strip' invocation.
+        flags(toolset_link, 'RPATH', condition, ['<dll-path>']) # : unchecked ;
+        flags(toolset_link, 'RPATH_LINK', condition, ['<xdll-path>']) # : unchecked ;
+
+    elif linker == 'osf':
+        # No --strip-all, just -s.
+        flags(toolset_link, 'OPTIONS', map(lambda x: x + '/<debug-symbols>off', condition), ['-Wl,-s'])
+            # : unchecked ;
+        flags(toolset_link, 'RPATH', condition, ['<dll-path>']) # : unchecked ;
+        # This linker does not support -R.
+        flags(toolset_link, 'RPATH_OPTION', condition, ['-rpath']) # : unchecked ;
+        # -rpath-link is not supported at all.
 
     elif linker == 'sun':
-        toolset.flags (tool + '_link', 'OPTIONS', condition + ['<debug-symbols>off'], ['-Wl,-s'])
-        toolset.flags (tool + '_link', 'RPATH', condition, ['<dll-path>'])
-        # Solaris linker does not have a separate -rpath-link, but
-        # allows to use -L for the same purpose.
-        toolset.flags (tool + '_link', 'LINKPATH', condition, ['<xdll-path>'])
-
-        # This permits shared libraries with non-PIC code on Solaris
-        # VP, 2004/09/07: Now that we have -fPIC hardcode in link.dll,
-        # the following is not needed. Whether -fPIC should be hardcoded,
-        # is a separate question.
-        # AH, 2004/10/16: it is still necessary because some tests link
-        # against static libraries that were compiled without PIC.
-        toolset.flags (tool + '_link', 'OPTIONS', condition + ['<link>shared'], ['-mimpure-text'])
+        flags(toolset_link, 'OPTIONS', map(lambda x: x + '/<debug-symbols>off', condition), ['-Wl,-s'])
+            # : unchecked ;
+        flags(toolset_link, 'RPATH', condition, ['<dll-path>']) # : unchecked ;
+        # The Solaris linker does not have a separate -rpath-link, but allows
+        # -L to be used for the same purpose.
+        flags(toolset_link, 'LINKPATH', condition, ['<xdll-path>']) # : unchecked ;
+
+        # This permits shared libraries with non-PIC code on Solaris.
+        # VP, 2004/09/07: Now that we have -fPIC hardcoded in link.dll, the
+        # following is not needed. Whether -fPIC should be hardcoded is a
+        # separate question.
+        # AH, 2004/10/16: it is still necessary because some tests link against
+        # static libraries that were compiled without PIC.
+        flags(toolset_link, 'OPTIONS', map(lambda x: x + '/<link>shared', condition), ['-mimpure-text'])
+            # : unchecked ;
+
+    elif linker == 'hpux':
+        flags(toolset_link, 'OPTIONS', map(lambda x: x + '/<debug-symbols>off', condition),
+            ['-Wl,-s']) # : unchecked ;
+        flags(toolset_link, 'OPTIONS', map(lambda x: x + '/<link>shared', condition),
+            ['-fPIC']) # : unchecked ;
 
     else:
-            raise UserError ("'%s' initialization: invalid linker '%s'\n" \
-                "The value '%s' specified for <linker> is not recognized.\n" \
-                "Possible values are: 'darwin', 'sun', 'gnu'" % (toolset, linker, linker))
-
-
-### if [ os.name ] = NT
-### {
-###     # This causes single-line command invocation to not go through
-###     # .bat files, thus avoiding command-line length limitations
-###     JAMSHELL = % ;  
-### }
-
-### # Declare action for creating static libraries
-### # The 'r' letter means to replace files in the archive
-### # The 'u' letter means only outdated files in the archive
-### #   should be replaced.
-### # The 'c' letter means suppresses warning in case the archive
-### #   does not exists yet. That warning is produced only on
-### #   some platforms, for whatever reasons.
-#def gcc_archive (manager, targets, sources, properties):
-#    pass
-
-engine.register_action('gcc.archive', 'ar ruc "$(<)" "$(>)"')
-
-### # Set up threading support. It's somewhat contrived, so perform it at the end,
-### # to avoid cluttering other code.
-### 
-### if [ os.on-windows ] 
-### {
-###     flags gcc OPTIONS <threading>multi : -mthreads ;
-### }
-### else if [ modules.peek : UNIX ] 
-### {
-###     switch [ modules.peek : JAMUNAME ]
-###     {
-###     case SunOS* :
-###         {
-###         flags gcc OPTIONS <threading>multi : -pthreads ;
-###         flags gcc FINDLIBS-SA <threading>multi : rt ;
-###         }
-###     case BeOS :
-###         {
-###         # BeOS has no threading options, don't set anything here.
-###         }
-###     case *BSD :
-###         {
-###         flags gcc OPTIONS <threading>multi : -pthread ;
-###         # there is no -lrt on BSD
-###         }
-###     case DragonFly :
-###         {
-###         flags gcc OPTIONS <threading>multi : -pthread ;
-###         # there is no -lrt on BSD - DragonFly is a FreeBSD variant,
-###         # which anoyingly doesn't say it's a *BSD.
-###         }
-###     case IRIX :
-###         {
-###         # gcc on IRIX does not support multi-threading, don't set anything here.
-###         }
-###     case HP_UX :
-###         {
-###         # gcc on HP-UX does not support multi-threading, don't set anything here
-###         }
-###     case Darwin :
-###         {
-###         # Darwin has no threading options, don't set anything here.
-###         }
-###     case * :
-###         {
-###         flags gcc OPTIONS <threading>multi : -pthread ;
-###         flags gcc FINDLIBS-SA <threading>multi : rt ;
-###         }
-###     }
-### }
+        errors.user_error(
+            ("%s initialization: invalid linker '%s'. "
+             "The value '%s' specified for <linker> is not recognized. "
+             "Possible values are 'gnu', 'darwin', 'osf', 'hpux' or 'sun'.")
+            % (toolset, linker, linker))
+
+# Declare actions for linking.
+def gcc_link(targets, sources, properties):
+    engine = get_manager().engine()
+    engine.set_target_variable(targets, 'SPACE', ' ')
+    # Serialize execution of the 'link' action, since running N links in
+    # parallel is just slower. For now, serialize only gcc links; it might be a
+    # good idea to serialize all links.
+    engine.set_target_variable(targets, 'JAM_SEMAPHORE', '<s>gcc-link-semaphore')
+
+engine.register_action(
+    'gcc.link',
+    '"$(CONFIG_COMMAND)" -L"$(LINKPATH)" ' +
+        '-Wl,$(RPATH_OPTION:E=-R)$(SPACE)-Wl,"$(RPATH)" ' +
+        '-Wl,-rpath-link$(SPACE)-Wl,"$(RPATH_LINK)" -o "$(<)" ' +
+        '$(START-GROUP) "$(>)" "$(LIBRARIES)" $(FINDLIBS-ST-PFX) ' +
+        '-l$(FINDLIBS-ST) $(FINDLIBS-SA-PFX) -l$(FINDLIBS-SA) $(END-GROUP) ' +
+        '$(OPTIONS) $(USER_OPTIONS)',
+    function=gcc_link,
+    bound_list=['LIBRARIES'])
+
+# Default value. Mostly for the sake of intel-linux that inherits from gcc, but
+# does not have the same logic to set the .AR variable. We can put the same
+# logic in intel-linux, but that's hardly worth the trouble as on Linux, 'ar' is
+# always available.
+__AR = 'ar'
+
+flags('gcc.archive', 'AROPTIONS', [], ['<archiveflags>'])
+
+def gcc_archive(targets, sources, properties):
+    # Always remove the archive and start again. Here's the rationale from
+    #
+    # Andre Hentz:
+    #
+    # I had a file, say a1.c, that was included into liba.a. I moved a1.c to
+    # a2.c, updated my Jamfiles and rebuilt. My program was crashing with absurd
+    # errors. After some debugging I traced it back to the fact that a1.o was
+    # *still* in liba.a
+    #
+    # Rene Rivera:
+    #
+    # Originally removing the archive was done by splicing an RM onto the
+    # archive action. That makes archives fail to build on NT when they have
+    # many files because it will no longer execute the action directly and blow
+    # the line length limit. Instead we remove the file in a different action,
+    # just before building the archive.
+    #
+    # FIXME:
+    pass
+    ##local clean.a = $(targets[1])(clean) ;
+    ##TEMPORARY $(clean.a) ;
+    ##NOCARE $(clean.a) ;
+    ##LOCATE on $(clean.a) = [ on $(targets[1]) return $(LOCATE) ] ;
+    ##DEPENDS $(clean.a) : $(sources) ;
+    ##DEPENDS $(targets) : $(clean.a) ;
+    ##common.RmTemps $(clean.a) : $(targets) ;
+
+# Declare the action for creating static libraries.
+# The letter 'r' means to add files to the archive with replacement. Since we
+# remove the archive first, we don't care about replacement, but there is no
+# "add without replacement" option.
+# The letter 'c' suppresses the warning in case the archive does not exist yet.
+# That warning is produced only on some platforms, for whatever reason.
+#FIXME: This was originally piecemeal
+engine.register_action('gcc.archive',
+                       '"$(.AR)" $(AROPTIONS) rc "$(<)" "$(>)"',
+                       function=gcc_archive)
+
+def gcc_link_dll(targets, sources, properties):
+    engine = get_manager().engine()
+    engine.set_target_variable(targets, 'SPACE', ' ')
+    engine.set_target_variable(targets, 'JAM_SEMAPHORE', '<s>gcc-link-semaphore')
+
+engine.register_action(
+    'gcc.link.dll',
+    # Differs from the 'link' action above only by -shared.
+    '"$(CONFIG_COMMAND)" -L"$(LINKPATH)" ' +
+        '-Wl,$(RPATH_OPTION:E=-R)$(SPACE)-Wl,"$(RPATH)" ' +
+        '"$(.IMPLIB-COMMAND)$(<[1])" -o "$(<[-1])" ' +
+        '$(HAVE_SONAME)-Wl,$(SONAME_OPTION)$(SPACE)-Wl,$(<[-1]:D=) ' +
+        '-shared $(START-GROUP) "$(>)" "$(LIBRARIES)" $(FINDLIBS-ST-PFX) ' +
+        '-l$(FINDLIBS-ST) $(FINDLIBS-SA-PFX) -l$(FINDLIBS-SA) $(END-GROUP) ' +
+        '$(OPTIONS) $(USER_OPTIONS)',
+    function = gcc_link_dll,
+    bound_list=['LIBRARIES'])
+
+# Set up threading support. It's somewhat contrived, so perform it at the end,
+# to avoid cluttering other code.
+
+if on_windows():
+    flags('gcc', 'OPTIONS', ['<threading>multi'], ['-mthreads'])
+elif bjam.variable('UNIX'):
+    jamuname = bjam.variable('JAMUNAME')
+    if jamuname.startswith('SunOS'):
+        flags('gcc', 'OPTIONS', ['<threading>multi'], ['-pthreads'])
+        flags('gcc', 'FINDLIBS-SA', [], ['rt'])
+    elif jamuname == 'BeOS':
+        # BeOS has no threading options, don't set anything here.
+        pass
+    elif jamuname.endswith('BSD'):
+        flags('gcc', 'OPTIONS', ['<threading>multi'], ['-pthread'])
+        # there is no -lrt on BSD
+    elif jamuname == 'DragonFly':
+        flags('gcc', 'OPTIONS', ['<threading>multi'], ['-pthread'])
+        # there is no -lrt on BSD; DragonFly is a FreeBSD variant,
+        # which annoyingly doesn't say it's a *BSD.
+    elif jamuname == 'IRIX':
+        # gcc on IRIX does not support multi-threading, don't set anything here.
+        pass
+    elif jamuname == 'Darwin':
+        # Darwin has no threading options, don't set anything here.
+        pass
+    else:
+        flags('gcc', 'OPTIONS', ['<threading>multi'], ['-pthread'])
+        flags('gcc', 'FINDLIBS-SA', [], ['rt'])
+
+def cpu_flags(toolset, variable, architecture, instruction_set, values, default=None):
+    # FIXME: for some reason this fails. Probably out-of-date feature code.
+##    if default:
+##        flags(toolset, variable,
+##              ['<architecture>' + architecture + '/<instruction-set>'],
+##              values)
+    flags(toolset, variable,
+          #FIXME: same as above
+          [##'<architecture>/<instruction-set>' + instruction_set,
+           '<architecture>' + architecture + '/<instruction-set>' + instruction_set],
+          values)
+
+# Set architecture/instruction-set options.
+#
+# x86 and compatible
+flags('gcc', 'OPTIONS', ['<architecture>x86/<address-model>32'], ['-m32'])
+flags('gcc', 'OPTIONS', ['<architecture>x86/<address-model>64'], ['-m64'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'i386', ['-march=i386'], default=True)
+cpu_flags('gcc', 'OPTIONS', 'x86', 'i486', ['-march=i486'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'i586', ['-march=i586'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'i686', ['-march=i686'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'pentium', ['-march=pentium'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'pentium-mmx', ['-march=pentium-mmx'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'pentiumpro', ['-march=pentiumpro'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'pentium2', ['-march=pentium2'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'pentium3', ['-march=pentium3'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'pentium3m', ['-march=pentium3m'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'pentium-m', ['-march=pentium-m'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'pentium4', ['-march=pentium4'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'pentium4m', ['-march=pentium4m'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'prescott', ['-march=prescott'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'nocona', ['-march=nocona'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'k6', ['-march=k6'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'k6-2', ['-march=k6-2'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'k6-3', ['-march=k6-3'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'athlon', ['-march=athlon'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'athlon-tbird', ['-march=athlon-tbird'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'athlon-4', ['-march=athlon-4'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'athlon-xp', ['-march=athlon-xp'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'athlon-mp', ['-march=athlon-mp'])
+##
+cpu_flags('gcc', 'OPTIONS', 'x86', 'k8', ['-march=k8'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'opteron', ['-march=opteron'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'athlon64', ['-march=athlon64'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'athlon-fx', ['-march=athlon-fx'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'winchip-c6', ['-march=winchip-c6'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'winchip2', ['-march=winchip2'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'c3', ['-march=c3'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'c3-2', ['-march=c3-2'])
+# Sparc
+flags('gcc', 'OPTIONS', ['<architecture>sparc/<address-model>32'], ['-m32'])
+flags('gcc', 'OPTIONS', ['<architecture>sparc/<address-model>64'], ['-m64'])
+cpu_flags('gcc', 'OPTIONS', 'sparc', 'c3', ['-mcpu=c3'], default=True)
+cpu_flags('gcc', 'OPTIONS', 'sparc', 'v7', ['-mcpu=v7'])
+cpu_flags('gcc', 'OPTIONS', 'sparc', 'cypress', ['-mcpu=cypress'])
+cpu_flags('gcc', 'OPTIONS', 'sparc', 'v8', ['-mcpu=v8'])
+cpu_flags('gcc', 'OPTIONS', 'sparc', 'supersparc', ['-mcpu=supersparc'])
+cpu_flags('gcc', 'OPTIONS', 'sparc', 'sparclite', ['-mcpu=sparclite'])
+cpu_flags('gcc', 'OPTIONS', 'sparc', 'hypersparc', ['-mcpu=hypersparc'])
+cpu_flags('gcc', 'OPTIONS', 'sparc', 'sparclite86x', ['-mcpu=sparclite86x'])
+cpu_flags('gcc', 'OPTIONS', 'sparc', 'f930', ['-mcpu=f930'])
+cpu_flags('gcc', 'OPTIONS', 'sparc', 'f934', ['-mcpu=f934'])
+cpu_flags('gcc', 'OPTIONS', 'sparc', 'sparclet', ['-mcpu=sparclet'])
+cpu_flags('gcc', 'OPTIONS', 'sparc', 'tsc701', ['-mcpu=tsc701'])
+cpu_flags('gcc', 'OPTIONS', 'sparc', 'v9', ['-mcpu=v9'])
+cpu_flags('gcc', 'OPTIONS', 'sparc', 'ultrasparc', ['-mcpu=ultrasparc'])
+cpu_flags('gcc', 'OPTIONS', 'sparc', 'ultrasparc3', ['-mcpu=ultrasparc3'])
+# RS/6000 & PowerPC
+flags('gcc', 'OPTIONS', ['<architecture>power/<address-model>32'], ['-m32'])
+flags('gcc', 'OPTIONS', ['<architecture>power/<address-model>64'], ['-m64'])
+cpu_flags('gcc', 'OPTIONS', 'power', '403', ['-mcpu=403'])
+cpu_flags('gcc', 'OPTIONS', 'power', '505', ['-mcpu=505'])
+cpu_flags('gcc', 'OPTIONS', 'power', '601', ['-mcpu=601'])
+cpu_flags('gcc', 'OPTIONS', 'power', '602', ['-mcpu=602'])
+cpu_flags('gcc', 'OPTIONS', 'power', '603', ['-mcpu=603'])
+cpu_flags('gcc', 'OPTIONS', 'power', '603e', ['-mcpu=603e'])
+cpu_flags('gcc', 'OPTIONS', 'power', '604', ['-mcpu=604'])
+cpu_flags('gcc', 'OPTIONS', 'power', '604e', ['-mcpu=604e'])
+cpu_flags('gcc', 'OPTIONS', 'power', '620', ['-mcpu=620'])
+cpu_flags('gcc', 'OPTIONS', 'power', '630', ['-mcpu=630'])
+cpu_flags('gcc', 'OPTIONS', 'power', '740', ['-mcpu=740'])
+cpu_flags('gcc', 'OPTIONS', 'power', '7400', ['-mcpu=7400'])
+cpu_flags('gcc', 'OPTIONS', 'power', '7450', ['-mcpu=7450'])
+cpu_flags('gcc', 'OPTIONS', 'power', '750', ['-mcpu=750'])
+cpu_flags('gcc', 'OPTIONS', 'power', '801', ['-mcpu=801'])
+cpu_flags('gcc', 'OPTIONS', 'power', '821', ['-mcpu=821'])
+cpu_flags('gcc', 'OPTIONS', 'power', '823', ['-mcpu=823'])
+cpu_flags('gcc', 'OPTIONS', 'power', '860', ['-mcpu=860'])
+cpu_flags('gcc', 'OPTIONS', 'power', '970', ['-mcpu=970'])
+cpu_flags('gcc', 'OPTIONS', 'power', '8540', ['-mcpu=8540'])
+cpu_flags('gcc', 'OPTIONS', 'power', 'power', ['-mcpu=power'])
+cpu_flags('gcc', 'OPTIONS', 'power', 'power2', ['-mcpu=power2'])
+cpu_flags('gcc', 'OPTIONS', 'power', 'power3', ['-mcpu=power3'])
+cpu_flags('gcc', 'OPTIONS', 'power', 'power4', ['-mcpu=power4'])
+cpu_flags('gcc', 'OPTIONS', 'power', 'power5', ['-mcpu=power5'])
+cpu_flags('gcc', 'OPTIONS', 'power', 'powerpc', ['-mcpu=powerpc'])
+cpu_flags('gcc', 'OPTIONS', 'power', 'powerpc64', ['-mcpu=powerpc64'])
+cpu_flags('gcc', 'OPTIONS', 'power', 'rios', ['-mcpu=rios'])
+cpu_flags('gcc', 'OPTIONS', 'power', 'rios1', ['-mcpu=rios1'])
+cpu_flags('gcc', 'OPTIONS', 'power', 'rios2', ['-mcpu=rios2'])
+cpu_flags('gcc', 'OPTIONS', 'power', 'rsc', ['-mcpu=rsc'])
+cpu_flags('gcc', 'OPTIONS', 'power', 'rs64a', ['-mcpu=rs64'])
+# AIX variant of RS/6000 & PowerPC
+flags('gcc', 'OPTIONS', ['<architecture>power/<address-model>32/<target-os>aix'], ['-maix32'])
+flags('gcc', 'OPTIONS', ['<architecture>power/<address-model>64/<target-os>aix'], ['-maix64'])
+flags('gcc', 'AROPTIONS', ['<architecture>power/<address-model>64/<target-os>aix'], ['-X 64'])
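
A minimal, self-contained sketch of what each cpu_flags() call above reduces to:
a single flags() registration whose condition string combines the architecture
and instruction-set properties. The flags() function here is only a recording
stub standing in for the real toolset flag registration, so the snippet runs
outside of bjam; the i686 call mirrors one of the registrations above.

    # Recording stub; the real flags() registers the values with the build
    # engine instead of just storing them.
    recorded = []

    def flags(toolset, variable, condition, values):
        recorded.append((toolset, variable, condition, values))

    def cpu_flags(toolset, variable, architecture, instruction_set, values,
                  default=None):
        # Mirrors the ported helper: one condition of the form
        # <architecture>X/<instruction-set>Y (the 'default' branch is still
        # commented out in the port above).
        flags(toolset, variable,
              ['<architecture>%s/<instruction-set>%s'
               % (architecture, instruction_set)],
              values)

    cpu_flags('gcc', 'OPTIONS', 'x86', 'i686', ['-march=i686'])
    print recorded[-1]
    # ('gcc', 'OPTIONS', ['<architecture>x86/<instruction-set>i686'],
    #  ['-march=i686'])
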
Added: branches/build/python_port/python/boost/build/tools/pch.py
==============================================================================
--- (empty file)
+++ branches/build/python_port/python/boost/build/tools/pch.py	2008-07-07 00:30:47 EDT (Mon, 07 Jul 2008)
@@ -0,0 +1,83 @@
+# Status: Being ported by Steven Watanabe
+# Base revision: 47077
+#
+# Copyright (c) 2005 Reece H. Dunn.
+# Copyright 2006 Ilya Sokolov
+# Copyright (c) 2008 Steven Watanabe
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+##### Using Precompiled Headers (Quick Guide) #####
+#
+# Make precompiled mypch.hpp:
+#
+#    import pch ;
+#
+#    cpp-pch mypch
+#      : # sources
+#        mypch.hpp
+#      : # requirements
+#        <toolset>msvc:<source>mypch.cpp
+#      ;
+#
+# Add cpp-pch to sources:
+#
+#    exe hello
+#      : main.cpp hello.cpp mypch
+#      ;
+
+from boost.build.build import type, feature, generators
+
+type.register('PCH', ['pch'])
+type.register('C_PCH', [], 'PCH')
+type.register('CPP_PCH', [], 'PCH')
+
+# Control precompiled header (PCH) generation.
+feature.feature('pch',
+                ['on', 'off'],
+                ['propagated'])
+
+feature.feature('pch-header', [], ['free', 'dependency'])
+feature.feature('pch-file', [], ['free', 'dependency'])
+
+class PchGenerator(generators.Generator):
+    """
+        Base PCH generator. The 'run' method has the logic to prevent this generator
+        from being run unless it's being used for a top-level PCH target.
+    """
+    def action_class(self):
+        return 'compile-action'
+
+    def run(self, project, name, prop_set, sources):
+        if not name:
+            # Unless this generator is invoked as the top-most generator for a
+            # main target, fail. This allows using the 'H' type as an input
+            # type for this generator while preventing Boost.Build from trying
+            # this generator when not explicitly asked for.
+            #
+            # One bad example is msvc, where the pch generator produces both a
+            # PCH target and an OBJ target, so if any header is generated (e.g.
+            # by bison or msidl), we would try to use the pch generator to get
+            # an OBJ from that H, which is completely wrong. Restricting this
+            # generator to pch main targets only solves that problem.
+            pass
+        else:
+            r = self.run_pch(project, name,
+                 prop_set.add_raw('<define>BOOST_BUILD_PCH_ENABLED'),
+                 sources)
+            return generators.add_usage_requirements(
+                r, ['<define>BOOST_BUILD_PCH_ENABLED'])
+
+    # This rule must be overridden by the derived classes.
+    def run_pch(self, project, name, prop_set, sources):
+        pass
+
+#FIXME: dummy-generator in builtins.jam needs to be ported.
+# NOTE: requirements are empty, default pch generator can be applied when
+# pch=off.
+###generators.register(
+###    [ new dummy-generator pch.default-c-pch-generator   : :   C_PCH ] ;
+###generators.register
+###    [ new dummy-generator pch.default-cpp-pch-generator : : CPP_PCH ] ;
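
For orientation, a hedged sketch of how a toolset-specific generator might plug
into the PchGenerator base class above. The class name and generator id are
purely illustrative and not part of this changeset; the constructor arguments
follow the (id, composing, source_types, target_types_and_names, requirements)
signature of generators.Generator, and run_pch() simply defers to the ordinary
Generator.run machinery.

    from boost.build.build import generators
    from boost.build.tools.pch import PchGenerator

    class ExamplePchGenerator(PchGenerator):
        # Illustrative subclass; a real toolset generator would pick the header
        # out of 'sources' and emit the precompiled-header target here.
        def run_pch(self, project, name, prop_set, sources):
            return generators.Generator.run(self, project, name, prop_set,
                                            sources)

    # Hypothetical registration: consume C++ headers, produce CPP_PCH targets.
    generators.register(
        ExamplePchGenerator('example.pch', False, ['H'], ['CPP_PCH'], []))
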
Modified: branches/build/python_port/python/boost/build/tools/rc.py
==============================================================================
--- branches/build/python_port/python/boost/build/tools/rc.py	(original)
+++ branches/build/python_port/python/boost/build/tools/rc.py	2008-07-07 00:30:47 EDT (Mon, 07 Jul 2008)
@@ -54,7 +54,10 @@
         before calling this. And still get the functionality of build failures when
         the resource compiler can't be found.
     """
-    rc_type = feature.get_values('<rc-type>', options)[0]
+    rc_type = feature.get_values('<rc-type>', options)
+    if rc_type:
+        assert(len(rc_type) == 1)
+        rc_type = rc_type[0]
 
     if command and condition and rc_type:
         flags('rc.compile.resource', '.RC', condition, command)
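
The guard above accounts for feature.get_values() returning a (possibly empty)
list rather than a bare value. The same pattern in isolation; the 'windres'
value string is illustrative only:

    def first_or_none(values):
        # Collapse the zero-or-one element list returned by feature.get_values
        # into a single value or None.
        if values:
            assert len(values) == 1
            return values[0]
        return None

    assert first_or_none([]) is None
    assert first_or_none(['windres']) == 'windres'
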
Modified: branches/build/python_port/python/boost/build/tools/types/__init__.py
==============================================================================
--- branches/build/python_port/python/boost/build/tools/types/__init__.py	(original)
+++ branches/build/python_port/python/boost/build/tools/types/__init__.py	2008-07-07 00:30:47 EDT (Mon, 07 Jul 2008)
@@ -1,4 +1,5 @@
 __all__ = [
+    'asm',
     'cpp',
     'exe',
     'html',
@@ -11,7 +12,7 @@
     for i in __all__:
         m = __import__ (__name__ + '.' + i)
         reg = i + '.register ()'
-        exec (reg)
+        #exec (reg)
 
 # TODO: (PF) I thought these would be imported automatically. Anyone knows why they aren't?
 register_all ()
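
The exec-based dispatch in register_all() is disabled above. A possible
exec-free variant, assuming each submodule listed in __all__ exposes a
module-level register() the way asm.py below does, might look roughly like this
(a sketch only, not what the port currently does):

    def register_all():
        for name in __all__:
            # __import__ with a non-empty fromlist returns the submodule itself
            # rather than the top-level package, so register() can be called
            # directly on it.
            module = __import__(__name__ + '.' + name, globals(), locals(),
                                [name])
            register = getattr(module, 'register', None)
            if register is not None:
                register()
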
Added: branches/build/python_port/python/boost/build/tools/types/asm.py
==============================================================================
--- (empty file)
+++ branches/build/python_port/python/boost/build/tools/types/asm.py	2008-07-07 00:30:47 EDT (Mon, 07 Jul 2008)
@@ -0,0 +1,13 @@
+# Copyright Craig Rodrigues 2005.
+# Copyright (c) 2008 Steven Watanabe
+#
+# Distributed under the Boost
+# Software License, Version 1.0. (See accompanying
+# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+from boost.build.build import type
+
+def register():
+    type.register_type('ASM', ['s', 'S', 'asm'])
+
+register()
Modified: branches/build/python_port/python/boost/build/tools/unix.py
==============================================================================
--- branches/build/python_port/python/boost/build/tools/unix.py	(original)
+++ branches/build/python_port/python/boost/build/tools/unix.py	2008-07-07 00:30:47 EDT (Mon, 07 Jul 2008)
@@ -21,7 +21,8 @@
     
     def run (self, project, name, prop_set, sources):
         result = builtin.LinkingGenerator.run (self, project, name, prop_set, sources)
-        set_library_order (project.manager (), sources, prop_set, result [1])
+        if result:
+            set_library_order (project.manager (), sources, prop_set, result [1])
                                 
         return result
     
Modified: branches/build/python_port/python/boost/build/util/path.py
==============================================================================
--- branches/build/python_port/python/boost/build/util/path.py	(original)
+++ branches/build/python_port/python/boost/build/util/path.py	2008-07-07 00:30:47 EDT (Mon, 07 Jul 2008)
@@ -430,12 +430,13 @@
     names = ['PATH', 'Path', 'path']
     
     for name in names:
-        raw.append (os.environ.get (name, ''))
-        
+        raw.append(os.environ.get (name, ''))
+    
     result = []
-    for p in raw:
-        if p:
-            result.append (make (p))
+    for elem in raw:
+        if elem:
+            for p in elem.split(os.path.pathsep):
+                result.append(make(p))
 
     return result
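
The change above splits each PATH-like environment variable on the platform's
separator before converting the entries, instead of treating the whole variable
as one path. A small standalone illustration of that behaviour, with make()
replaced by a trivial stand-in for the real path conversion:

    import os

    def make(native):
        # Stand-in for path.make(); the real function converts a native path
        # to the internal representation.
        return native.replace(os.sep, '/')

    raw = [os.environ.get(name, '') for name in ('PATH', 'Path', 'path')]
    result = []
    for elem in raw:
        if elem:
            for p in elem.split(os.path.pathsep):
                result.append(make(p))
    print result
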
 
Modified: branches/build/python_port/python/boost/build/util/utility.py
==============================================================================
--- branches/build/python_port/python/boost/build/util/utility.py	(original)
+++ branches/build/python_port/python/boost/build/util/utility.py	2008-07-07 00:30:47 EDT (Mon, 07 Jul 2008)
@@ -130,7 +130,9 @@
     return (toolset, name)
 
 def os_name ():
-    return bjam.variable("OS")
+    result = bjam.variable("OS")
+    assert(len(result) == 1)
+    return result[0]
 
 def platform ():
     return bjam.variable("OSPLAT")
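
The os_name() change above reflects the fact that bjam.variable() always hands
back a list of strings. A tiny self-contained illustration with bjam stubbed
out (the module only exists inside the bjam-embedded interpreter, and the
'LINUX' value is just an example):

    class FakeBjam(object):
        def variable(self, name):
            # bjam variables are always lists of strings.
            return {'OS': ['LINUX']}.get(name, [])

    bjam = FakeBjam()

    def os_name():
        result = bjam.variable("OS")
        assert len(result) == 1
        return result[0]

    assert os_name() == 'LINUX'
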