early-access version 3088

This commit is contained in:
pineappleEA
2022-11-05 15:35:56 +01:00
parent 4e4fc25ce3
commit b601909c6d
35519 changed files with 5996896 additions and 860 deletions

View File

@@ -0,0 +1,18 @@
# Copyright (c) 2003 Vladimir Prus.
#
# Use, modification and distribution is subject to the Boost Software
# License Version 1.0. (See accompanying file LICENSE.txt or
# https://www.bfgroup.xyz/b2/LICENSE.txt)
# This file handles the initial phase of B2 loading.
# Boost.Jam has already figured out where B2 is
# and loads this file, which is responsible for initializing
# basic facilities such as the module system and loading the
# main B2 module, build-system.jam.
#
# The exact operation of this module is not interesting; it makes
# sense to look at build-system.jam right away.
# Load the kernel/bootstrap.jam, which does all the work.
.bootstrap-file = $(.bootstrap-file:D)/kernel/bootstrap.jam ;
include $(.bootstrap-file) ;

File diff suppressed because it is too large.

View File

@@ -0,0 +1,326 @@
# Copyright (c) 2010 Vladimir Prus.
# Copyright (c) 2013 Steven Watanabe
# Copyright (c) 2021 Rene Ferdinand Rivera Morell
#
# Use, modification and distribution is subject to the Boost Software
# License Version 1.0. (See accompanying file LICENSE.txt or
# https://www.bfgroup.xyz/b2/LICENSE.txt)
import property-set ;
import path ;
import modules ;
import "class" ;
import errors ;
import configure ;
import feature ;
import project ;
import virtual-target ;
import generators ;
import property ;
import print ;
import regex ;
project.initialize $(__name__) ;
.project = [ project.current ] ;
project ac ;
feature.feature ac.print-text : : free ;
rule generate-include ( target : sources * : properties * )
{
print.output $(target) ;
local text = [ property.select <ac.print-text> : $(properties) ] ;
if $(text)
{
print.text $(text:G=) : true ;
}
else
{
local header = [ property.select <include> : $(properties) ] ;
print.text "#include <$(header:G=)>\n" : true ;
}
}
rule generate-main ( target : sources * : properties * )
{
print.output $(target) ;
print.text "int main() {}" : true ;
}
rule find-include-path ( properties : header : provided-path ? : test-source ? )
{
if $(provided-path) && [ path.exists [ path.root $(header) $(provided-path) ] ]
{
return $(provided-path) ;
}
else
{
local a = [ class.new action : ac.generate-include : [ property-set.create <include>$(header) <ac.print-text>$(test-source) ] ] ;
# Create a new CPP target named after the header.
# Replace dots (".") in target basename for portability.
local basename = [ regex.replace $(header:D=) "[.]" "_" ] ;
local header-target = $(header:S=:B=$(basename)) ;
local cpp = [ class.new file-target $(header-target:S=.cpp) exact : CPP : $(.project) : $(a) ] ;
cpp = [ virtual-target.register $(cpp) ] ;
$(cpp).root true ;
local result = [ generators.construct $(.project) $(header-target) : OBJ : $(properties) : $(cpp) : true ] ;
configure.maybe-force-rebuild $(result[2-]) ;
local jam-targets ;
for local t in $(result[2-])
{
jam-targets += [ $(t).actualize ] ;
}
if [ UPDATE_NOW $(jam-targets) : [ modules.peek configure : .log-fd ]
: ignore-minus-n ]
{
return %default ;
}
}
}
rule construct-library ( name : property-set : provided-path ? )
{
local lib-props = [ $(property-set).add-raw <name>$(name) <search>$(provided-path) ] ;
return [ generators.construct $(.project) lib-$(name)
: SEARCHED_LIB : $(lib-props) : : true ] ;
}
rule find-library ( properties : names + : provided-path ? )
{
local result ;
if [ $(properties).get <link> ] = shared
{
link-opts = <link>shared <link>static ;
}
else
{
link-opts = <link>static <link>shared ;
}
while $(link-opts)
{
local names-iter = $(names) ;
properties = [ $(properties).refine [ property-set.create $(link-opts[1]) ] ] ;
while $(names-iter)
{
local name = $(names-iter[1]) ;
local lib = [ construct-library $(name) : $(properties) : $(provided-path) ] ;
local a = [ class.new action : ac.generate-main :
[ property-set.empty ] ] ;
local main.cpp = [ virtual-target.register
[ class.new file-target main-$(name).cpp exact : CPP : $(.project) : $(a) ] ] ;
$(main.cpp).root true ;
local test = [ generators.construct $(.project) $(name) : EXE
: [ $(properties).add $(lib[1]) ] : $(main.cpp) $(lib[2-])
: true ] ;
configure.maybe-force-rebuild $(test[2-]) ;
local jam-targets ;
for t in $(test[2-])
{
jam-targets += [ $(t).actualize ] ;
}
if [ UPDATE_NOW $(jam-targets) : [ modules.peek configure : .log-fd ]
: ignore-minus-n ]
{
result = $(name) $(link-opts[1]) ;
names-iter = ; link-opts = ; # break
}
names-iter = $(names-iter[2-]) ;
}
link-opts = $(link-opts[2-]) ;
}
return $(result) ;
}
class ac-library : basic-target
{
import errors ;
import indirect ;
import virtual-target ;
import ac ;
import configure ;
import config-cache ;
import os ;
rule __init__ ( name : project : requirements * : include-path ? : library-path ? : library-name ? )
{
basic-target.__init__ $(name) : $(project) : : $(requirements) ;
reconfigure $(include-path) : $(library-path) : $(library-name) ;
}
rule set-header ( header )
{
self.header = $(header) ;
}
rule set-default-names ( names + )
{
self.default-names = $(names) ;
}
rule set-header-test ( source )
{
self.header-test = $(source) ;
}
rule reconfigure ( include-path ? : library-path ? : library-name ? )
{
if $(include-path) || $(library-path) || $(library-name)
{
check-not-configured ;
self.include-path = $(include-path) ;
self.library-path = $(library-path) ;
self.library-name = $(library-name) ;
}
}
rule set-target ( target )
{
check-not-configured ;
self.target = $(target) ;
}
rule check-not-configured ( )
{
if $(self.include-path) || $(self.library-path) || $(self.library-name) || $(self.target)
{
errors.user-error [ name ] "is already configured" ;
}
}
rule construct ( name : sources * : property-set )
{
if $(self.target)
{
return [ $(self.target).generate $(property-set) ] ;
}
else
{
local use-environment ;
if ! $(self.library-name) && ! $(self.include-path) && ! $(self.library-path)
{
use-environment = true ;
}
local libnames = $(self.library-name) ;
if ! $(libnames) && $(use-environment)
{
libnames = [ os.environ $(name:U)_NAME ] ;
# Backward compatibility only.
libnames ?= [ os.environ $(name:U)_BINARY ] ;
}
libnames ?= $(self.default-names) ;
local include-path = $(self.include-path) ;
if ! $(include-path) && $(use-environment)
{
include-path = [ os.environ $(name:U)_INCLUDE ] ;
}
local library-path = $(self.library-path) ;
if ! $(library-path) && $(use-environment)
{
library-path = [ os.environ $(name:U)_LIBRARY_PATH ] ;
# Backwards compatibility only
library-path ?= [ os.environ $(name:U)_LIBPATH ] ;
}
local relevant = [ property.select [ configure.get-relevant-features ] <link> :
[ $(property-set).raw ] ] ;
local min = [ property.as-path [ SORT [ feature.minimize $(relevant) ] ] ] ;
local key = ac-library-$(name)-$(relevant:J=-) ;
local lookup = [ config-cache.get $(key) ] ;
if $(lookup)
{
if $(lookup) = missing
{
configure.log-library-search-result $(name) : "no (cached)" $(min) ;
return [ property-set.empty ] ;
}
else
{
local includes = $(lookup[1]) ;
if $(includes) = %default
{
includes = ;
}
local library = [ ac.construct-library $(lookup[2]) :
[ $(property-set).refine [ property-set.create $(lookup[3]) ] ] : $(library-path) ] ;
configure.log-library-search-result $(name) : "yes (cached)" $(min) ;
return [ $(library[1]).add-raw <include>$(includes) ] $(library[2-]) ;
}
}
else
{
local includes = [ ac.find-include-path $(property-set) : $(self.header) : $(include-path) : $(self.header-test) ] ;
local library = [ ac.find-library $(property-set) : $(libnames) : $(library-path) ] ;
if $(includes) && $(library)
{
config-cache.set $(key) : $(includes) $(library) ;
if $(includes) = %default
{
includes = ;
}
library = [ ac.construct-library $(library[1]) :
[ $(property-set).refine [ property-set.create $(library[2]) ] ] : $(library-path) ] ;
configure.log-library-search-result $(name) : "yes" $(min) ;
return [ $(library[1]).add-raw <include>$(includes) ] $(library[2-]) ;
}
else
{
config-cache.set $(key) : missing ;
configure.log-library-search-result $(name) : "no" $(min) ;
return [ property-set.empty ] ;
}
}
}
}
}
class check-library-worker
{
import property-set ;
import targets ;
import property ;
rule __init__ ( target : true-properties * : false-properties * )
{
self.target = $(target) ;
self.true-properties = $(true-properties) ;
self.false-properties = $(false-properties) ;
}
rule check ( properties * )
{
local choosen ;
local t = [ targets.current ] ;
local p = [ $(t).project ] ;
local ps = [ property-set.create $(properties) ] ;
ps = [ $(ps).propagated ] ;
local generated =
[ targets.generate-from-reference $(self.target) : $(p) : $(ps) ] ;
if $(generated[2])
{
choosen = $(self.true-properties) ;
}
else
{
choosen = $(self.false-properties) ;
}
return [ property.evaluate-conditionals-in-context $(choosen) :
$(properties) ] ;
}
}
rule check-library ( target : true-properties * : false-properties * )
{
local instance = [ class.new check-library-worker $(target) :
$(true-properties) : $(false-properties) ] ;
return <conditional>@$(instance).check
[ property.evaluate-conditional-relevance
$(true-properties) $(false-properties)
: [ configure.get-relevant-features ] <link> ] ;
}
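# Illustrative usage (not part of the original file): check-library returns a
# <conditional> property suitable for a target's requirements; the /zlib//zlib
# metatarget and the <define> names below are hypothetical.
#
#   import ac ;
#   exe app : app.cpp :
#       [ ac.check-library /zlib//zlib
#           : <library>/zlib//zlib <define>HAVE_ZLIB
#           : <define>NO_ZLIB ] ;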

View File

@@ -0,0 +1,82 @@
# Copyright 2003, 2004, 2006 Vladimir Prus
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE.txt or copy at
# https://www.bfgroup.xyz/b2/LICENSE.txt)
# This module defines the 'alias' rule and the associated target class.
#
# Alias is just a main target which returns its source targets without any
# processing. For example:
#
# alias bin : hello test_hello ;
# alias lib : helpers xml_parser ;
#
# Another important use of 'alias' is to conveniently group source files:
#
# alias platform-src : win.cpp : <os>NT ;
# alias platform-src : linux.cpp : <os>LINUX ;
# exe main : main.cpp platform-src ;
#
# Lastly, it is possible to create a local alias for some target, with different
# properties:
#
# alias big_lib : : @/external_project/big_lib/<link>static ;
#
import "class" : new ;
import param ;
import project ;
import property-set ;
import targets ;
class alias-target-class : basic-target
{
rule __init__ ( name : project : sources * : requirements *
: default-build * : usage-requirements * )
{
basic-target.__init__ $(name) : $(project) : $(sources) :
$(requirements) : $(default-build) : $(usage-requirements) ;
}
rule construct ( name : source-targets * : property-set )
{
return [ property-set.empty ] $(source-targets) ;
}
rule compute-usage-requirements ( subvariant )
{
local base = [ basic-target.compute-usage-requirements $(subvariant) ] ;
return [ $(base).add [ $(subvariant).sources-usage-requirements ] ] ;
}
rule skip-from-usage-requirements ( )
{
}
}
# Declares the 'alias' target. It will process its sources virtual-targets by
# returning them unaltered as its own constructed virtual-targets.
#
rule alias ( name : sources * : requirements * : default-build * :
usage-requirements * )
{
param.handle-named-params
sources requirements default-build usage-requirements ;
local project = [ project.current ] ;
targets.main-target-alternative
[ new alias-target-class $(name) : $(project)
: [ targets.main-target-sources $(sources) : $(name) : no-renaming ]
: [ targets.main-target-requirements $(requirements) : $(project) ]
: [ targets.main-target-default-build $(default-build) : $(project)
]
: [ targets.main-target-usage-requirements $(usage-requirements) :
$(project) ]
] ;
}
IMPORT $(__name__) : alias : : alias ;

View File

@@ -0,0 +1,75 @@
# Copyright 2003, 2004, 2006 Vladimir Prus
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt)
# Status: ported (danielw)
# Base revision: 56043
# This module defines the 'alias' rule and associated class.
#
# Alias is just a main target which returns its source targets without any
# processing. For example::
#
# alias bin : hello test_hello ;
# alias lib : helpers xml_parser ;
#
# Another important use of 'alias' is to conveniently group source files::
#
# alias platform-src : win.cpp : <os>NT ;
# alias platform-src : linux.cpp : <os>LINUX ;
# exe main : main.cpp platform-src ;
#
# Lastly, it is possible to create a local alias for some target, with different
# properties::
#
# alias big_lib : : @/external_project/big_lib/<link>static ;
#
import targets
import property_set
from b2.manager import get_manager
from b2.util import metatarget, is_iterable_typed
class AliasTarget(targets.BasicTarget):
def __init__(self, *args):
targets.BasicTarget.__init__(self, *args)
def construct(self, name, source_targets, properties):
if __debug__:
from .virtual_target import VirtualTarget
assert isinstance(name, basestring)
assert is_iterable_typed(source_targets, VirtualTarget)
assert isinstance(properties, property_set.PropertySet)
return [property_set.empty(), source_targets]
def compute_usage_requirements(self, subvariant):
if __debug__:
from .virtual_target import Subvariant
assert isinstance(subvariant, Subvariant)
base = targets.BasicTarget.compute_usage_requirements(self, subvariant)
# Add the sources' usage requirements. If we don't do this, "alias" does not
# behave like a 100% alias.
return base.add(subvariant.sources_usage_requirements())
@metatarget
def alias(name, sources=[], requirements=[], default_build=[], usage_requirements=[]):
assert isinstance(name, basestring)
assert is_iterable_typed(sources, basestring)
assert is_iterable_typed(requirements, basestring)
assert is_iterable_typed(default_build, basestring)
assert is_iterable_typed(usage_requirements, basestring)
project = get_manager().projects().current()
targets = get_manager().targets()
targets.main_target_alternative(AliasTarget(
name, project,
targets.main_target_sources(sources, name, no_renaming=True),
targets.main_target_requirements(requirements or [], project),
targets.main_target_default_build(default_build, project),
targets.main_target_usage_requirements(usage_requirements or [], project)))
# Declares the 'alias' target. It will build sources, and return them unaltered.
get_manager().projects().add_rule("alias", alias)

View File

@@ -0,0 +1,418 @@
# Copyright 2002 Dave Abrahams
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt)
import "class" : new ;
import sequence ;
import set ;
import regex ;
import feature ;
import property ;
import container ;
import string ;
# Transform property-set by applying f to each component property.
#
local rule apply-to-property-set ( f property-set )
{
local properties = [ feature.split $(property-set) ] ;
return [ string.join [ $(f) $(properties) ] : / ] ;
}
# Expand the given build request by combining all property-sets which do not
# specify conflicting non-free features. Expects all the project files to
# already be loaded.
#
rule expand-no-defaults ( property-sets * )
{
# First make all features and subfeatures explicit.
local expanded-property-sets = [ sequence.transform apply-to-property-set
feature.expand-subfeatures : $(property-sets) ] ;
# Now combine all of the expanded property-sets
local product = [ x-product $(expanded-property-sets) : $(feature-space) ] ;
return $(product) ;
}
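# Illustrative example (mirroring the __test__ rule below, with the toolset,
# stdlib and variant features declared there):
#
#   expand-no-defaults gcc-3.0.1/stlport msvc/stlport msvc debug ;
#
# returns
#
#   <toolset>gcc/<toolset-gcc:version>3.0.1/<stdlib>stlport/<variant>debug
#   <toolset>msvc/<stdlib>stlport/<variant>debug
#   <toolset>msvc/<variant>debug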
# Update the list of expected conflicts based on the new
# features.
#
local rule remove-conflicts ( conflicts * : features * )
{
local result ;
for local c in $(conflicts)
{
if ! [ set.intersection [ regex.split $(c) "/" ] : $(features) ]
{
result += $(c) ;
}
}
return $(result) ;
}
# Implementation of x-product, below. Expects all the project files to already
# be loaded.
#
local rule x-product-aux ( property-sets + )
{
local result ;
local p = [ feature.split $(property-sets[1]) ] ;
local f = [ set.difference $(p:G) : [ feature.free-features ] ] ;
local seen ;
local extra-conflicts ;
# No conflict with things used at a higher level?
if ! [ set.intersection $(f) : $(x-product-used) ]
{
local x-product-seen ;
local x-product-conflicts =
[ remove-conflicts $(x-product-conflicts) : $(f) ] ;
{
# Do not mix in any conflicting features.
local x-product-used = $(x-product-used) $(f) ;
if $(property-sets[2])
{
local rest = [ x-product-aux $(property-sets[2-]) ] ;
result = $(property-sets[1])/$(rest) ;
}
if ! $(x-product-conflicts)
{
result ?= $(property-sets[1]) ;
}
}
# If we did not encounter a conflicting feature lower down, do not
# recurse again.
if ! [ set.intersection $(f) : $(x-product-seen) ]
|| [ remove-conflicts $(x-product-conflicts) : $(x-product-seen) ]
{
property-sets = ;
}
else
{
# A property is only allowed to be absent if it conflicts
# with either a higher or lower layer. We don't need to
# bother setting this if we already know that we don't need
# to recurse again.
extra-conflicts = $(f:J=/) ;
}
seen = $(x-product-seen) ;
}
if $(property-sets[2])
{
# Lower layers expansion must conflict with this
local x-product-conflicts = $(x-product-conflicts) $(extra-conflicts) ;
result += [ x-product-aux $(property-sets[2-]) ] ;
}
# Note that we have seen these features so that higher levels will recurse
# again without them set.
x-product-seen += $(f) $(seen) ;
return $(result) ;
}
# Return the cross-product of all elements of property-sets, less any that would
# contain conflicting values for single-valued features. Expects all the project
# files to already be loaded.
#
# Formal definition:
# Returns all maximum non-conflicting subsets of property-sets.
# The result is a list of all property-sets p such that
# 1. p is composed by joining a subset of property-sets without removing
# duplicates
# 2. p contains at most one instance of every single-valued feature
# 3. Adding any additional element of property-sets to p would
# violate (2)
local rule x-product ( property-sets * )
{
if $(property-sets).non-empty
{
# Prepare some "scoped globals" that can be used by the implementation
# function, x-product-aux.
local x-product-seen x-product-used x-product-conflicts ;
return [ x-product-aux $(property-sets) : $(feature-space) ] ;
}
# Otherwise return empty.
}
# Returns true if either 'v' or the part of 'v' before the first '-' symbol is
# an implicit value. Expects all the project files to already be loaded.
#
local rule looks-like-implicit-value ( v )
{
if [ feature.is-implicit-value $(v) ]
{
return true ;
}
else
{
local split = [ regex.split $(v) - ] ;
if [ feature.is-implicit-value $(split[1]) ]
{
return true ;
}
}
}
# Takes the command line tokens (such as taken from the ARGV rule) and
# constructs a build request from them. Returns a vector of two vectors (where
# "vector" means container.jam's "vector"). First is the set of targets
# specified in the command line, and second is the set of requested build
# properties. Expects all the project files to already be loaded.
#
rule from-command-line ( command-line * )
{
local targets ;
local properties ;
command-line = $(command-line[2-]) ;
local skip-next = ;
for local e in $(command-line)
{
if $(skip-next)
{
skip-next = ;
}
else if ! [ MATCH ^(-) : $(e) ]
{
# Build request spec either has "=" in it or completely consists of
# implicit feature values.
local fs = feature-space ;
if [ MATCH "(.*=.*)" : $(e) ]
|| [ looks-like-implicit-value $(e:D=) : $(feature-space) ]
{
properties += $(e) ;
}
else if $(e)
{
targets += $(e) ;
}
}
else if [ MATCH "^(-[-ldjfsto])$" : $(e) ]
{
skip-next = true ;
}
}
return [ new vector
[ new vector $(targets) ]
[ new vector $(properties) ] ] ;
}
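# Illustrative example (mirroring the __test__ rule below): option tokens are
# skipped, the remaining tokens are split into targets and property specs.
#
#   local r = [ from-command-line bjam -d2 --debug debug target runtime-link=dynamic ] ;
#   # [ $(r).get-at 1 ] returns: target
#   # [ $(r).get-at 2 ] returns: debug runtime-link=dynamic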
# Converts a list of command-line build request specification elements into
# internal form. Expects all the project files to already be loaded.
#
rule convert-command-line-elements ( elements * )
{
local result ;
for local e in $(elements)
{
result += [ convert-command-line-element $(e) ] ;
}
return $(result) ;
}
# Converts one element of a command-line build request specification into
# internal form.
local rule convert-command-line-element ( e )
{
local result ;
local parts = [ regex.split $(e) "/" ] ;
while $(parts)
{
local p = $(parts[1]) ;
local m = [ MATCH "([^=]*)=(.*)" : $(p) ] ;
local lresult ;
local feature ;
local values ;
if $(m)
{
feature = $(m[1]) ;
values = [ regex.split $(m[2]) "," ] ;
lresult = <$(feature)>$(values) ;
}
else
{
lresult = [ regex.split $(p) "," ] ;
}
if $(feature) && free in [ feature.attributes <$(feature)> ]
{
# If we have a free feature, then the value is everything
# up to the end of the command-line token. Slashes in
# the following string are not taken to mean separation
# of properties. Commas are also not interpreted specially.
values = $(values:J=,) ;
values = $(values) $(parts[2-]) ;
values = $(values:J=/) ;
lresult = ;
# Optional free features will ignore empty value arguments.
if optional in [ feature.attributes <$(feature)> ]
{
for local v in $(values)
{
if $(v)
{
lresult += <$(feature)>$(v) ;
}
}
}
else
{
lresult = <$(feature)>$(values) ;
}
parts = ;
}
if ! [ MATCH (.*-.*) : $(p) ]
{
# property.validate cannot handle subfeatures, so we avoid the check
# here.
for local p in $(lresult)
{
property.validate $(p) : $(feature-space) ;
}
}
if $(lresult)
{
if ! $(result)
{
result = $(lresult) ;
}
else
{
result = $(result)/$(lresult) ;
}
}
parts = $(parts[2-]) ;
}
return $(result) ;
}
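# Illustrative example (mirroring the __test__ rule below): grouped toolsets
# and comma-separated values are expanded into individual properties.
#
#   convert-command-line-elements msvc gcc,borland/runtime-link=static ;
#   # returns: msvc gcc/<runtime-link>static borland/<runtime-link>static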
rule __test__ ( )
{
import assert ;
import feature ;
feature.prepare-test build-request-test-temp ;
import build-request ;
import build-request : expand-no-defaults : build-request.expand-no-defaults ;
import errors : try catch ;
import feature : feature subfeature ;
feature toolset : gcc msvc borland : implicit ;
subfeature toolset gcc : version : 2.95.2 2.95.3 2.95.4
3.0 3.0.1 3.0.2 : optional ;
feature variant : debug release : implicit composite ;
feature inlining : on off ;
feature "include" : : free ;
feature stdlib : native stlport : implicit ;
feature runtime-link : dynamic static : symmetric ;
# Empty build requests should expand to empty.
assert.result
: build-request.expand-no-defaults ;
assert.result
<toolset>gcc/<toolset-gcc:version>3.0.1/<stdlib>stlport/<variant>debug
<toolset>msvc/<stdlib>stlport/<variant>debug
<toolset>msvc/<variant>debug
: build-request.expand-no-defaults gcc-3.0.1/stlport msvc/stlport msvc debug ;
assert.result
<toolset>gcc/<toolset-gcc:version>3.0.1/<stdlib>stlport/<variant>debug
<toolset>msvc/<variant>debug
<variant>debug/<toolset>msvc/<stdlib>stlport
: build-request.expand-no-defaults gcc-3.0.1/stlport msvc debug msvc/stlport ;
assert.result
<toolset>gcc/<toolset-gcc:version>3.0.1/<stdlib>stlport/<variant>debug/<inlining>off
<toolset>gcc/<toolset-gcc:version>3.0.1/<stdlib>stlport/<variant>release/<inlining>off
: build-request.expand-no-defaults gcc-3.0.1/stlport debug release <inlining>off ;
assert.result
<include>a/b/c/<toolset>gcc/<toolset-gcc:version>3.0.1/<stdlib>stlport/<variant>debug/<include>x/y/z
<include>a/b/c/<toolset>msvc/<stdlib>stlport/<variant>debug/<include>x/y/z
<include>a/b/c/<toolset>msvc/<variant>debug/<include>x/y/z
: build-request.expand-no-defaults <include>a/b/c gcc-3.0.1/stlport msvc/stlport msvc debug <include>x/y/z ;
local r ;
try ;
{
r = [ build-request.from-command-line bjam gcc/debug runtime-link=dynamic/static ] ;
build-request.convert-command-line-elements [ $(r).get-at 2 ] ;
}
catch \"static\" is not an implicit feature value ;
r = [ build-request.from-command-line bjam debug runtime-link=dynamic ] ;
assert.equal [ $(r).get-at 1 ] : ;
assert.equal [ $(r).get-at 2 ] : debug runtime-link=dynamic ;
assert.equal
[ build-request.convert-command-line-elements debug runtime-link=dynamic ]
: debug <runtime-link>dynamic ;
r = [ build-request.from-command-line bjam -d2 --debug debug target runtime-link=dynamic ] ;
assert.equal [ $(r).get-at 1 ] : target ;
assert.equal [ $(r).get-at 2 ] : debug runtime-link=dynamic ;
assert.equal
[ build-request.convert-command-line-elements debug runtime-link=dynamic ]
: debug <runtime-link>dynamic ;
r = [ build-request.from-command-line bjam debug runtime-link=dynamic,static ] ;
assert.equal [ $(r).get-at 1 ] : ;
assert.equal [ $(r).get-at 2 ] : debug runtime-link=dynamic,static ;
assert.equal
[ build-request.convert-command-line-elements debug runtime-link=dynamic,static ]
: debug <runtime-link>dynamic <runtime-link>static ;
r = [ build-request.from-command-line bjam debug gcc/runtime-link=dynamic,static ] ;
assert.equal [ $(r).get-at 1 ] : ;
assert.equal [ $(r).get-at 2 ] : debug gcc/runtime-link=dynamic,static ;
assert.equal
[ build-request.convert-command-line-elements debug gcc/runtime-link=dynamic,static ]
: debug gcc/<runtime-link>dynamic gcc/<runtime-link>static ;
r = [ build-request.from-command-line bjam msvc gcc,borland/runtime-link=static ] ;
assert.equal [ $(r).get-at 1 ] : ;
assert.equal [ $(r).get-at 2 ] : msvc gcc,borland/runtime-link=static ;
assert.equal
[ build-request.convert-command-line-elements msvc gcc,borland/runtime-link=static ]
: msvc gcc/<runtime-link>static borland/<runtime-link>static ;
r = [ build-request.from-command-line bjam gcc-3.0 ] ;
assert.equal [ $(r).get-at 1 ] : ;
assert.equal [ $(r).get-at 2 ] : gcc-3.0 ;
assert.equal
[ build-request.convert-command-line-elements gcc-3.0 ]
: gcc-3.0 ;
feature.finish-test build-request-test-temp ;
}

View File

@@ -0,0 +1,222 @@
# Status: being ported by Vladimir Prus
# TODO: need to re-compare with mainline of .jam
# Base revision: 40480
#
# (C) Copyright David Abrahams 2002. Permission to copy, use, modify, sell and
# distribute this software is granted provided this copyright notice appears in
# all copies. This software is provided "as is" without express or implied
# warranty, and with no claim as to its suitability for any purpose.
import b2.build.feature
feature = b2.build.feature
from b2.util.utility import *
from b2.util import is_iterable_typed
import b2.build.property_set as property_set
def expand_no_defaults (property_sets):
""" Expand the given build request by combining all property_sets which don't
specify conflicting non-free features.
"""
assert is_iterable_typed(property_sets, property_set.PropertySet)
# First make all features and subfeatures explicit
expanded_property_sets = [ps.expand_subfeatures() for ps in property_sets]
# Now combine all of the expanded property_sets
product = __x_product (expanded_property_sets)
return [property_set.create(p) for p in product]
def __x_product (property_sets):
""" Return the cross-product of all elements of property_sets, less any
that would contain conflicting values for single-valued features.
"""
assert is_iterable_typed(property_sets, property_set.PropertySet)
x_product_seen = set()
return __x_product_aux (property_sets, x_product_seen)[0]
def __x_product_aux (property_sets, seen_features):
"""Returns non-conflicting combinations of property sets.
property_sets is a list of PropertySet instances. seen_features is a set of Property
instances.
Returns a tuple of:
- list of lists of Property instances, such that within each list, no two Property instances
have the same feature, and no Property is for a feature in seen_features.
- set of features we saw in property_sets
"""
assert is_iterable_typed(property_sets, property_set.PropertySet)
assert isinstance(seen_features, set)
if not property_sets:
return ([], set())
properties = property_sets[0].all()
these_features = set()
for p in property_sets[0].non_free():
these_features.add(p.feature)
# Note: the algorithm as implemented here, as in the original Jam code, appears to
# detect conflicts based on features, not properties. For example, if the command
# line build request says:
#
# <a>1/<b>1 <c>1/<b>1
#
# it will decide that those two property sets conflict, because they both specify
# a value for 'b' and will not try building "<a>1 <c>1 <b>1", but rather two
# different property sets. This is a topic for future fixing, maybe.
if these_features & seen_features:
(inner_result, inner_seen) = __x_product_aux(property_sets[1:], seen_features)
return (inner_result, inner_seen | these_features)
else:
result = []
(inner_result, inner_seen) = __x_product_aux(property_sets[1:], seen_features | these_features)
if inner_result:
for inner in inner_result:
result.append(properties + inner)
else:
result.append(properties)
if inner_seen & these_features:
# Some of the elements in property_sets[1:] conflict with elements of property_sets[0].
# Try again, this time omitting elements of property_sets[0].
(inner_result2, inner_seen2) = __x_product_aux(property_sets[1:], seen_features)
result.extend(inner_result2)
return (result, inner_seen | these_features)
def looks_like_implicit_value(v):
"""Returns true if 'v' is either implicit value, or
the part before the first '-' symbol is implicit value."""
assert isinstance(v, basestring)
if feature.is_implicit_value(v):
return 1
else:
split = v.split("-")
if feature.is_implicit_value(split[0]):
return 1
return 0
def from_command_line(command_line):
"""Takes the command line tokens (such as taken from ARGV rule)
and constructs build request from it. Returns a list of two
lists. First is the set of targets specified in the command line,
and second is the set of requested build properties."""
assert is_iterable_typed(command_line, basestring)
targets = []
properties = []
for e in command_line:
if e[:1] != "-":
# Build request spec either has "=" in it, or completely
# consists of implicit feature values.
if e.find("=") != -1 or looks_like_implicit_value(e.split("/")[0]):
properties.append(e)
elif e:
targets.append(e)
return [targets, properties]
# Converts one element of command line build request specification into
# internal form.
def convert_command_line_element(e):
assert isinstance(e, basestring)
result = None
parts = e.split("/")
for p in parts:
m = p.split("=")
if len(m) > 1:
feature = m[0]
values = m[1].split(",")
lresult = [("<%s>%s" % (feature, v)) for v in values]
else:
lresult = p.split(",")
if p.find('-') == -1:
# FIXME: first port property.validate
# property.validate cannot handle subfeatures,
# so we avoid the check here.
#for p in lresult:
# property.validate(p)
pass
if not result:
result = lresult
else:
result = [e1 + "/" + e2 for e1 in result for e2 in lresult]
return [property_set.create(b2.build.feature.split(r)) for r in result]
###
### rule __test__ ( )
### {
### import assert feature ;
###
### feature.prepare-test build-request-test-temp ;
###
### import build-request ;
### import build-request : expand_no_defaults : build-request.expand_no_defaults ;
### import errors : try catch ;
### import feature : feature subfeature ;
###
### feature toolset : gcc msvc borland : implicit ;
### subfeature toolset gcc : version : 2.95.2 2.95.3 2.95.4
### 3.0 3.0.1 3.0.2 : optional ;
###
### feature variant : debug release : implicit composite ;
### feature inlining : on off ;
### feature "include" : : free ;
###
### feature stdlib : native stlport : implicit ;
###
### feature runtime-link : dynamic static : symmetric ;
###
###
### local r ;
###
### r = [ build-request.from-command-line bjam debug runtime-link=dynamic ] ;
### assert.equal [ $(r).get-at 1 ] : ;
### assert.equal [ $(r).get-at 2 ] : debug <runtime-link>dynamic ;
###
### try ;
### {
###
### build-request.from-command-line bjam gcc/debug runtime-link=dynamic/static ;
### }
### catch \"static\" is not a value of an implicit feature ;
###
###
### r = [ build-request.from-command-line bjam -d2 --debug debug target runtime-link=dynamic ] ;
### assert.equal [ $(r).get-at 1 ] : target ;
### assert.equal [ $(r).get-at 2 ] : debug <runtime-link>dynamic ;
###
### r = [ build-request.from-command-line bjam debug runtime-link=dynamic,static ] ;
### assert.equal [ $(r).get-at 1 ] : ;
### assert.equal [ $(r).get-at 2 ] : debug <runtime-link>dynamic <runtime-link>static ;
###
### r = [ build-request.from-command-line bjam debug gcc/runtime-link=dynamic,static ] ;
### assert.equal [ $(r).get-at 1 ] : ;
### assert.equal [ $(r).get-at 2 ] : debug gcc/<runtime-link>dynamic
### gcc/<runtime-link>static ;
###
### r = [ build-request.from-command-line bjam msvc gcc,borland/runtime-link=static ] ;
### assert.equal [ $(r).get-at 1 ] : ;
### assert.equal [ $(r).get-at 2 ] : msvc gcc/<runtime-link>static
### borland/<runtime-link>static ;
###
### r = [ build-request.from-command-line bjam gcc-3.0 ] ;
### assert.equal [ $(r).get-at 1 ] : ;
### assert.equal [ $(r).get-at 2 ] : gcc-3.0 ;
###
### feature.finish-test build-request-test-temp ;
### }
###
###

View File

@@ -0,0 +1,78 @@
# Copyright 2012 Steven Watanabe
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt)
import modules ;
import errors ;
import regex ;
import path ;
import project ;
import os ;
rule get ( name )
{
return $(.vars.$(name)) ;
}
rule set ( name : value * )
{
.all-vars += $(name) ;
.vars.$(name) = $(value) ;
}
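# Illustrative sketch (not part of the original file): this is how a check,
# such as those in the configure module, can cache its result; the key name
# below is hypothetical.
#
#   local cached = [ config-cache.get my-check-key ] ;
#   if ! $(cached)
#   {
#       # ... run the check and compute $(result) ...
#       config-cache.set my-check-key : $(result) ;
#   }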
rule save ( )
{
if $(.cache-file)
{
local cache-file-native = [ path.native $(.cache-file) ] ;
local target = <new-cache-file>$(cache-file-native) ;
local contents = "# Automatically generated by B2.\n# Do not edit.\n\nmodule config-cache {\n" ;
for local var in $(.all-vars)
{
local transformed ;
for local value in $(.vars.$(var))
{
transformed += [ regex.escape $(value) : \"\\ : \\ ] ;
}
local quoted = \"$(transformed)\" ;
contents += " set \"$(var)\" : $(quoted:J= ) ;\n" ;
}
contents += "}\n" ;
FILE_CONTENTS on $(target) = $(contents) ;
ALWAYS $(target) ;
config-cache.write $(target) ;
UPDATE_NOW $(target) : [ modules.peek configure : .log-fd ] : ignore-minus-n ;
import common ;
common.Clean clean-all : $(target) ;
}
}
actions write
{
@($(STDOUT):E=$(FILE_CONTENTS:J=)) > "$(<)"
}
if [ os.name ] = VMS
{
actions write
{
@($(STDOUT):E=$(FILE_CONTENTS:J=)) | TYPE SYS$INPUT /OUT=$(<:W)
}
}
rule load ( cache-file )
{
if $(.cache-file)
{
errors.error duplicate load of cache file ;
}
cache-file = [ path.native $(cache-file) ] ;
if [ path.exists $(cache-file) ] && ! ( --reconfigure in [ modules.peek : ARGV ] )
{
FILE_CONTENTS on <old-cache-file>$(cache-file) = "" ;
config-cache.write <old-cache-file>$(cache-file) ;
UPDATE_NOW <old-cache-file>$(cache-file) : [ modules.peek configure : .log-fd ] ;
include <old-cache-file>$(cache-file) ;
}
.cache-file = $(cache-file) ;
}

View File

@@ -0,0 +1,629 @@
# Copyright (c) 2010 Vladimir Prus.
# Copyright 2017-2021 Rene Ferdinand Rivera Morell
#
# Use, modification and distribution is subject to the Boost Software
# License Version 1.0. (See accompanying file LICENSE.txt or
# https://www.bfgroup.xyz/b2/LICENSE.txt)
# This module defines functions to help with two main tasks:
#
# - Discovering build-time configuration for the purposes of adjusting the build
# process.
# - Reporting what is built, and how it is configured.
import "class" : new ;
import common ;
import indirect ;
import path ;
import project ;
import property ;
import property-set ;
import targets ;
import config-cache ;
import feature ;
import modules ;
import sequence ;
import utility ;
import virtual-target ;
rule log-summary ( )
{
}
.width = 30 ;
rule set-width ( width )
{
.width = $(width) ;
}
# Declare that the components specified by the parameter exist.
#
rule register-components ( components * )
{
.components += $(components) ;
}
# Declare that the components specified by the parameters will be built.
#
rule components-building ( components * )
{
.built-components += $(components) ;
}
# Report something about a component's configuration that the user should
# know about.
#
rule log-component-configuration ( component : message )
{
# FIXME: Implement per-property-set logs.
.component-logs.$(component) += $(message) ;
}
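# Illustrative sketch (not part of the original file): a project's build files
# might report component status like this; the "zlib" component name, the
# have-zlib variable and the message are hypothetical.
#
#   configure.register-components zlib ;
#   if $(have-zlib)
#   {
#       configure.components-building zlib ;
#       configure.log-component-configuration zlib : "using the system zlib" ;
#   }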
.variant_index = 0 ;
.nl = "\n" ;
.check_notes = ;
rule log-check-result ( result variant ? )
{
if ! $(.announced-checks)
{
ECHO "Performing configuration checks\n" ;
.announced-checks = 1 ;
}
if $(variant)
{
if $(.variant_index.$(variant))
{
result = "$(result) [$(.variant_index.$(variant))]" ;
}
else
{
.variant_index = [ CALC $(.variant_index) + 1 ] ;
.variant_index.$(variant) = $(.variant_index) ;
result = "$(result) [$(.variant_index.$(variant))]" ;
.check_notes += "[$(.variant_index.$(variant))] $(variant)" ;
}
}
# else
# {
# result = "$(result) [?]" ;
# }
ECHO $(result) ;
# FIXME: Unfinished code. Nothing seems to set .check-results at the moment.
#.check-results += $(result) ;
}
rule log-library-search-result ( library : result variant ? )
{
local x = [ PAD " - $(library)" : $(.width) ] ;
log-check-result "$(x) : $(result)" $(variant) ;
}
rule print-component-configuration ( )
{
# FIXME: See what was intended with this initial assignment.
# local c = [ sequence.unique $(.components) ] ;
ECHO "\nComponent configuration:\n" ;
local c ;
for c in $(.components)
{
local s ;
if $(c) in $(.built-components)
{
s = "building" ;
}
else
{
s = "not building" ;
}
ECHO [ PAD " - $(c)" : $(.width) ] ": $(s)" ;
for local m in $(.component-logs.$(c))
{
ECHO " -" $(m) ;
}
}
ECHO ;
}
rule print-configure-checks-summary ( )
{
if $(.check_notes)
{
ECHO ;
for local l in $(.check_notes) { ECHO $(l) ; }
}
# FIXME: The problem with this approach is that the user sees the checks
# summary when all checks are done, and has no progress reporting while the
# checks are being executed.
if $(.check-results)
{
ECHO "Configuration checks summary\n" ;
for local r in $(.check-results)
{
ECHO $(r) ;
}
ECHO ;
}
}
if --reconfigure in [ modules.peek : ARGV ]
{
.reconfigure = true ;
}
# Handle the --reconfigure option
rule maybe-force-rebuild ( targets * )
{
if $(.reconfigure)
{
local all-targets ;
for local t in $(targets)
{
all-targets += [ virtual-target.traverse $(t) ] ;
}
for local t in [ sequence.unique $(all-targets) ]
{
$(t).always ;
}
}
}
# Attempts to build a set of virtual targets
rule try-build ( targets * : ps : what : retry ? )
{
local cache-props = [ $(ps).raw ] ;
local cache-name = $(what) $(cache-props) ;
cache-name = $(cache-name:J=-) ;
local value = [ config-cache.get $(cache-name) ] ;
local cache-min = [ property.as-path [ SORT [ feature.minimize $(cache-props) ] ] ] ;
local result ;
local jam-targets ;
maybe-force-rebuild $(targets) ;
for local t in $(targets)
{
jam-targets += [ $(t).actualize ] ;
}
local x ;
if $(value)
{
x = [ PAD " - $(what)" : $(.width) ] ;
if $(value) = true
{
.$(what)-supported.$(ps) = yes ;
result = true ;
x = "$(x) : yes (cached)" ;
}
else
{
x = "$(x) : no (cached)" ;
}
}
else if ! UPDATE_NOW in [ RULENAMES ]
{
# Cannot determine. Assume existence.
}
else
{
x = [ PAD " - $(what)" : $(.width) ] ;
if [ UPDATE_NOW $(jam-targets) :
$(.log-fd) : ignore-minus-n : ignore-minus-q ]
{
.$(what)-supported.$(ps) = yes ;
result = true ;
x = "$(x) : yes" ;
}
else
{
x = "$(x) : no" ;
}
}
if $(x)
{
log-check-result "$(x)" "$(cache-min:J= )" ;
}
if ! $(value)
{
if $(result)
{
config-cache.set $(cache-name) : true ;
}
else
{
config-cache.set $(cache-name) : false ;
}
}
return $(result) ;
}
# Attempts to build several sets of virtual targets. Returns the
# index of the first set that builds.
rule try-find-build ( ps : what : * )
{
local args = 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 ;
# The outer layer only needs to check $(what), but we
# also need to check the individual elements, in case
# the set of targets has changed since the last build.
local cache-props = [ $(ps).raw ] ;
local cache-name = $(what) $($(args)[1]) $(cache-props) ;
cache-name = $(cache-name:J=-) ;
local value = [ config-cache.get $(cache-name) ] ;
local cache-min = [ property.as-path [ SORT [ feature.minimize $(cache-props) ] ] ] ;
local result ;
local jam-targets ;
maybe-force-rebuild $($(args)[2-]) ;
# Make sure that the targets are always actualized,
# even if the result is cached. This is needed to
# allow clean-all to find them and also to avoid
# unintentional behavior changes.
for local t in $($(args)[2-])
{
$(t).actualize ;
}
if $(value)
{
local none = none ; # What to show when the argument is empty.
local name = $(value) ;
if $(name) != none
{
name = [ CALC $(name) + 2 ] ;
}
local x = [ PAD " - $(what)" : $(.width) ] ;
local y = [ PAD $($(name)[1]) : 3 ] ;
result = $(value) ;
log-check-result "$(x) : $(y) (cached)" "$(cache-min:J= )" ;
}
else
{
local x = [ PAD " - $(what)" : $(.width) ] ;
for local i in $(args)
{
if ! $($(i)[1])
{
break ;
}
local jam-targets ;
for local t in $($(i)[2-])
{
jam-targets += [ $(t).actualize ] ;
}
if [ UPDATE_NOW $(jam-targets) :
$(.log-fd) : ignore-minus-n : ignore-minus-q ]
{
result = [ CALC $(i) - 2 ] ;
log-check-result "$(x) : $($(i)[1])" "$(cache-min:J= )" ;
break ;
}
}
if ! $(result)
{
log-check-result "$(x) : none" "$(cache-min:J= )" ;
result = none ;
}
}
if ! $(value)
{
if $(result)
{
config-cache.set $(cache-name) : $(result) ;
}
else
{
config-cache.set $(cache-name) : $(result) ;
}
}
if $(result) != none
{
return $(result) ;
}
}
# Attempt to build a metatarget named by 'metatarget-reference'
# in context of 'project' with properties 'ps'.
# Returns non-empty value if build is OK.
rule builds-raw ( metatarget-reference : project : ps : what : retry ? )
{
local result ;
if ! $(retry) && ! $(.$(what)-tested.$(ps))
{
.$(what)-tested.$(ps) = true ;
local targets = [ targets.generate-from-reference
$(metatarget-reference) : $(project) : $(ps) ] ;
result = [ try-build $(targets[2-]) : $(ps) : $(what) : $(retry) ] ;
.$(what)-supported.$(ps) = $(result) ;
return $(result) ;
}
else
{
return $(.$(what)-supported.$(ps)) ;
}
}
# Attempt to build a metatarget named by 'metatarget-reference'
# in context of 'project' with properties 'ps'.
# Returns the 1-based index of the first target
# that builds.
rule find-builds-raw ( project : ps : what : * )
{
local result ;
local args = 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 ;
if ! $(.$(what)-tested.$(ps))
{
.$(what)-tested.$(ps) = true ;
local targets.$(i) what.$(i) ;
for local i in $(args)
{
if ! $($(i))
{
break ;
}
targets.$(i) = [ targets.generate-from-reference
$($(i)[1]) : $(project) : $(ps) ] ;
# ignore usage requirements
targets.$(i) = $(targets.$(i)[2-]) ;
if $($(i)[2])
{
what.$(i) = $($(i)[2]) ;
}
else
{
local t = [ targets.resolve-reference
$($(i)[1]) : $(project) ] ;
what.$(i) = [ $(t[1]).name ] ;
}
}
result = [ try-find-build $(ps) : $(what)
: $(what.4) $(targets.4)
: $(what.5) $(targets.5)
: $(what.6) $(targets.6)
: $(what.7) $(targets.7)
: $(what.8) $(targets.8)
: $(what.9) $(targets.9)
: $(what.10) $(targets.10)
: $(what.11) $(targets.11)
: $(what.12) $(targets.12)
: $(what.13) $(targets.13)
: $(what.14) $(targets.14)
: $(what.15) $(targets.15)
: $(what.16) $(targets.16)
: $(what.17) $(targets.17)
: $(what.18) $(targets.18)
: $(what.19) $(targets.19) ] ;
.$(what)-result.$(ps) = $(result) ;
return $(result) ;
}
else
{
return $(.$(what)-result.$(ps)) ;
}
}
rule get-relevant-features ( properties * )
{
local ps-full = [ property-set.create $(properties) ] ;
local ps-base = [ property-set.create [ $(ps-full).base ] ] ;
local ps-min = [ feature.expand-subfeatures [ feature.minimize
[ $(ps-base).raw ] ] ] ;
local ps-relevant = [ property-set.create $(ps-min) ] ;
return [ $(ps-relevant).raw ] ;
}
rule builds ( metatarget-reference : properties * : what ? : retry ? )
{
local relevant = [ get-relevant-features $(properties) ] ;
local ps = [ property-set.create $(relevant) ] ;
local t = [ targets.current ] ;
local p = [ $(t).project ] ;
if ! $(what)
{
local resolved = [ targets.resolve-reference $(metatarget-reference) : $(p) ] ;
local name = [ $(resolved[1]).name ] ;
what = "$(name) builds" ;
}
return [ builds-raw $(metatarget-reference) : $(p) : $(ps) : $(what) :
$(retry) ] ;
}
rule find-builds ( what : properties * : * )
{
local relevant = [ get-relevant-features $(properties) ] ;
local ps = [ property-set.create $(relevant) ] ;
local t = [ targets.current ] ;
local p = [ $(t).project ] ;
return [ find-builds-raw $(p) : $(ps) : $(what) :
$(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) :
$(10) : $(11) : $(12) : $(13) : $(14) : $(15) :
$(16) : $(17) : $(18) ] ;
}
# Called by B2 startup code to specify the file to receive the
# configuration check results. Should never be called by user code.
#
rule set-log-file ( log-file )
{
path.makedirs [ path.parent $(log-file) ] ;
.log-fd = [ FILE_OPEN [ path.native $(log-file) ] : "w" ] ;
if ! $(.log-fd)
{
ECHO "warning:" failed to open log file $(log-file) for writing ;
}
}
# Frontend rules
class check-target-builds-worker
{
import configure ;
import property-set ;
import targets ;
import project ;
import property ;
rule __init__ ( target message ? : true-properties * : false-properties * )
{
local project = [ project.current ] ;
self.target = $(target) ;
self.message = $(message) ;
self.true-properties =
[ configure.translate-properties $(true-properties) : $(project) ] ;
self.false-properties =
[ configure.translate-properties $(false-properties) : $(project) ] ;
}
rule check ( properties * )
{
local choosen ;
if [ configure.builds $(self.target) : $(properties) : $(self.message) ]
{
choosen = $(self.true-properties) ;
}
else
{
choosen = $(self.false-properties) ;
}
return [ property.evaluate-conditionals-in-context $(choosen) :
$(properties) ] ;
}
}
class configure-choose-worker
{
import configure ;
import property ;
import project ;
rule __init__ ( message : * )
{
local project = [ project.current ] ;
self.message = $(message) ;
for i in 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17
{
local name = [ CALC $(i) - 1 ] ;
self.targets.$(name) = $($(i)[1]) ;
if ! $($(i)[2]:G) # Check whether the second argument is a property
{
self.what.$(name) = $($(i)[2]) ;
self.props.$(name) = $($(i)[3-]) ;
}
else
{
self.props.$(name) = $($(i)[2-]) ;
}
self.props.$(name) = [ configure.translate-properties
$(self.props.$(name)) : $(project) ] ;
}
}
rule all-properties ( )
{
local i = 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 ;
return $(self.props.$(i)) ;
}
rule check ( properties * )
{
local i = [ configure.find-builds $(self.message) : $(properties)
: $(self.targets.1) $(self.what.1)
: $(self.targets.2) $(self.what.2)
: $(self.targets.3) $(self.what.3)
: $(self.targets.4) $(self.what.4)
: $(self.targets.5) $(self.what.5)
: $(self.targets.6) $(self.what.6)
: $(self.targets.7) $(self.what.7)
: $(self.targets.8) $(self.what.8)
: $(self.targets.9) $(self.what.9)
: $(self.targets.10) $(self.what.10)
: $(self.targets.11) $(self.what.11)
: $(self.targets.12) $(self.what.12)
: $(self.targets.13) $(self.what.13)
: $(self.targets.14) $(self.what.14)
: $(self.targets.15) $(self.what.15)
: $(self.targets.16) $(self.what.16)
: $(self.targets.17) $(self.what.17) ] ;
if $(self.props.$(i))
{
return [ property.evaluate-conditionals-in-context $(self.props.$(i)) : $(properties) ] ;
}
}
}
rule translate-properties ( properties * : project ? )
{
if $(project) && [ $(project).location ]
{
local location = [ $(project).location ] ;
local m = [ $(project).project-module ] ;
local project-id = [ project.attribute $(m) id ] ;
project-id ?= [ path.root $(location) [ path.pwd ] ] ;
return [ property.translate $(properties)
: $(project-id) : $(location) : $(m) ] ;
}
else
{
return $(properties) ;
}
}
rule check-target-builds ( target message ? : true-properties * :
false-properties * )
{
local instance = [ new check-target-builds-worker $(target) $(message) :
$(true-properties) : $(false-properties) ] ;
local rulename = [ indirect.make check : $(instance) ] ;
return <conditional>@$(rulename)
[ property.evaluate-conditional-relevance
$(true-properties) $(false-properties) ] ;
}
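# Illustrative usage (not part of the original file): check-target-builds is
# meant to be used inside a target's requirements; the /config//has_lzma
# metatarget and the <define> names are hypothetical.
#
#   exe app : app.cpp :
#       [ check-target-builds /config//has_lzma "lzma"
#           : <define>HAVE_LZMA
#           : <define>NO_LZMA ] ;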
# Usage:
# [ configure.choose "architecture"
# : /config//x86 x86 <architecture>x86
# : /config//mips mips <architecture>mips
# ]
rule choose ( message : * )
{
local instance = [ new configure-choose-worker $(message)
: $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9)
: $(10) : $(11) : $(12) : $(13) : $(14) : $(15) : $(16)
: $(17) : $(18) : $(19) ] ;
local rulename = [ indirect.make check : $(instance) ] ;
return <conditional>@$(rulename)
[ property.evaluate-conditional-relevance
[ $(instance).all-properties ] ] ;
}
IMPORT $(__name__) : check-target-builds : : check-target-builds ;

View File

@@ -0,0 +1,176 @@
# Status: ported.
# Base revision: 64488
#
# Copyright (c) 2010 Vladimir Prus.
#
# Use, modification and distribution is subject to the Boost Software
# License Version 1.0. (See accompanying file LICENSE.txt or
# https://www.bfgroup.xyz/b2/LICENSE.txt)
# This module defines functions to help with two main tasks:
#
# - Discovering build-time configuration for the purposes of adjusting
# the build process.
# - Reporting what is built, and how it is configured.
import b2.build.property as property
import b2.build.property_set as property_set
from b2.build import targets as targets_
from b2.manager import get_manager
from b2.util.sequence import unique
from b2.util import bjam_signature, value_to_jam, is_iterable
import bjam
import os
__width = 30
def set_width(width):
global __width
__width = width
__components = []
__built_components = []
__component_logs = {}
__announced_checks = False
__log_file = None
__log_fd = -1
def register_components(components):
"""Declare that the components specified by the parameter exist."""
assert is_iterable(components)
__components.extend(components)
def components_building(components):
"""Declare that the components specified by the parameters will be build."""
assert is_iterable(components)
__built_components.extend(components)
def log_component_configuration(component, message):
"""Report something about component configuration that the user should better know."""
assert isinstance(component, basestring)
assert isinstance(message, basestring)
__component_logs.setdefault(component, []).append(message)
def log_check_result(result):
assert isinstance(result, basestring)
global __announced_checks
if not __announced_checks:
print "Performing configuration checks"
__announced_checks = True
print result
def log_library_search_result(library, result):
assert isinstance(library, basestring)
assert isinstance(result, basestring)
log_check_result((" - %(library)s : %(result)s" % locals()).rjust(__width))
def print_component_configuration():
print "\nComponent configuration:"
for c in __components:
if c in __built_components:
s = "building"
else:
s = "not building"
message = " - %s)" % c
message = message.rjust(__width)
message += " : " + s
for m in __component_logs.get(c, []):
print " -" + m
print ""
__builds_cache = {}
def builds(metatarget_reference, project, ps, what):
# Attempt to build a metatarget named by 'metatarget-reference'
# in context of 'project' with properties 'ps'.
# Returns non-empty value if build is OK.
assert isinstance(metatarget_reference, basestring)
assert isinstance(project, targets_.ProjectTarget)
assert isinstance(ps, property_set.PropertySet)
assert isinstance(what, basestring)
result = []
existing = __builds_cache.get((what, ps), None)
if existing is None:
result = False
__builds_cache[(what, ps)] = False
targets = targets_.generate_from_reference(
metatarget_reference, project, ps).targets()
jam_targets = []
for t in targets:
jam_targets.append(t.actualize())
x = (" - %s" % what).rjust(__width)
if bjam.call("UPDATE_NOW", jam_targets, str(__log_fd), "ignore-minus-n"):
__builds_cache[(what, ps)] = True
result = True
log_check_result("%s: yes" % x)
else:
log_check_result("%s: no" % x)
return result
else:
return existing
def set_log_file(log_file_name):
assert isinstance(log_file_name, basestring)
# Called by Boost.Build startup code to specify the name of a file
# that will receive the results of configure checks. This
# should never be called by users.
global __log_file, __log_fd
dirname = os.path.dirname(log_file_name)
if not os.path.exists(dirname):
os.makedirs(dirname)
# Make sure to keep the file around, so that it's not
# garbage-collected and closed
__log_file = open(log_file_name, "w")
__log_fd = __log_file.fileno()
# Frontend rules
class CheckTargetBuildsWorker:
def __init__(self, target, true_properties, false_properties):
self.target = target
self.true_properties = property.create_from_strings(true_properties, True)
self.false_properties = property.create_from_strings(false_properties, True)
def check(self, ps):
assert isinstance(ps, property_set.PropertySet)
# FIXME: this should not be hardcoded. Other checks might
# want to consider a different set of features as relevant.
toolset = ps.get('toolset')[0]
toolset_version_property = "<toolset-" + toolset + ":version>"
relevant = ps.get_properties('target-os') + \
ps.get_properties("toolset") + \
ps.get_properties(toolset_version_property) + \
ps.get_properties("address-model") + \
ps.get_properties("architecture")
rps = property_set.create(relevant)
t = get_manager().targets().current()
p = t.project()
if builds(self.target, p, rps, "%s builds" % self.target):
choosen = self.true_properties
else:
choosen = self.false_properties
return property.evaluate_conditionals_in_context(choosen, ps)
@bjam_signature((["target"], ["true_properties", "*"], ["false_properties", "*"]))
def check_target_builds(target, true_properties, false_properties):
worker = CheckTargetBuildsWorker(target, true_properties, false_properties)
value = value_to_jam(worker.check)
return "<conditional>" + value
get_manager().projects().add_rule("check-target-builds", check_target_builds)

View File

@@ -0,0 +1,246 @@
# Copyright Pedro Ferreira 2005.
# Copyright Vladimir Prus 2007.
# Distributed under the Boost
# Software License, Version 1.0. (See accompanying
# file LICENSE.txt or copy at https://www.bfgroup.xyz/b2/LICENSE.txt)
bjam_interface = __import__('bjam')
import operator
import re
import b2.build.property_set as property_set
from b2.util import set_jam_action, is_iterable
class BjamAction(object):
"""Class representing bjam action defined from Python."""
def __init__(self, action_name, function, has_command=False):
assert isinstance(action_name, basestring)
assert callable(function) or function is None
self.action_name = action_name
self.function = function
self.has_command = has_command
def __call__(self, targets, sources, property_set_):
assert is_iterable(targets)
assert is_iterable(sources)
assert isinstance(property_set_, property_set.PropertySet)
if self.has_command:
# Bjam actions defined from Python have only the command
# to execute, and no associated jam procedural code. So
# passing 'property_set' to it is not necessary.
bjam_interface.call("set-update-action", self.action_name,
targets, sources, [])
if self.function:
self.function(targets, sources, property_set_)
class BjamNativeAction(BjamAction):
"""Class representing bjam action defined by Jam code.
We still allow to associate a Python callable that will
be called when this action is installed on any target.
"""
def __call__(self, targets, sources, property_set_):
assert is_iterable(targets)
assert is_iterable(sources)
assert isinstance(property_set_, property_set.PropertySet)
if self.function:
self.function(targets, sources, property_set_)
p = []
# Pass the raw properties of the given property set to the jam action.
if property_set_:
p = property_set_.raw()
set_jam_action(self.action_name, targets, sources, p)
action_modifiers = {"updated": 0x01,
"together": 0x02,
"ignore": 0x04,
"quietly": 0x08,
"piecemeal": 0x10,
"existing": 0x20}
class Engine:
""" The abstract interface to a build engine.
For now, the naming of targets, and special handling of some
target variables like SEARCH and LOCATE make this class coupled
to the bjam engine.
"""
def __init__ (self):
self.actions = {}
def add_dependency (self, targets, sources):
"""Adds a dependency from 'targets' to 'sources'
Both 'targets' and 'sources' can be either list
of target names, or a single target name.
"""
if isinstance (targets, str):
targets = [targets]
if isinstance (sources, str):
sources = [sources]
assert is_iterable(targets)
assert is_iterable(sources)
for target in targets:
for source in sources:
self.do_add_dependency (target, source)
def get_target_variable(self, targets, variable):
"""Gets the value of `variable` on set on the first target in `targets`.
Args:
targets (str or list): one or more targets to get the variable from.
variable (str): the name of the variable
Returns:
the value of `variable` set on `targets` (list)
Example:
>>> ENGINE = get_manager().engine()
>>> ENGINE.set_target_variable(targets, 'MY-VAR', 'Hello World')
>>> ENGINE.get_target_variable(targets, 'MY-VAR')
['Hello World']
Equivalent Jam code:
MY-VAR on $(targets) = "Hello World" ;
echo [ on $(targets) return $(MY-VAR) ] ;
"Hello World"
"""
if isinstance(targets, str):
targets = [targets]
assert is_iterable(targets)
assert isinstance(variable, basestring)
return bjam_interface.call('get-target-variable', targets, variable)
def set_target_variable (self, targets, variable, value, append=0):
""" Sets a target variable.
The 'variable' will be available to bjam when it decides
where to generate targets, and will also be available to the
updating rule for that 'target'.
"""
if isinstance (targets, str):
targets = [targets]
if isinstance(value, str):
value = [value]
assert is_iterable(targets)
assert isinstance(variable, basestring)
assert is_iterable(value)
if targets:
if append:
bjam_interface.call("set-target-variable", targets, variable, value, "true")
else:
bjam_interface.call("set-target-variable", targets, variable, value)
def set_update_action (self, action_name, targets, sources, properties=None):
""" Binds a target to the corresponding update action.
If a target needs to be updated, the action registered
with action_name will be used.
The 'action_name' must have been previously registered with
either the 'register_action' or 'register_bjam_action'
method.
"""
if isinstance(targets, str):
targets = [targets]
if isinstance(sources, str):
sources = [sources]
if properties is None:
properties = property_set.empty()
assert isinstance(action_name, basestring)
assert is_iterable(targets)
assert is_iterable(sources)
assert(isinstance(properties, property_set.PropertySet))
self.do_set_update_action (action_name, targets, sources, properties)
def register_action (self, action_name, command='', bound_list = [], flags = [],
function = None):
"""Creates a new build engine action.
Creates, on the bjam side, an action named 'action_name', with
'command' as the command to be executed, 'bound_list'
naming the list of variables bound when the command is executed,
and the specified flags.
If 'function' is not None, it should be a callable taking three
parameters:
- targets
- sources
- instance of the property_set class
This function will be called by set_update_action, and can
set additional target variables.
"""
assert isinstance(action_name, basestring)
assert isinstance(command, basestring)
assert is_iterable(bound_list)
assert is_iterable(flags)
assert function is None or callable(function)
bjam_flags = reduce(operator.or_,
(action_modifiers[flag] for flag in flags), 0)
# We allow command to be empty so that we can define 'action' as pure
# python function that would do some conditional logic and then relay
# to other actions.
assert command or function
if command:
bjam_interface.define_action(action_name, command, bound_list, bjam_flags)
self.actions[action_name] = BjamAction(
action_name, function, has_command=bool(command))
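# A minimal usage sketch (illustrative only, not part of this module): how a
# toolset might register and bind an action. The action name 'example.touch',
# the command, and the 'on_touch' helper are made-up placeholders; the engine
# is obtained via get_manager() as in the docstring example above.
#
#   from b2.manager import get_manager
#
#   def on_touch(targets, sources, ps):
#       # set an extra variable on the targets before the command runs
#       get_manager().engine().set_target_variable(targets, 'MODE', '0644')
#
#   engine = get_manager().engine()
#   engine.register_action('example.touch', command='touch "$(<)"',
#                          flags=['quietly'], function=on_touch)
#   engine.set_update_action('example.touch', 'out.txt', [])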
def register_bjam_action (self, action_name, function=None):
"""Informs self that 'action_name' is declared in bjam.
From this point, 'action_name' is a valid argument to the
set_update_action method. The action_name should be callable
in the global module of bjam.
"""
# We allow duplicate calls to this rule for the same
# action name. This way, jamfile rules that take action names
# can just register them without specially checking if
# action is already registered.
assert isinstance(action_name, basestring)
assert function is None or callable(function)
if action_name not in self.actions:
self.actions[action_name] = BjamNativeAction(action_name, function)
# Overridables
def do_set_update_action (self, action_name, targets, sources, property_set_):
assert isinstance(action_name, basestring)
assert is_iterable(targets)
assert is_iterable(sources)
assert isinstance(property_set_, property_set.PropertySet)
action = self.actions.get(action_name)
if not action:
raise Exception("No action %s was registered" % action_name)
action(targets, sources, property_set_)
def do_set_target_variable (self, target, variable, value, append):
assert isinstance(target, basestring)
assert isinstance(variable, basestring)
assert is_iterable(value)
assert isinstance(append, int) # matches bools
if append:
bjam_interface.call("set-target-variable", target, variable, value, "true")
else:
bjam_interface.call("set-target-variable", target, variable, value)
def do_add_dependency (self, target, source):
assert isinstance(target, basestring)
assert isinstance(source, basestring)
bjam_interface.call("DEPENDS", target, source)

View File

@@ -0,0 +1,135 @@
# Status: being written afresh by Vladimir Prus
# Copyright 2007 Vladimir Prus
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt)
# This file is supposed to implement error reporting for Boost.Build.
# Experience with the Jam version has shown that printing a full backtrace
# on each error is baffling. Further, for errors printed after parsing --
# during target building, the stacktrace does not even mention what
# target is being built.
# This module implements explicit contexts -- where other code can
# communicate which projects/targets are being built, and error
# messages will show those contexts. For programming errors,
# Python assertions are to be used.
import bjam
import traceback
import sys
def format(message, prefix=""):
parts = str(message).split("\n")
return "\n".join(prefix+p for p in parts)
class Context:
def __init__(self, message, nested=None):
self.message_ = message
self.nested_ = nested
def report(self, indent=""):
print indent + " -", self.message_
if self.nested_:
print indent + " declared at:"
for n in self.nested_:
n.report(indent + " ")
class JamfileContext:
def __init__(self):
raw = bjam.backtrace()
self.raw_ = raw
def report(self, indent=""):
for r in self.raw_:
print indent + " - %s:%s" % (r[0], r[1])
class ExceptionWithUserContext(Exception):
def __init__(self, message, context,
original_exception=None, original_tb=None, stack=None):
Exception.__init__(self, message)
self.context_ = context
self.original_exception_ = original_exception
self.original_tb_ = original_tb
self.stack_ = stack
def report(self):
print "error:", self.args[0]
if self.original_exception_:
print format(str(self.original_exception_), " ")
print
print " error context (most recent first):"
for c in self.context_[::-1]:
c.report()
print
if "--stacktrace" in bjam.variable("ARGV"):
if self.original_tb_:
traceback.print_tb(self.original_tb_)
elif self.stack_:
for l in traceback.format_list(self.stack_):
print l,
else:
print " use the '--stacktrace' option to get Python stacktrace"
print
def user_error_checkpoint(callable):
def wrapper(self, *args):
errors = self.manager().errors()
try:
return callable(self, *args)
except ExceptionWithUserContext, e:
raise
except Exception, e:
errors.handle_stray_exception(e)
finally:
errors.pop_user_context()
return wrapper
class Errors:
def __init__(self):
self.contexts_ = []
self._count = 0
def count(self):
return self._count
def push_user_context(self, message, nested=None):
self.contexts_.append(Context(message, nested))
def pop_user_context(self):
del self.contexts_[-1]
def push_jamfile_context(self):
self.contexts_.append(JamfileContext())
def pop_jamfile_context(self):
del self.contexts_[-1]
def capture_user_context(self):
return self.contexts_[:]
def handle_stray_exception(self, e):
raise ExceptionWithUserContext("unexpected exception", self.contexts_[:],
e, sys.exc_info()[2])
def __call__(self, message):
self._count = self._count + 1
raise ExceptionWithUserContext(message, self.contexts_[:],
stack=traceback.extract_stack())
def nearest_user_location():
"""
Returns:
tuple: the filename and line number of the nearest user location
"""
bt = bjam.backtrace()
if not bt:
return None
last = bt[-1]
return last[0], last[1]
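# A minimal usage sketch (illustrative only, not part of this module): how
# calling code typically reports an error with context. 'get_manager' comes
# from b2.manager; the messages are made up.
#
#   errors = get_manager().errors()
#   errors.push_user_context("while building target 'hello'")
#   try:
#       errors("no sources specified")   # raises ExceptionWithUserContext
#   except ExceptionWithUserContext, e:
#       e.report()                       # prints the error and its contexts
#   finally:
#       errors.pop_user_context()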

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,914 @@
# Status: ported, except for unit tests.
# Base revision: 64488
#
# Copyright 2001, 2002, 2003 Dave Abrahams
# Copyright 2002, 2006 Rene Rivera
# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt)
import re
from b2.manager import get_manager
from b2.util import utility, bjam_signature, is_iterable_typed
import b2.util.set
from b2.util.utility import add_grist, get_grist, ungrist, replace_grist, to_seq
from b2.exceptions import *
__re_split_subfeatures = re.compile ('<(.*):(.*)>')
__re_no_hyphen = re.compile ('^([^:]+)$')
__re_slash_or_backslash = re.compile (r'[\\/]')
VALID_ATTRIBUTES = {
'implicit',
'composite',
'optional',
'symmetric',
'free',
'incidental',
'path',
'dependency',
'propagated',
'link-incompatible',
'subfeature',
'order-sensitive'
}
class Feature(object):
def __init__(self, name, values, attributes):
assert isinstance(name, basestring)
assert is_iterable_typed(values, basestring)
assert is_iterable_typed(attributes, basestring)
self.name = name
self.values = values
self.default = None
self.subfeatures = []
self.parent = None
self.attributes_string_list = []
self._hash = hash(self.name)
for attr in attributes:
self.attributes_string_list.append(attr)
attr = attr.replace("-", "_")
setattr(self, attr, True)
def add_values(self, values):
assert is_iterable_typed(values, basestring)
self.values.extend(values)
def set_default(self, value):
assert isinstance(value, basestring)
for attr in ('free', 'optional'):
if getattr(self, attr):
get_manager().errors()('"{}" feature "<{}>" cannot have a default value.'
.format(attr, self.name))
self.default = value
def add_subfeature(self, name):
assert isinstance(name, Feature)
self.subfeatures.append(name)
def set_parent(self, feature, value):
assert isinstance(feature, Feature)
assert isinstance(value, basestring)
self.parent = (feature, value)
def __hash__(self):
return self._hash
def __str__(self):
return self.name
def reset ():
""" Clear the module state. This is mainly for testing purposes.
"""
global __all_attributes, __all_features, __implicit_features, __composite_properties
global __subfeature_from_value, __all_top_features, __free_features
global __all_subfeatures
# sets the default value of False for each valid attribute
for attr in VALID_ATTRIBUTES:
setattr(Feature, attr.replace("-", "_"), False)
# A map containing all features. The key is the feature name.
# The value is an instance of Feature class.
__all_features = {}
# All non-subfeatures.
__all_top_features = []
# Maps values to the corresponding implicit feature
__implicit_features = {}
# A map containing all composite properties. The key is a Property instance,
# and the value is a list of Property instances
__composite_properties = {}
# Maps a value to the corresponding subfeature name.
__subfeature_from_value = {}
# All free features
__free_features = []
__all_subfeatures = []
reset ()
def enumerate ():
""" Returns an iterator to the features map.
"""
return __all_features.iteritems ()
def get(name):
"""Return the Feature instance for the specified name.
Throws if no feature by such name exists
"""
assert isinstance(name, basestring)
return __all_features[name]
# FIXME: prepare-test/finish-test?
@bjam_signature((["name"], ["values", "*"], ["attributes", "*"]))
def feature (name, values, attributes = []):
""" Declares a new feature with the given name, values, and attributes.
name: the feature name
values: a sequence of the allowable values - may be extended later with feature.extend
attributes: a sequence of the feature's attributes (e.g. implicit, free, propagated, ...)
"""
__validate_feature_attributes (name, attributes)
feature = Feature(name, [], attributes)
__all_features[name] = feature
# Temporary measure while we have not fully moved from 'gristed strings'
__all_features["<" + name + ">"] = feature
name = add_grist(name)
if 'subfeature' in attributes:
__all_subfeatures.append(name)
else:
__all_top_features.append(feature)
extend (name, values)
# FIXME: why this is needed.
if 'free' in attributes:
__free_features.append (name)
return feature
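# A minimal usage sketch (illustrative only, not part of this module). The
# feature name and values below are made up:
#
#   feature('optimization', ['off', 'speed', 'space'], ['propagated'])
#   extend('optimization', ['aggressive'])
#   get('optimization').values        # ['off', 'speed', 'space', 'aggressive']
#   defaults([get('optimization')])   # [<optimization>off] -- first value is the default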
@bjam_signature((["feature"], ["value"]))
def set_default (feature, value):
""" Sets the default value of the given feature, overriding any previous default.
feature: the name of the feature
value: the default value to assign
"""
f = __all_features[feature]
bad_attribute = None
if f.free:
bad_attribute = "free"
elif f.optional:
bad_attribute = "optional"
if bad_attribute:
raise InvalidValue ("%s property %s cannot have a default" % (bad_attribute, f.name))
if value not in f.values:
raise InvalidValue ("The specified default value, '%s' is invalid.\n" % value + "allowed values are: %s" % f.values)
f.set_default(value)
def defaults(features):
""" Returns the default property values for the given features.
"""
assert is_iterable_typed(features, Feature)
# FIXME: should merge feature and property modules.
from . import property
result = []
for f in features:
if not f.free and not f.optional and f.default:
result.append(property.Property(f, f.default))
return result
def valid (names):
""" Returns true iff all elements of names are valid features.
"""
if isinstance(names, str):
names = [names]
assert is_iterable_typed(names, basestring)
return all(name in __all_features for name in names)
def attributes (feature):
""" Returns the attributes of the given feature.
"""
assert isinstance(feature, basestring)
return __all_features[feature].attributes_string_list
def values (feature):
""" Return the values of the given feature.
"""
assert isinstance(feature, basestring)
validate_feature (feature)
return __all_features[feature].values
def is_implicit_value (value_string):
""" Returns true iff 'value_string' is a value_string
of an implicit feature.
"""
assert isinstance(value_string, basestring)
if value_string in __implicit_features:
return __implicit_features[value_string]
v = value_string.split('-')
if v[0] not in __implicit_features:
return False
feature = __implicit_features[v[0]]
for subvalue in (v[1:]):
if not __find_implied_subfeature(feature, subvalue, v[0]):
return False
return True
def implied_feature (implicit_value):
""" Returns the implicit feature associated with the given implicit value.
"""
assert isinstance(implicit_value, basestring)
components = implicit_value.split('-')
if components[0] not in __implicit_features:
raise InvalidValue ("'%s' is not a value of an implicit feature" % implicit_value)
return __implicit_features[components[0]]
def __find_implied_subfeature (feature, subvalue, value_string):
assert isinstance(feature, Feature)
assert isinstance(subvalue, basestring)
assert isinstance(value_string, basestring)
try:
return __subfeature_from_value[feature][value_string][subvalue]
except KeyError:
return None
# Given a feature and a value of one of its subfeatures, find the name
# of the subfeature. If value-string is supplied, looks for implied
# subfeatures that are specific to that value of feature
# feature # The main feature name
# subvalue # The value of one of its subfeatures
# value-string # The value of the main feature
def implied_subfeature (feature, subvalue, value_string):
assert isinstance(feature, Feature)
assert isinstance(subvalue, basestring)
assert isinstance(value_string, basestring)
result = __find_implied_subfeature (feature, subvalue, value_string)
if not result:
raise InvalidValue ("'%s' is not a known subfeature value of '%s%s'" % (subvalue, feature, value_string))
return result
def validate_feature (name):
""" Checks if all name is a valid feature. Otherwise, raises an exception.
"""
assert isinstance(name, basestring)
if name not in __all_features:
raise InvalidFeature ("'%s' is not a valid feature name" % name)
else:
return __all_features[name]
# Uses Property
def __expand_subfeatures_aux (property_, dont_validate = False):
""" Helper for expand_subfeatures.
Given a feature and value, or just a value corresponding to an
implicit feature, returns a property set consisting of all component
subfeatures and their values. For example:
expand_subfeatures <toolset>gcc-2.95.2-linux-x86
-> <toolset>gcc <toolset-version>2.95.2 <toolset-os>linux <toolset-cpu>x86
equivalent to:
expand_subfeatures gcc-2.95.2-linux-x86
feature: The name of the feature, or empty if value corresponds to an implicit property
value: The value of the feature.
dont_validate: If True, no validation of value string will be done.
"""
from . import property # no __debug__ since Property is used elsewhere
assert isinstance(property_, property.Property)
assert isinstance(dont_validate, int) # matches bools
f = property_.feature
v = property_.value
if not dont_validate:
validate_value_string(f, v)
components = v.split ("-")
v = components[0]
result = [property.Property(f, components[0])]
subvalues = components[1:]
while len(subvalues) > 0:
subvalue = subvalues [0] # pop the head off of subvalues
subvalues = subvalues [1:]
subfeature = __find_implied_subfeature (f, subvalue, v)
# If no subfeature was found, reconstitute the value string and use that
if not subfeature:
return [property.Property(f, '-'.join(components))]
result.append(property.Property(subfeature, subvalue))
return result
def expand_subfeatures(properties, dont_validate = False):
"""
Make all elements of properties corresponding to implicit features
explicit, and express all subfeature values as separate properties
in their own right. For example, the property
gcc-2.95.2-linux-x86
might expand to
<toolset>gcc <toolset-version>2.95.2 <toolset-os>linux <toolset-cpu>x86
properties: A sequence with elements of the form
<feature>value-string or just value-string in the
case of implicit features.
dont_validate: If True, no validation of value string will be done.
"""
if __debug__:
from .property import Property
assert is_iterable_typed(properties, Property)
assert isinstance(dont_validate, int) # matches bools
result = []
for p in properties:
# Don't expand subfeatures in subfeatures
if p.feature.subfeature:
result.append (p)
else:
result.extend(__expand_subfeatures_aux (p, dont_validate))
return result
# rule extend was defined as below:
# Can be called three ways:
#
# 1. extend feature : values *
# 2. extend <feature> subfeature : values *
# 3. extend <feature>value-string subfeature : values *
#
# * Form 1 adds the given values to the given feature
# * Forms 2 and 3 add subfeature values to the given feature
# * Form 3 adds the subfeature values as specific to the given
# property value-string.
#
#rule extend ( feature-or-property subfeature ? : values * )
#
# Now, the specific rule must be called, depending on the desired operation:
# extend_feature
# extend_subfeature
@bjam_signature([['name'], ['values', '*']])
def extend (name, values):
""" Adds the given values to the given feature.
"""
assert isinstance(name, basestring)
assert is_iterable_typed(values, basestring)
name = add_grist (name)
__validate_feature (name)
feature = __all_features [name]
if feature.implicit:
for v in values:
if v in __implicit_features:
raise BaseException ("'%s' is already associated with the feature '%s'" % (v, __implicit_features [v]))
__implicit_features[v] = feature
if values and not feature.values and not(feature.free or feature.optional):
# This is the first value specified for this feature,
# take it as default value
feature.set_default(values[0])
feature.add_values(values)
def validate_value_string (f, value_string):
""" Checks that value-string is a valid value-string for the given feature.
"""
assert isinstance(f, Feature)
assert isinstance(value_string, basestring)
if f.free or value_string in f.values:
return
values = [value_string]
if f.subfeatures:
if not value_string in f.values and \
not value_string in f.subfeatures:
values = value_string.split('-')
# An empty value is allowed for optional features
if not values[0] in f.values and \
(values[0] or not f.optional):
raise InvalidValue ("'%s' is not a known value of feature '%s'\nlegal values: '%s'" % (values [0], f.name, f.values))
for v in values [1:]:
# this will validate any subfeature values in value-string
implied_subfeature(f, v, values[0])
""" Extends the given subfeature with the subvalues. If the optional
value-string is provided, the subvalues are only valid for the given
value of the feature. Thus, you could say that
<target-platform>mingw is specific to <toolset>gcc-2.95.2 as follows:
extend-subfeature toolset gcc-2.95.2 : target-platform : mingw ;
feature: The feature whose subfeature is being extended.
value-string: If supplied, specifies a specific value of the
main feature for which the new subfeature values
are valid.
subfeature: The name of the subfeature.
subvalues: The additional values of the subfeature being defined.
"""
def extend_subfeature (feature_name, value_string, subfeature_name, subvalues):
assert isinstance(feature_name, basestring)
assert isinstance(value_string, basestring)
assert isinstance(subfeature_name, basestring)
assert is_iterable_typed(subvalues, basestring)
feature = validate_feature(feature_name)
if value_string:
validate_value_string(feature, value_string)
subfeature_name = feature_name + '-' + __get_subfeature_name (subfeature_name, value_string)
extend(subfeature_name, subvalues)
subfeature = __all_features[subfeature_name]
if value_string == None: value_string = ''
if feature not in __subfeature_from_value:
__subfeature_from_value[feature] = {}
if value_string not in __subfeature_from_value[feature]:
__subfeature_from_value[feature][value_string] = {}
for subvalue in subvalues:
__subfeature_from_value [feature][value_string][subvalue] = subfeature
@bjam_signature((["feature_name", "value_string", "?"], ["subfeature"],
["subvalues", "*"], ["attributes", "*"]))
def subfeature (feature_name, value_string, subfeature, subvalues, attributes = []):
""" Declares a subfeature.
feature_name: Root feature that is not a subfeature.
value_string: An optional value-string specifying which feature or
subfeature values this subfeature is specific to,
if any.
subfeature: The name of the subfeature being declared.
subvalues: The allowed values of this subfeature.
attributes: The attributes of the subfeature.
"""
parent_feature = validate_feature (feature_name)
# Add grist to the subfeature name if a value-string was supplied
subfeature_name = __get_subfeature_name (subfeature, value_string)
if subfeature_name in __all_features[feature_name].subfeatures:
message = "'%s' already declared as a subfeature of '%s'" % (subfeature, feature_name)
message += " specific to '%s'" % value_string
raise BaseException (message)
# First declare the subfeature as a feature in its own right
f = feature (feature_name + '-' + subfeature_name, subvalues, attributes + ['subfeature'])
f.set_parent(parent_feature, value_string)
parent_feature.add_subfeature(f)
# Now make sure the subfeature values are known.
extend_subfeature (feature_name, value_string, subfeature, subvalues)
@bjam_signature((["composite_property_s"], ["component_properties_s", "*"]))
def compose (composite_property_s, component_properties_s):
""" Sets the components of the given composite property.
All parameters are <feature>value strings
"""
from . import property
component_properties_s = to_seq (component_properties_s)
composite_property = property.create_from_string(composite_property_s)
f = composite_property.feature
if len(component_properties_s) > 0 and isinstance(component_properties_s[0], property.Property):
component_properties = component_properties_s
else:
component_properties = [property.create_from_string(p) for p in component_properties_s]
if not f.composite:
raise BaseException ("'%s' is not a composite feature" % f)
if composite_property in __composite_properties:
raise BaseException ('components of "%s" already set: %s' % (composite_property, str (__composite_properties[composite_property])))
if composite_property in component_properties:
raise BaseException ('composite property "%s" cannot have itself as a component' % composite_property)
__composite_properties[composite_property] = component_properties
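# A minimal usage sketch (illustrative only, not part of this module): a
# composite feature whose value expands into other properties. The feature
# names below are made up and assumed to be declared already:
#
#   feature('build-profile', ['release-min'], ['composite', 'propagated'])
#   compose('<build-profile>release-min',
#           ['<optimization>speed', '<inlining>full'])
#   # expand_composites() will later replace <build-profile>release-min with
#   # the component properties above (keeping the original property as well).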
def expand_composite(property_):
if __debug__:
from .property import Property
assert isinstance(property_, Property)
result = [ property_ ]
if property_ in __composite_properties:
for p in __composite_properties[property_]:
result.extend(expand_composite(p))
return result
@bjam_signature((['feature'], ['properties', '*']))
def get_values (feature, properties):
""" Returns all values of the given feature specified by the given property set.
"""
if feature[0] != '<':
feature = '<' + feature + '>'
result = []
for p in properties:
if get_grist (p) == feature:
result.append (replace_grist (p, ''))
return result
def free_features ():
""" Returns all free features.
"""
return __free_features
def expand_composites (properties):
""" Expand all composite properties in the set so that all components
are explicitly expressed.
"""
if __debug__:
from .property import Property
assert is_iterable_typed(properties, Property)
explicit_features = set(p.feature for p in properties)
result = []
# now expand composite features
for p in properties:
expanded = expand_composite(p)
for x in expanded:
if not x in result:
f = x.feature
if f.free:
result.append (x)
elif not x in properties: # x is the result of expansion
if not f in explicit_features: # not explicitly-specified
if any(r.feature == f for r in result):
raise FeatureConflict(
"expansions of composite features result in "
"conflicting values for '%s'\nvalues: '%s'\none contributing composite property was '%s'" %
(f.name, [r.value for r in result if r.feature == f] + [x.value], p))
else:
result.append (x)
elif any(r.feature == f for r in result):
raise FeatureConflict ("explicitly-specified values of non-free feature '%s' conflict\n"
"existing values: '%s'\nvalue from expanding '%s': '%s'" % (f,
[r.value for r in result if r.feature == f], p, x.value))
else:
result.append (x)
return result
# Uses Property
def is_subfeature_of (parent_property, f):
""" Return true iff f is an ordinary subfeature of the parent_property's
feature, or if f is a subfeature of the parent_property's feature
specific to the parent_property's value.
"""
if __debug__:
from .property import Property
assert isinstance(parent_property, Property)
assert isinstance(f, Feature)
if not f.subfeature:
return False
p = f.parent
if not p:
return False
parent_feature = p[0]
parent_value = p[1]
if parent_feature != parent_property.feature:
return False
if parent_value and parent_value != parent_property.value:
return False
return True
def __is_subproperty_of (parent_property, p):
""" As is_subfeature_of, for subproperties.
"""
if __debug__:
from .property import Property
assert isinstance(parent_property, Property)
assert isinstance(p, Property)
return is_subfeature_of (parent_property, p.feature)
# Returns true iff the subvalue is valid for the feature. When the
# optional value-string is provided, returns true iff the subvalues
# are valid for the given value of the feature.
def is_subvalue(feature, value_string, subfeature, subvalue):
assert isinstance(feature, basestring)
assert isinstance(value_string, basestring)
assert isinstance(subfeature, basestring)
assert isinstance(subvalue, basestring)
if not value_string:
value_string = ''
try:
return __subfeature_from_value[feature][value_string][subvalue] == subfeature
except KeyError:
return False
# Uses Property
def expand (properties):
""" Given a property set which may consist of composite and implicit
properties and combined subfeature values, returns an expanded,
normalized property set with all implicit features expressed
explicitly, all subfeature values individually expressed, and all
components of composite properties expanded. Non-free features
directly expressed in the input properties cause any values of
those features due to composite feature expansion to be dropped. If
two values of a given non-free feature are directly expressed in the
input, an error is issued.
"""
if __debug__:
from .property import Property
assert is_iterable_typed(properties, Property)
expanded = expand_subfeatures(properties)
return expand_composites (expanded)
# Accepts list of Property objects
def add_defaults (properties):
""" Given a set of properties, add default values for features not
represented in the set.
Note: if there is an ordinary feature F1 and a composite feature
F2 which includes some value for F1, and both features have default values,
then the default value of F1 will be added, not the value in F2. This might
not be the right idea: consider
feature variant : debug ... ;
<variant>debug : .... <runtime-debugging>on
feature <runtime-debugging> : off on ;
Here, when adding default for an empty property set, we'll get
<variant>debug <runtime_debugging>off
and that's kind of strange.
"""
if __debug__:
from .property import Property
assert is_iterable_typed(properties, Property)
# create a copy since properties will be modified
result = list(properties)
# We don't add defaults for conditional properties. We don't want
# <variant>debug:<define>DEBUG to be taken as a specified value for <variant>
handled_features = set(p.feature for p in properties if not p.condition)
missing_top = [f for f in __all_top_features if not f in handled_features]
more = defaults(missing_top)
result.extend(more)
handled_features.update(p.feature for p in more)
# Add defaults for subfeatures of features which are present
for p in result[:]:
subfeatures = [s for s in p.feature.subfeatures if not s in handled_features]
more = defaults(__select_subfeatures(p, subfeatures))
handled_features.update(h.feature for h in more)
result.extend(more)
return result
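# A sketch of the caveat described above (illustrative only, using made-up
# declarations):
#
#   feature('runtime-debugging', ['off', 'on'], [])
#   feature('variant', ['debug'], ['composite', 'propagated'])
#   compose('<variant>debug', ['<runtime-debugging>on'])
#   add_defaults([])   # includes <variant>debug and <runtime-debugging>off --
#                      # the feature's own default wins over the composite value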
def minimize (properties):
""" Given an expanded property set, eliminate all redundancy: properties
which are elements of other (composite) properties in the set will
be eliminated. Non-symmetric properties equal to default values will be
eliminated, unless they override a value from some composite property.
Implicit properties will be expressed without feature
grist, and sub-property values will be expressed as elements joined
to the corresponding main property.
"""
if __debug__:
from .property import Property
assert is_iterable_typed(properties, Property)
# remove properties implied by composite features
components = []
component_features = set()
for property in properties:
if property in __composite_properties:
cs = __composite_properties[property]
components.extend(cs)
component_features.update(c.feature for c in cs)
properties = b2.util.set.difference (properties, components)
# handle subfeatures and implicit features
# move subfeatures to the end of the list
properties = [p for p in properties if not p.feature.subfeature] +\
[p for p in properties if p.feature.subfeature]
result = []
while properties:
p = properties[0]
f = p.feature
# locate all subproperties of p in the property set
subproperties = [x for x in properties if is_subfeature_of(p, x.feature)]
if subproperties:
# reconstitute the joined property name
subproperties.sort ()
joined = b2.build.property.Property(p.feature, p.value + '-' + '-'.join ([sp.value for sp in subproperties]))
result.append(joined)
properties = b2.util.set.difference(properties[1:], subproperties)
else:
# eliminate properties whose value is equal to feature's
# default and which are not symmetric and which do not
# contradict values implied by composite properties.
# since all component properties of composites in the set
# have been eliminated, any remaining property whose
# feature is the same as a component of a composite in the
# set must have a non-redundant value.
if p.value != f.default or f.symmetric or f in component_features:
result.append (p)
properties = properties[1:]
return result
def split (properties):
""" Given a property-set of the form
v1/v2/...vN-1/<fN>vN/<fN+1>vN+1/...<fM>vM
Returns
v1 v2 ... vN-1 <fN>vN <fN+1>vN+1 ... <fM>vM
Note that vN...vM may contain slashes. This is resilient to the
substitution of backslashes for slashes, since Jam, unbidden,
sometimes swaps slash direction on NT.
"""
assert isinstance(properties, basestring)
def split_one (properties):
pieces = re.split (__re_slash_or_backslash, properties)
result = []
for x in pieces:
if not get_grist (x) and len (result) > 0 and get_grist (result [-1]):
result = result [0:-1] + [ result [-1] + '/' + x ]
else:
result.append (x)
return result
if isinstance (properties, str):
return split_one (properties)
result = []
for p in properties:
result += split_one (p)
return result
def compress_subproperties (properties):
""" Combine all subproperties into their parent properties
Requires: for every subproperty, there is a parent property. All
features are explicitly expressed.
This rule probably shouldn't be needed, but
build-request.expand-no-defaults is being abused for unintended
purposes and it needs help
"""
from .property import Property
assert is_iterable_typed(properties, Property)
result = []
matched_subs = set()
all_subs = set()
for p in properties:
f = p.feature
if not f.subfeature:
subs = [x for x in properties if is_subfeature_of(p, x.feature)]
if subs:
matched_subs.update(subs)
subvalues = '-'.join (sub.value for sub in subs)
result.append(Property(
p.feature, p.value + '-' + subvalues,
p.condition))
else:
result.append(p)
else:
all_subs.add(p)
# TODO: these variables are used just for debugging. What's the overhead?
assert all_subs == matched_subs
return result
######################################################################################
# Private methods
def __select_subproperties (parent_property, properties):
if __debug__:
from .property import Property
assert is_iterable_typed(properties, Property)
assert isinstance(parent_property, Property)
return [ x for x in properties if __is_subproperty_of (parent_property, x) ]
def __get_subfeature_name (subfeature, value_string):
assert isinstance(subfeature, basestring)
assert isinstance(value_string, basestring) or value_string is None
if value_string == None:
prefix = ''
else:
prefix = value_string + ':'
return prefix + subfeature
def __validate_feature_attributes (name, attributes):
assert isinstance(name, basestring)
assert is_iterable_typed(attributes, basestring)
for attribute in attributes:
if attribute not in VALID_ATTRIBUTES:
raise InvalidAttribute ("unknown attributes: '%s' in feature declaration: '%s'" % (str (b2.util.set.difference (attributes, __all_attributes)), name))
if name in __all_features:
raise AlreadyDefined ("feature '%s' already defined" % name)
elif 'implicit' in attributes and 'free' in attributes:
raise InvalidAttribute ("free features cannot also be implicit (in declaration of feature '%s')" % name)
elif 'free' in attributes and 'propagated' in attributes:
raise InvalidAttribute ("free features cannot also be propagated (in declaration of feature '%s')" % name)
def __validate_feature (feature):
""" Generates an error if the feature is unknown.
"""
assert isinstance(feature, basestring)
if feature not in __all_features:
raise BaseException ('unknown feature "%s"' % feature)
def __select_subfeatures (parent_property, features):
""" Given a property, return the subset of features consisting of all
ordinary subfeatures of the property's feature, and all specific
subfeatures of the property's feature which are conditional on the
property's value.
"""
if __debug__:
from .property import Property
assert isinstance(parent_property, Property)
assert is_iterable_typed(features, Feature)
return [f for f in features if is_subfeature_of (parent_property, f)]
# FIXME: copy over tests.

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,604 @@
# Copyright 2003 Dave Abrahams
# Copyright 2003, 2004, 2005, 2006 Vladimir Prus
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE.txt or copy at
# https://www.bfgroup.xyz/b2/LICENSE.txt)
import "class" : new ;
import feature ;
import indirect ;
import path ;
import project ;
import property ;
import sequence ;
import set ;
import option ;
# Class for storing a set of properties.
#
# There is 1<->1 correspondence between identity and value. No two instances
# of the class are equal. To maintain this property, the 'property-set.create'
# rule should be used to create new instances. Instances are immutable.
#
# Each property is classified with regard to its effect on build results.
# Incidental properties have no effect on build results, from B2's
# point of view. Others are either free, or non-free and we refer to non-free
# ones as 'base'. Each property belongs to exactly one of those categories.
#
# It is possible to get a list of properties belonging to each category as
# well as a list of properties with a specific attribute.
#
# Several operations, like refine and as-path, are provided. They all use
# caching whenever possible.
#
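# A minimal usage sketch (illustrative only; B2's own modules do the
# equivalent of this):
#
#   import property-set ;
#   local ps = [ property-set.create <optimization>speed <define>NDEBUG ] ;
#   ECHO [ $(ps).get <define> ] ;            # "NDEBUG"
#   local ps2 = [ $(ps).add-raw <link>static ] ;
#
# Because instances are interned by 'create', two sets built from the same raw
# properties are the same object and can be compared cheaply by identity.
#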
class property-set
{
import errors ;
import feature ;
import modules ;
import path ;
import property ;
import property-set ;
import set ;
rule __init__ ( raw-properties * )
{
self.raw = $(raw-properties) ;
for local p in $(raw-properties)
{
if ! $(p:G)
{
errors.error "Invalid property: '$(p)'" ;
}
}
}
# Returns Jam list of stored properties.
#
rule raw ( )
{
return $(self.raw) ;
}
rule str ( )
{
return "[" $(self.raw) "]" ;
}
# Returns properties that are neither incidental nor free.
#
rule base ( )
{
if ! $(self.base-initialized)
{
init-base ;
}
return $(self.base) ;
}
# Returns free properties which are not incidental.
#
rule free ( )
{
if ! $(self.base-initialized)
{
init-base ;
}
return $(self.free) ;
}
# Returns relevant base properties. This is used for computing
# target paths, so it must return the expanded set of relevant
# properties.
#
rule base-relevant ( )
{
if ! $(self.relevant-initialized)
{
init-relevant ;
}
return $(self.base-relevant) ;
}
# Returns all properties marked as relevant by features-ps.
# It does not attempt to expand features-ps in any way, as
# this matches what virtual-target.register needs.
#
rule relevant ( features-ps )
{
if ! $(self.relevant.$(features-ps))
{
local result ;
local features = [ $(features-ps).get <relevant> ] ;
features = <$(features)> ;
local ignore-relevance = [ modules.peek
property-set : .ignore-relevance ] ;
for local p in $(self.raw)
{
if $(ignore-relevance) || $(p:G) in $(features)
{
local att = [ feature.attributes $(p:G) ] ;
if ! ( incidental in $(att) )
{
result += $(p) ;
}
}
}
self.relevant.$(features-ps) = [ property-set.create $(result) ] ;
}
return $(self.relevant.$(features-ps)) ;
}
# Returns dependency properties.
#
rule dependency ( )
{
if ! $(self.dependency-initialized)
{
init-dependency ;
}
return $(self.dependency) ;
}
rule non-dependency ( )
{
if ! $(self.dependency-initialized)
{
init-dependency ;
}
return $(self.non-dependency) ;
}
rule conditional ( )
{
if ! $(self.conditional-initialized)
{
init-conditional ;
}
return $(self.conditional) ;
}
rule non-conditional ( )
{
if ! $(self.conditional-initialized)
{
init-conditional ;
}
return $(self.non-conditional) ;
}
# Returns incidental properties.
#
rule incidental ( )
{
if ! $(self.base-initialized)
{
init-base ;
}
return $(self.incidental) ;
}
rule refine ( ps )
{
if ! $(self.refined.$(ps))
{
local r = [ property.refine $(self.raw) : [ $(ps).raw ] ] ;
if $(r[1]) != "@error"
{
self.refined.$(ps) = [ property-set.create $(r) ] ;
}
else
{
self.refined.$(ps) = $(r) ;
}
}
return $(self.refined.$(ps)) ;
}
rule expand ( )
{
if ! $(self.expanded)
{
self.expanded = [ property-set.create [ feature.expand $(self.raw) ]
] ;
}
return $(self.expanded) ;
}
rule expand-composites ( )
{
if ! $(self.composites)
{
self.composites = [ property-set.create
[ feature.expand-composites $(self.raw) ] ] ;
}
return $(self.composites) ;
}
rule evaluate-conditionals ( context ? )
{
context ?= $(__name__) ;
if ! $(self.evaluated.$(context))
{
self.evaluated.$(context) = [ property-set.create
[ property.evaluate-conditionals-in-context $(self.raw) : [
$(context).raw ] ] ] ;
}
return $(self.evaluated.$(context)) ;
}
rule propagated ( )
{
if ! $(self.propagated-ps)
{
local result ;
for local p in $(self.raw)
{
if propagated in [ feature.attributes $(p:G) ]
{
result += $(p) ;
}
}
self.propagated-ps = [ property-set.create $(result) ] ;
}
return $(self.propagated-ps) ;
}
rule add-defaults ( )
{
if ! $(self.defaults)
{
self.defaults = [ property-set.create
[ feature.add-defaults $(self.raw) ] ] ;
}
return $(self.defaults) ;
}
rule as-path ( )
{
if ! $(self.as-path)
{
self.as-path = [ property.as-path [ base-relevant ] ] ;
}
return $(self.as-path) ;
}
# Computes the path to be used for a target with the given properties.
# Returns a list of
# - the computed path
# - if the path is relative to the build directory, a value of 'true'.
#
rule target-path ( )
{
if ! $(self.target-path)
{
# The <location> feature can be used to explicitly change the
# location of generated targets.
local l = [ get <location> ] ;
if $(l)
{
self.target-path = $(l) ;
}
else
{
local p = [ property-set.hash-maybe [ as-path ] ] ;
# A really ugly hack. The Boost regression test system requires
# specific target paths, and it seems that changing it to handle
# other directory layouts is really hard. For that reason, we
# teach V2 to do the things the regression system requires. The
# value of '<location-prefix>' is prepended to the path.
local prefix = [ get <location-prefix> ] ;
if $(prefix)
{
self.target-path = [ path.join $(prefix) $(p) ] ;
}
else
{
self.target-path = $(p) ;
}
if ! $(self.target-path)
{
self.target-path = . ;
}
# The path is relative to build dir.
self.target-path += true ;
}
}
return $(self.target-path) ;
}
rule add ( ps )
{
if ! $(self.added.$(ps))
{
self.added.$(ps) = [ property-set.create $(self.raw) [ $(ps).raw ] ]
;
}
return $(self.added.$(ps)) ;
}
rule add-raw ( properties * )
{
return [ add [ property-set.create $(properties) ] ] ;
}
# Returns all values of 'feature'.
#
rule get ( feature )
{
if ! $(self.map-built)
{
# For each feature, create a member var and assign all values to it.
# Since all regular member vars start with 'self', there will be no
# conflicts between names.
self.map-built = true ;
for local v in $(self.raw)
{
$(v:G) += $(v:G=) ;
}
}
return $($(feature)) ;
}
# Returns true if the property-set contains all the
# specified properties.
#
rule contains-raw ( properties * )
{
if $(properties) in $(self.raw)
{
return true ;
}
}
# Returns true if the property-set has values for
# all the specified features
#
rule contains-features ( features * )
{
if $(features) in $(self.raw:G)
{
return true ;
}
}
# private
rule init-base ( )
{
for local p in $(self.raw)
{
local att = [ feature.attributes $(p:G) ] ;
# A feature can be both incidental and free, in which case we add it
# to incidental.
if incidental in $(att)
{
self.incidental += $(p) ;
}
else if free in $(att)
{
self.free += $(p) ;
}
else
{
self.base += $(p) ;
}
}
self.base-initialized = true ;
}
rule init-relevant ( )
{
local relevant-features = [ get <relevant> ] ;
relevant-features = [ feature.expand-relevant $(relevant-features) ] ;
relevant-features = <$(relevant-features)> ;
local ignore-relevance = [ modules.peek property-set : .ignore-relevance ] ;
for local p in $(self.raw)
{
if $(ignore-relevance) || $(p:G) in $(relevant-features)
{
local att = [ feature.attributes $(p:G) ] ;
if ! ( incidental in $(att) )
{
self.relevant += $(p) ;
if ! ( free in $(att) )
{
self.base-relevant += $(p) ;
}
}
}
}
self.relevant-initialized = true ;
}
rule init-dependency ( )
{
for local p in $(self.raw)
{
if dependency in [ feature.attributes $(p:G) ]
{
self.dependency += $(p) ;
}
else
{
self.non-dependency += $(p) ;
}
}
self.dependency-initialized = true ;
}
rule init-conditional ( )
{
for local p in $(self.raw)
{
# TODO: Note that non-conditional properties may contain colon (':')
# characters as well, e.g. free or indirect properties. Indirect
# properties for example contain a full Jamfile path in their value
# which on Windows file systems contains ':' as the drive separator.
if ( [ MATCH "(:)" : $(p:G=) ] && ! ( free in [ feature.attributes $(p:G) ] ) ) || $(p:G) = <conditional>
{
self.conditional += $(p) ;
}
else
{
self.non-conditional += $(p) ;
}
}
self.conditional-initialized = true ;
}
}
# This is a temporary measure to help users work around
# any problems. Remove it once we've verified that
# everything works.
if --ignore-relevance in [ modules.peek : ARGV ]
{
.ignore-relevance = true ;
}
# Creates a new 'property-set' instance for the given raw properties or returns
# an already existing one.
#
rule create ( raw-properties * )
{
raw-properties = [ sequence.unique
[ sequence.insertion-sort $(raw-properties) ] ] ;
local key = $(raw-properties:J=-:E=) ;
if ! $(.ps.$(key))
{
.ps.$(key) = [ new property-set $(raw-properties) ] ;
}
return $(.ps.$(key)) ;
}
NATIVE_RULE property-set : create ;
if [ HAS_NATIVE_RULE class@property-set : get : 1 ]
{
NATIVE_RULE class@property-set : get ;
}
if [ HAS_NATIVE_RULE class@property-set : contains-features : 1 ]
{
NATIVE_RULE class@property-set : contains-features ;
}
# Creates a new 'property-set' instance after checking that all properties are
# valid and converting implicit properties into gristed form.
#
rule create-with-validation ( raw-properties * )
{
property.validate $(raw-properties) ;
return [ create [ property.make $(raw-properties) ] ] ;
}
# Creates a property-set from the input given by the user, in the context of
# 'jamfile-module' at 'location'.
#
rule create-from-user-input ( raw-properties * : jamfile-module location )
{
local project-id = [ project.attribute $(jamfile-module) id ] ;
project-id ?= [ path.root $(location) [ path.pwd ] ] ;
return [ property-set.create [ property.translate $(raw-properties)
: $(project-id) : $(location) : $(jamfile-module) ] ] ;
}
# Refines requirements with requirements provided by the user. Specially handles
# "-<property>value" syntax in specification to remove given requirements.
# - parent-requirements -- property-set object with requirements to refine.
# - specification -- string list of requirements provided by the user.
# - project-module -- module to which context indirect features will be
# bound.
# - location -- path to which path features are relative.
#
rule refine-from-user-input ( parent-requirements : specification * :
project-module : location )
{
if ! $(specification)
{
return $(parent-requirements) ;
}
else
{
local add-requirements ;
local remove-requirements ;
for local r in $(specification)
{
local m = [ MATCH "^-(.*)" : $(r) ] ;
if $(m)
{
remove-requirements += $(m) ;
}
else
{
add-requirements += $(r) ;
}
}
if $(remove-requirements)
{
# Need to create a property set, so that path features and indirect
# features are translated just like they are in project
# requirements.
local ps = [ property-set.create-from-user-input
$(remove-requirements) : $(project-module) $(location) ] ;
parent-requirements = [ property-set.create
[ set.difference
[ indirect.difference
[ $(parent-requirements).raw ] : [ $(ps).raw ] ]
: [ $(ps).raw ]
] ] ;
specification = $(add-requirements) ;
}
local requirements = [ property-set.create-from-user-input
$(specification) : $(project-module) $(location) ] ;
return [ $(parent-requirements).refine $(requirements) ] ;
}
}
# Returns a property-set with an empty set of properties.
#
rule empty ( )
{
if ! $(.empty)
{
.empty = [ create ] ;
}
return $(.empty) ;
}
if [ option.get hash : : yes ] = yes
{
rule hash-maybe ( path ? )
{
path ?= "" ;
return [ MD5 $(path) ] ;
}
}
else
{
rule hash-maybe ( path ? )
{
return $(path) ;
}
}
rule __test__ ( )
{
import errors : try catch ;
try ;
create invalid-property ;
catch "Invalid property: 'invalid-property'" ;
}

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,750 @@
# Status: ported, except for tests.
# Base revision: 64070
#
# Copyright 2001, 2002, 2003 Dave Abrahams
# Copyright 2006 Rene Rivera
# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt)
import re
import sys
from functools import total_ordering
from b2.util.utility import *
from b2.build import feature
from b2.util import sequence, qualify_jam_action, is_iterable_typed
import b2.util.set
from b2.manager import get_manager
__re_two_ampersands = re.compile ('&&')
__re_comma = re.compile (',')
__re_split_condition = re.compile ('(.*):(<.*)')
__re_split_conditional = re.compile (r'(.+):<(.+)')
__re_colon = re.compile (':')
__re_has_condition = re.compile (r':<')
__re_separate_condition_and_property = re.compile (r'(.*):(<.*)')
_not_applicable_feature='not-applicable-in-this-context'
feature.feature(_not_applicable_feature, [], ['free'])
__abbreviated_paths = False
class PropertyMeta(type):
"""
This class exists to implement the isinstance() and issubclass()
hooks for the Property class. Since we've introduced the concept of
a LazyProperty, isinstance(p, Property) will fail when p is a LazyProperty.
Implementing both __instancecheck__ and __subclasscheck__ will allow
LazyProperty instances to pass the isinstance() and issubclass() checks for
the Property class.
Additionally, the __call__ method intercepts the call to the Property
constructor to ensure that calling Property with the same arguments
will always return the same Property instance.
"""
_registry = {}
current_id = 1
def __call__(mcs, f, value, condition=None):
"""
This intercepts the call to the Property() constructor.
This exists so that the same arguments will always return the same Property
instance. This allows us to give each instance a unique ID.
"""
from b2.build.feature import Feature
if not isinstance(f, Feature):
f = feature.get(f)
if condition is None:
condition = []
key = (f, value) + tuple(sorted(condition))
if key not in mcs._registry:
instance = super(PropertyMeta, mcs).__call__(f, value, condition)
mcs._registry[key] = instance
return mcs._registry[key]
@staticmethod
def check(obj):
return (hasattr(obj, 'feature') and
hasattr(obj, 'value') and
hasattr(obj, 'condition'))
def __instancecheck__(self, instance):
return self.check(instance)
def __subclasscheck__(self, subclass):
return self.check(subclass)
@total_ordering
class Property(object):
__slots__ = ('feature', 'value', 'condition', '_to_raw', '_hash', 'id')
__metaclass__ = PropertyMeta
def __init__(self, f, value, condition=None):
assert(f.free or ':' not in value)
if condition is None:
condition = []
self.feature = f
self.value = value
self.condition = condition
self._hash = hash((self.feature, self.value) + tuple(sorted(self.condition)))
self.id = PropertyMeta.current_id
# increment the id counter.
# this allows us to take a list of Property
# instances and use their unique integer ID
# to create a key for PropertySet caching. This is
# much faster than string comparison.
PropertyMeta.current_id += 1
condition_str = ''
if condition:
condition_str = ",".join(str(p) for p in self.condition) + ':'
self._to_raw = '{}<{}>{}'.format(condition_str, f.name, value)
def to_raw(self):
return self._to_raw
def __str__(self):
return self._to_raw
def __hash__(self):
return self._hash
def __eq__(self, other):
return self._hash == other._hash
def __lt__(self, other):
return (self.feature.name, self.value) < (other.feature.name, other.value)
@total_ordering
class LazyProperty(object):
def __init__(self, feature_name, value, condition=None):
if condition is None:
condition = []
self.__property = Property(
feature.get(_not_applicable_feature), feature_name + value, condition=condition)
self.__name = feature_name
self.__value = value
self.__condition = condition
self.__feature = None
def __getattr__(self, item):
if self.__feature is None:
try:
self.__feature = feature.get(self.__name)
self.__property = Property(self.__feature, self.__value, self.__condition)
except KeyError:
pass
return getattr(self.__property, item)
def __hash__(self):
return hash(self.__property)
def __str__(self):
return self.__property._to_raw
def __eq__(self, other):
return self.__property == other
def __lt__(self, other):
return (self.feature.name, self.value) < (other.feature.name, other.value)
def create_from_string(s, allow_condition=False, allow_missing_value=False):
assert isinstance(s, basestring)
assert isinstance(allow_condition, bool)
assert isinstance(allow_missing_value, bool)
condition = []
import types
if not isinstance(s, types.StringType):
print type(s)
if __re_has_condition.search(s):
if not allow_condition:
raise BaseException("Conditional property is not allowed in this context")
m = __re_separate_condition_and_property.match(s)
condition = m.group(1)
s = m.group(2)
# FIXME: break dependency cycle
from b2.manager import get_manager
if condition:
condition = [create_from_string(x) for x in condition.split(',')]
feature_name = get_grist(s)
if not feature_name:
if feature.is_implicit_value(s):
f = feature.implied_feature(s)
value = s
p = Property(f, value, condition=condition)
else:
raise get_manager().errors()("Invalid property '%s' -- unknown feature" % s)
else:
value = get_value(s)
if not value and not allow_missing_value:
get_manager().errors()("Invalid property '%s' -- no value specified" % s)
if feature.valid(feature_name):
p = Property(feature.get(feature_name), value, condition=condition)
else:
# In case feature name is not known, it is wrong to do a hard error.
# Feature sets change depending on the toolset. So e.g.
# <toolset-X:version> is an unknown feature when using toolset Y.
#
# Ideally we would like to ignore this value, but most of
# Boost.Build code expects that we return a valid Property. For this
# reason we use a sentinel <not-applicable-in-this-context> feature.
#
# The underlying cause for this problem is that python port Property
# is more strict than its Jam counterpart and must always reference
# a valid feature.
p = LazyProperty(feature_name, value, condition=condition)
return p
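# A minimal usage sketch (illustrative only, not part of this module). The
# feature names assume a typical B2 feature set:
#
#   p = create_from_string('<optimization>speed')
#   p.feature.name, p.value           # ('optimization', 'speed')
#   c = create_from_string('<toolset>gcc:<define>HAVE_GCC', allow_condition=True)
#   [str(x) for x in c.condition]     # ['<toolset>gcc']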
def create_from_strings(string_list, allow_condition=False):
assert is_iterable_typed(string_list, basestring)
return [create_from_string(s, allow_condition) for s in string_list]
def reset ():
""" Clear the module state. This is mainly for testing purposes.
"""
global __results
# A cache of results from as_path
__results = {}
reset ()
def set_abbreviated_paths(on=True):
global __abbreviated_paths
if on == 'off':
on = False
on = bool(on)
__abbreviated_paths = on
def get_abbreviated_paths():
return __abbreviated_paths or '--abbreviated-paths' in sys.argv
def path_order (x, y):
""" Helper for as_path, below. Orders properties with the implicit ones
first, and within the two sections in alphabetical order of feature
name.
"""
if x == y:
return 0
xg = get_grist (x)
yg = get_grist (y)
if yg and not xg:
return -1
elif xg and not yg:
return 1
else:
if not xg:
x = feature.expand_subfeatures([x])
y = feature.expand_subfeatures([y])
if x < y:
return -1
elif x > y:
return 1
else:
return 0
def identify(string):
return string
# Uses Property
def refine (properties, requirements):
""" Refines 'properties' by overriding any non-free properties
for which a different value is specified in 'requirements'.
Conditional requirements are just added without modification.
Returns the resulting list of properties.
"""
assert is_iterable_typed(properties, Property)
assert is_iterable_typed(requirements, Property)
# The result has no duplicates, so we store it in a set
result = set()
# Records all requirements.
required = {}
# All the elements of requirements should be present in the result
# Record them so that we can handle 'properties'.
for r in requirements:
# Don't consider conditional requirements.
if not r.condition:
required[r.feature] = r
for p in properties:
# Skip conditional properties
if p.condition:
result.add(p)
# No processing for free properties
elif p.feature.free:
result.add(p)
else:
if p.feature in required:
result.add(required[p.feature])
else:
result.add(p)
return sequence.unique(list(result) + requirements)
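# A minimal usage sketch (illustrative only, not part of this module),
# assuming 'optimization' is a regular (non-free) feature and 'define' is free:
#
#   props = create_from_strings(['<optimization>off', '<define>A'])
#   reqs  = create_from_strings(['<optimization>speed', '<define>B'])
#   refine(props, reqs)
#   # -> <optimization>speed (overridden), plus <define>A and <define>B (free, kept)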
def translate_paths (properties, path):
""" Interpret all path properties in 'properties' as relative to 'path'
The property values are assumed to be in system-specific form, and
will be translated into normalized form.
"""
assert is_iterable_typed(properties, Property)
result = []
for p in properties:
if p.feature.path:
values = __re_two_ampersands.split(p.value)
new_value = "&&".join(os.path.normpath(os.path.join(path, v)) for v in values)
if new_value != p.value:
result.append(Property(p.feature, new_value, p.condition))
else:
result.append(p)
else:
result.append (p)
return result
def translate_indirect(properties, context_module):
"""Assumes that all feature values that start with '@' are
names of rules defined in 'context_module'. Such rules can be
either local to the module or global. Local rules are qualified
with the name of the module."""
assert is_iterable_typed(properties, Property)
assert isinstance(context_module, basestring)
result = []
for p in properties:
if p.value[0] == '@':
q = qualify_jam_action(p.value[1:], context_module)
get_manager().engine().register_bjam_action(q)
result.append(Property(p.feature, '@' + q, p.condition))
else:
result.append(p)
return result
def validate (properties):
""" Exit with error if any of the properties is not valid.
properties may be a single property or a sequence of properties.
"""
if isinstance(properties, Property):
properties = [properties]
assert is_iterable_typed(properties, Property)
for p in properties:
__validate1(p)
def expand_subfeatures_in_conditions (properties):
assert is_iterable_typed(properties, Property)
result = []
for p in properties:
if not p.condition:
result.append(p)
else:
expanded = []
for c in p.condition:
                # It is common that the condition includes a toolset which
                # was never defined, or mentions subfeatures which were never
                # defined. In that case, validation would only produce a
                # spurious error, so don't validate.
expanded.extend(feature.expand_subfeatures ([c], True))
# we need to keep LazyProperties lazy
if isinstance(p, LazyProperty):
value = p.value
feature_name = get_grist(value)
value = value.replace(feature_name, '')
result.append(LazyProperty(feature_name, value, condition=expanded))
else:
result.append(Property(p.feature, p.value, expanded))
return result
# FIXME: this should go
def split_conditional (property):
""" If 'property' is conditional property, returns
condition and the property, e.g
<variant>debug,<toolset>gcc:<inlining>full will become
<variant>debug,<toolset>gcc <inlining>full.
Otherwise, returns empty string.
"""
assert isinstance(property, basestring)
m = __re_split_conditional.match (property)
if m:
return (m.group (1), '<' + m.group (2))
return None
def select (features, properties):
""" Selects properties which correspond to any of the given features.
"""
assert is_iterable_typed(properties, basestring)
    # add any missing angle brackets
    features = add_grist (features)
return [p for p in properties if get_grist(p) in features]
def validate_property_sets (sets):
if __debug__:
from .property_set import PropertySet
assert is_iterable_typed(sets, PropertySet)
for s in sets:
validate(s.all())
def evaluate_conditionals_in_context (properties, context):
""" Removes all conditional properties which conditions are not met
For those with met conditions, removes the condition. Properties
in conditions are looked up in 'context'
"""
if __debug__:
from .property_set import PropertySet
assert is_iterable_typed(properties, Property)
assert isinstance(context, PropertySet)
base = []
conditional = []
for p in properties:
if p.condition:
conditional.append (p)
else:
base.append (p)
result = base[:]
for p in conditional:
# Evaluate condition
# FIXME: probably inefficient
if all(x in context for x in p.condition):
result.append(Property(p.feature, p.value))
return result
def change (properties, feature, value = None):
""" Returns a modified version of properties with all values of the
given feature replaced by the given value.
If 'value' is None the feature will be removed.
"""
assert is_iterable_typed(properties, basestring)
assert isinstance(feature, basestring)
assert isinstance(value, (basestring, type(None)))
result = []
feature = add_grist (feature)
for p in properties:
if get_grist (p) == feature:
if value:
result.append (replace_grist (value, feature))
else:
result.append (p)
return result
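# Illustrative sketch, not part of the original module, mirroring the jam
# __test__ case further below: replacing every <toolset> value with 'kylix'.
def _change_example():
    # -> ['<toolset>kylix', '<include>a']
    return change(['<toolset>gcc', '<include>a'], 'toolset', 'kylix')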
################################################################
# Private functions
def __validate1 (property):
""" Exit with error if property is not valid.
"""
assert isinstance(property, Property)
msg = None
if not property.feature.free:
feature.validate_value_string (property.feature, property.value)
###################################################################
# Still to port.
# Original lines are prefixed with "# "
#
#
# import utility : ungrist ;
# import sequence : unique ;
# import errors : error ;
# import feature ;
# import regex ;
# import sequence ;
# import set ;
# import path ;
# import assert ;
#
#
# rule validate-property-sets ( property-sets * )
# {
# for local s in $(property-sets)
# {
# validate [ feature.split $(s) ] ;
# }
# }
#
def remove(attributes, properties):
"""Returns a property sets which include all the elements
in 'properties' that do not have attributes listed in 'attributes'."""
if isinstance(attributes, basestring):
attributes = [attributes]
assert is_iterable_typed(attributes, basestring)
assert is_iterable_typed(properties, basestring)
result = []
for e in properties:
attributes_new = feature.attributes(get_grist(e))
has_common_features = 0
for a in attributes_new:
if a in attributes:
has_common_features = 1
break
if not has_common_features:
            result.append(e)
return result
def take(attributes, properties):
"""Returns a property set which include all
properties in 'properties' that have any of 'attributes'."""
assert is_iterable_typed(attributes, basestring)
assert is_iterable_typed(properties, basestring)
result = []
for e in properties:
if b2.util.set.intersection(attributes, feature.attributes(get_grist(e))):
result.append(e)
return result
def translate_dependencies(properties, project_id, location):
assert is_iterable_typed(properties, Property)
assert isinstance(project_id, basestring)
assert isinstance(location, basestring)
result = []
for p in properties:
if not p.feature.dependency:
result.append(p)
else:
v = p.value
m = re.match("(.*)//(.*)", v)
if m:
rooted = m.group(1)
if rooted[0] == '/':
# Either project id or absolute Linux path, do nothing.
pass
else:
rooted = os.path.join(os.getcwd(), location, rooted)
result.append(Property(p.feature, rooted + "//" + m.group(2), p.condition))
elif os.path.isabs(v):
result.append(p)
else:
result.append(Property(p.feature, project_id + "//" + v, p.condition))
return result
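# Hedged illustration, not part of the original module: how a relative
# dependency reference is rebased. It assumes <library> is declared as a
# dependency feature (the builtin definitions do this); the paths are made up.
def _translate_dependencies_example():
    p = create_from_string('<library>util//bar')
    [q] = translate_dependencies([p], '/example-project', 'src')
    # 'util' is not rooted, so it is rebased against os.path.join(cwd, 'src'):
    # q.value == os.path.join(os.getcwd(), 'src', 'util') + '//bar'
    return q.value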
class PropertyMap:
""" Class which maintains a property set -> string mapping.
"""
def __init__ (self):
self.__properties = []
self.__values = []
def insert (self, properties, value):
""" Associate value with properties.
"""
assert is_iterable_typed(properties, basestring)
assert isinstance(value, basestring)
self.__properties.append(properties)
self.__values.append(value)
def find (self, properties):
""" Return the value associated with properties
or any subset of it. If more than one
subset has value assigned to it, return the
value for the longest subset, if it's unique.
"""
assert is_iterable_typed(properties, basestring)
return self.find_replace (properties)
def find_replace(self, properties, value=None):
assert is_iterable_typed(properties, basestring)
assert isinstance(value, (basestring, type(None)))
matches = []
match_ranks = []
for i in range(0, len(self.__properties)):
p = self.__properties[i]
if b2.util.set.contains (p, properties):
matches.append (i)
match_ranks.append(len(p))
best = sequence.select_highest_ranked (matches, match_ranks)
if not best:
return None
if len (best) > 1:
raise NoBestMatchingAlternative ()
best = best [0]
original = self.__values[best]
if value:
self.__values[best] = value
return original
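# Hypothetical sketch, not part of the original module: the intended use of
# PropertyMap. The property strings and values below are made up.
def _property_map_example():
    pm = PropertyMap()
    pm.insert(['<toolset>gcc'], 'g++')
    pm.insert(['<toolset>gcc', '<variant>debug'], 'g++ -g')
    # find() returns the value whose key is the largest subset of the query,
    # so the more specific two-property entry wins here.
    return pm.find(['<toolset>gcc', '<variant>debug', '<rtti>on'])  # 'g++ -g'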
# local rule __test__ ( )
# {
# import errors : try catch ;
# import feature ;
# import feature : feature subfeature compose ;
#
# # local rules must be explicitly re-imported
# import property : path-order ;
#
# feature.prepare-test property-test-temp ;
#
# feature toolset : gcc : implicit symmetric ;
# subfeature toolset gcc : version : 2.95.2 2.95.3 2.95.4
# 3.0 3.0.1 3.0.2 : optional ;
# feature define : : free ;
# feature runtime-link : dynamic static : symmetric link-incompatible ;
# feature optimization : on off ;
# feature variant : debug release : implicit composite symmetric ;
# feature rtti : on off : link-incompatible ;
#
# compose <variant>debug : <define>_DEBUG <optimization>off ;
# compose <variant>release : <define>NDEBUG <optimization>on ;
#
# import assert ;
# import "class" : new ;
#
# validate <toolset>gcc <toolset>gcc-3.0.1 : $(test-space) ;
#
# assert.result <toolset>gcc <rtti>off <define>FOO
# : refine <toolset>gcc <rtti>off
# : <define>FOO
# : $(test-space)
# ;
#
# assert.result <toolset>gcc <optimization>on
# : refine <toolset>gcc <optimization>off
# : <optimization>on
# : $(test-space)
# ;
#
# assert.result <toolset>gcc <rtti>off
# : refine <toolset>gcc : <rtti>off : $(test-space)
# ;
#
# assert.result <toolset>gcc <rtti>off <rtti>off:<define>FOO
# : refine <toolset>gcc : <rtti>off <rtti>off:<define>FOO
# : $(test-space)
# ;
#
# assert.result <toolset>gcc:<define>foo <toolset>gcc:<define>bar
# : refine <toolset>gcc:<define>foo : <toolset>gcc:<define>bar
# : $(test-space)
# ;
#
# assert.result <define>MY_RELEASE
# : evaluate-conditionals-in-context
# <variant>release,<rtti>off:<define>MY_RELEASE
# : <toolset>gcc <variant>release <rtti>off
#
# ;
#
# try ;
# validate <feature>value : $(test-space) ;
# catch "Invalid property '<feature>value': unknown feature 'feature'." ;
#
# try ;
# validate <rtti>default : $(test-space) ;
# catch \"default\" is not a known value of feature <rtti> ;
#
# validate <define>WHATEVER : $(test-space) ;
#
# try ;
# validate <rtti> : $(test-space) ;
# catch "Invalid property '<rtti>': No value specified for feature 'rtti'." ;
#
# try ;
# validate value : $(test-space) ;
# catch "value" is not a value of an implicit feature ;
#
#
# assert.result <rtti>on
# : remove free implicit : <toolset>gcc <define>foo <rtti>on : $(test-space) ;
#
# assert.result <include>a
# : select include : <include>a <toolset>gcc ;
#
# assert.result <include>a
# : select include bar : <include>a <toolset>gcc ;
#
# assert.result <include>a <toolset>gcc
# : select include <bar> <toolset> : <include>a <toolset>gcc ;
#
# assert.result <toolset>kylix <include>a
# : change <toolset>gcc <include>a : <toolset> kylix ;
#
# # Test ordinary properties
# assert.result
# : split-conditional <toolset>gcc
# ;
#
# # Test properties with ":"
# assert.result
# : split-conditional <define>FOO=A::B
# ;
#
# # Test conditional feature
# assert.result <toolset>gcc,<toolset-gcc:version>3.0 <define>FOO
# : split-conditional <toolset>gcc,<toolset-gcc:version>3.0:<define>FOO
# ;
#
# feature.finish-test property-test-temp ;
# }
#

View File

@@ -0,0 +1,498 @@
# Status: ported.
# Base revision: 40480
# Copyright (C) Vladimir Prus 2002. Permission to copy, use, modify, sell and
# distribute this software is granted provided this copyright notice appears in
# all copies. This software is provided "as is" without express or implied
# warranty, and with no claim as to its suitability for any purpose.
import hashlib
import bjam
from b2.util.utility import *
import property, feature
import b2.build.feature
from b2.exceptions import *
from b2.build.property import get_abbreviated_paths
from b2.util.sequence import unique
from b2.util.set import difference
from b2.util import cached, abbreviate_dashed, is_iterable_typed
from b2.manager import get_manager
def reset ():
""" Clear the module state. This is mainly for testing purposes.
"""
global __cache
# A cache of property sets
# TODO: use a map of weak refs?
__cache = {}
reset ()
def create (raw_properties = []):
""" Creates a new 'PropertySet' instance for the given raw properties,
or returns an already existing one.
"""
assert (is_iterable_typed(raw_properties, property.Property)
or is_iterable_typed(raw_properties, basestring))
# FIXME: propagate to callers.
if len(raw_properties) > 0 and isinstance(raw_properties[0], property.Property):
x = raw_properties
else:
x = [property.create_from_string(ps) for ps in raw_properties]
# These two lines of code are optimized to the current state
# of the Property class. Since this function acts as the caching
# frontend to the PropertySet class modifying these two lines
# could have a severe performance penalty. Be careful.
# It would be faster to sort by p.id, but some projects may rely
# on the fact that the properties are ordered alphabetically. So,
# we maintain alphabetical sorting so as to maintain backward compatibility.
x = sorted(set(x), key=lambda p: (p.feature.name, p.value, p.condition))
key = tuple(p.id for p in x)
if key not in __cache:
__cache [key] = PropertySet(x)
return __cache [key]
def create_with_validation (raw_properties):
""" Creates new 'PropertySet' instances after checking
that all properties are valid and converting implicit
properties into gristed form.
"""
assert is_iterable_typed(raw_properties, basestring)
properties = [property.create_from_string(s) for s in raw_properties]
property.validate(properties)
return create(properties)
def empty ():
""" Returns PropertySet with empty set of properties.
"""
return create ()
def create_from_user_input(raw_properties, jamfile_module, location):
"""Creates a property-set from the input given by the user, in the
context of 'jamfile-module' at 'location'"""
assert is_iterable_typed(raw_properties, basestring)
assert isinstance(jamfile_module, basestring)
assert isinstance(location, basestring)
properties = property.create_from_strings(raw_properties, True)
properties = property.translate_paths(properties, location)
properties = property.translate_indirect(properties, jamfile_module)
project_id = get_manager().projects().attributeDefault(jamfile_module, 'id', None)
if not project_id:
project_id = os.path.abspath(location)
properties = property.translate_dependencies(properties, project_id, location)
properties = property.expand_subfeatures_in_conditions(properties)
return create(properties)
def refine_from_user_input(parent_requirements, specification, jamfile_module,
location):
"""Refines requirements with requirements provided by the user.
Specially handles "-<property>value" syntax in specification
to remove given requirements.
- parent-requirements -- property-set object with requirements
to refine
    - specification -- string list of requirements provided by the user
- project-module -- the module to which context indirect features
will be bound.
- location -- the path to which path features are relative."""
assert isinstance(parent_requirements, PropertySet)
assert is_iterable_typed(specification, basestring)
assert isinstance(jamfile_module, basestring)
assert isinstance(location, basestring)
if not specification:
return parent_requirements
add_requirements = []
remove_requirements = []
for r in specification:
if r[0] == '-':
remove_requirements.append(r[1:])
else:
add_requirements.append(r)
if remove_requirements:
# Need to create property set, so that path features
# and indirect features are translated just like they
# are in project requirements.
ps = create_from_user_input(remove_requirements,
jamfile_module, location)
parent_requirements = create(difference(parent_requirements.all(),
ps.all()))
specification = add_requirements
requirements = create_from_user_input(specification,
jamfile_module, location)
return parent_requirements.refine(requirements)
class PropertySet:
""" Class for storing a set of properties.
- there's 1<->1 correspondence between identity and value. No
two instances of the class are equal. To maintain this property,
the 'PropertySet.create' rule should be used to create new instances.
Instances are immutable.
        - each property is classified with regard to its effect on build
          results. Incidental properties have no effect on build results, from
          Boost.Build's point of view. Others are either free or non-free, which
          we call 'base'. Each property belongs to exactly one of those
          categories and it is possible to get the list of properties in each
          category.
          In addition, it is possible to get the list of properties with a
          specific attribute.
        - several operations, like refine and as_path, are provided. They all
          use caching whenever possible.
"""
def __init__ (self, properties=None):
if properties is None:
properties = []
assert is_iterable_typed(properties, property.Property)
self.all_ = properties
self._all_set = {p.id for p in properties}
self.incidental_ = []
self.free_ = []
self.base_ = []
self.dependency_ = []
self.non_dependency_ = []
self.conditional_ = []
self.non_conditional_ = []
self.propagated_ = []
self.link_incompatible = []
# A cache of refined properties.
self.refined_ = {}
# A cache of property sets created by adding properties to this one.
self.added_ = {}
# Cache for the default properties.
self.defaults_ = None
# Cache for the expanded properties.
self.expanded_ = None
# Cache for the expanded composite properties
self.composites_ = None
# Cache for property set with expanded subfeatures
self.subfeatures_ = None
# Cache for the property set containing propagated properties.
self.propagated_ps_ = None
        # A map from each feature to its values.
self.feature_map_ = None
# A tuple (target path, is relative to build directory)
self.target_path_ = None
self.as_path_ = None
# A cache for already evaluated sets.
self.evaluated_ = {}
# stores the list of LazyProperty instances.
# these are being kept separate from the normal
# Property instances so that when this PropertySet
# tries to return one of its attributes, it
# will then try to evaluate the LazyProperty instances
# first before returning.
self.lazy_properties = []
for p in properties:
f = p.feature
if isinstance(p, property.LazyProperty):
self.lazy_properties.append(p)
# A feature can be both incidental and free,
# in which case we add it to incidental.
elif f.incidental:
self.incidental_.append(p)
elif f.free:
self.free_.append(p)
else:
self.base_.append(p)
if p.condition:
self.conditional_.append(p)
else:
self.non_conditional_.append(p)
if f.dependency:
self.dependency_.append (p)
elif not isinstance(p, property.LazyProperty):
self.non_dependency_.append (p)
if f.propagated:
self.propagated_.append(p)
if f.link_incompatible:
self.link_incompatible.append(p)
def all(self):
return self.all_
def raw (self):
""" Returns the list of stored properties.
"""
# create a new list due to the LazyProperties.
# this gives them a chance to evaluate to their
# true Property(). This approach is being
# taken since calculations should not be using
# PropertySet.raw()
return [p._to_raw for p in self.all_]
def __str__(self):
return ' '.join(p._to_raw for p in self.all_)
def base (self):
""" Returns properties that are neither incidental nor free.
"""
result = [p for p in self.lazy_properties
if not(p.feature.incidental or p.feature.free)]
result.extend(self.base_)
return result
def free (self):
""" Returns free properties which are not dependency properties.
"""
result = [p for p in self.lazy_properties
if not p.feature.incidental and p.feature.free]
result.extend(self.free_)
return result
def non_free(self):
return self.base() + self.incidental()
def dependency (self):
""" Returns dependency properties.
"""
result = [p for p in self.lazy_properties if p.feature.dependency]
result.extend(self.dependency_)
        return result
def non_dependency (self):
""" Returns properties that are not dependencies.
"""
result = [p for p in self.lazy_properties if not p.feature.dependency]
result.extend(self.non_dependency_)
return result
def conditional (self):
""" Returns conditional properties.
"""
return self.conditional_
def non_conditional (self):
""" Returns properties that are not conditional.
"""
return self.non_conditional_
def incidental (self):
""" Returns incidental properties.
"""
result = [p for p in self.lazy_properties if p.feature.incidental]
result.extend(self.incidental_)
return result
def refine (self, requirements):
""" Refines this set's properties using the requirements passed as an argument.
"""
assert isinstance(requirements, PropertySet)
if requirements not in self.refined_:
r = property.refine(self.all_, requirements.all_)
self.refined_[requirements] = create(r)
return self.refined_[requirements]
def expand (self):
if not self.expanded_:
expanded = feature.expand(self.all_)
self.expanded_ = create(expanded)
return self.expanded_
def expand_subfeatures(self):
if not self.subfeatures_:
self.subfeatures_ = create(feature.expand_subfeatures(self.all_))
return self.subfeatures_
def evaluate_conditionals(self, context=None):
assert isinstance(context, (PropertySet, type(None)))
if not context:
context = self
if context not in self.evaluated_:
# FIXME: figure why the call messes up first parameter
self.evaluated_[context] = create(
property.evaluate_conditionals_in_context(self.all(), context))
return self.evaluated_[context]
def propagated (self):
if not self.propagated_ps_:
self.propagated_ps_ = create (self.propagated_)
return self.propagated_ps_
def add_defaults (self):
# FIXME: this caching is invalidated when new features
        # are declared inside non-root Jamfiles.
if not self.defaults_:
expanded = feature.add_defaults(self.all_)
self.defaults_ = create(expanded)
return self.defaults_
def as_path (self):
if not self.as_path_:
def path_order (p1, p2):
i1 = p1.feature.implicit
i2 = p2.feature.implicit
if i1 != i2:
return i2 - i1
else:
return cmp(p1.feature.name, p2.feature.name)
# trim redundancy
properties = feature.minimize(self.base_)
# sort according to path_order
properties.sort (path_order)
components = []
for p in properties:
f = p.feature
if f.implicit:
components.append(p.value)
else:
value = f.name.replace(':', '-') + "-" + p.value
if property.get_abbreviated_paths():
value = abbreviate_dashed(value)
components.append(value)
self.as_path_ = '/'.join(components)
return self.as_path_
def target_path (self):
""" Computes the target path that should be used for
target with these properties.
Returns a tuple of
- the computed path
- if the path is relative to build directory, a value of
'true'.
"""
if not self.target_path_:
# The <location> feature can be used to explicitly
# change the location of generated targets
l = self.get ('<location>')
if l:
computed = l[0]
is_relative = False
else:
p = self.as_path()
if hash_maybe:
p = hash_maybe(p)
# Really, an ugly hack. Boost regression test system requires
# specific target paths, and it seems that changing it to handle
# other directory layout is really hard. For that reason,
# we teach V2 to do the things regression system requires.
                # The value of '<location-prefix>' is prepended to the path.
prefix = self.get ('<location-prefix>')
if prefix:
if len (prefix) > 1:
raise AlreadyDefined ("Two <location-prefix> properties specified: '%s'" % prefix)
computed = os.path.join(prefix[0], p)
else:
computed = p
if not computed:
computed = "."
is_relative = True
self.target_path_ = (computed, is_relative)
return self.target_path_
def add (self, ps):
""" Creates a new property set containing the properties in this one,
plus the ones of the property set passed as argument.
"""
assert isinstance(ps, PropertySet)
if ps not in self.added_:
self.added_[ps] = create(self.all_ + ps.all())
return self.added_[ps]
def add_raw (self, properties):
""" Creates a new property set containing the properties in this one,
plus the ones passed as argument.
"""
return self.add (create (properties))
def get (self, feature):
""" Returns all values of 'feature'.
"""
if type(feature) == type([]):
feature = feature[0]
if not isinstance(feature, b2.build.feature.Feature):
feature = b2.build.feature.get(feature)
assert isinstance(feature, b2.build.feature.Feature)
if self.feature_map_ is None:
self.feature_map_ = {}
for v in self.all_:
if v.feature not in self.feature_map_:
self.feature_map_[v.feature] = []
self.feature_map_[v.feature].append(v.value)
return self.feature_map_.get(feature, [])
@cached
def get_properties(self, feature):
"""Returns all contained properties associated with 'feature'"""
if not isinstance(feature, b2.build.feature.Feature):
feature = b2.build.feature.get(feature)
assert isinstance(feature, b2.build.feature.Feature)
result = []
for p in self.all_:
if p.feature == feature:
result.append(p)
return result
def __contains__(self, item):
return item.id in self._all_set
def hash(p):
m = hashlib.md5()
m.update(p)
return m.hexdigest()
hash_maybe = hash if "--hash" in bjam.variable("ARGV") else None
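# Illustrative sketch, not part of the original module: create() interns
# property sets, so logically equal inputs share one PropertySet instance.
# It assumes the builtin 'optimization' and 'define' features are declared.
def _create_example():
    a = create(['<optimization>speed', '<define>FOO'])
    b = create(['<define>FOO', '<optimization>speed'])
    # Both raw lists canonicalise to the same sorted key, so the cached
    # instance is reused.
    return a is b  # True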

View File

@@ -0,0 +1,11 @@
Copyright 2001, 2002 Dave Abrahams
Copyright 2002 Vladimir Prus
Distributed under the Boost Software License, Version 1.0.
(See accompanying file LICENSE_1_0.txt or copy at
http://www.boost.org/LICENSE_1_0.txt)
Development code for new build system. To run unit tests for jam code, execute:
bjam --debug --build-system=test
Comprehensive tests require Python. See ../test/readme.txt

View File

@@ -0,0 +1,163 @@
# Copyright 2003 Dave Abrahams
# Copyright 2002, 2003, 2004, 2005 Vladimir Prus
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE.txt or copy at
# https://www.bfgroup.xyz/b2/LICENSE.txt)
# Implements scanners: objects computing implicit dependencies for files, such
# as includes in C++.
#
# A scanner has a regular expression used to find the dependencies, some data
# needed to interpret those dependencies (e.g., include paths), and code which
# establishes the needed relationships between actual jam targets.
#
# Scanner objects are created by actions when they try to actualize virtual
# targets, passed to the virtual-target.actualize() method and are then
# associated with actual targets. It is possible to use several scanners for a
# single virtual-target. For example, a single source file might be compiled
# twice - each time using a different include path. In this case, two separate
# actual targets will be created, each having a scanner of its own.
#
# Typically, scanners are created from target type and the action's properties,
# using the rule 'get' in this module. Directly creating scanners is not
# recommended, as it might create multiple equivalent but different instances,
# and lead to unnecessary actual target duplication. However, actions can also
# create scanners in a special way, instead of relying on just the target type.
import "class" : new ;
import property ;
import property-set ;
import virtual-target ;
# Base scanner class.
#
class scanner
{
rule __init__ ( )
{
}
# Returns a pattern to use for scanning.
#
rule pattern ( )
{
import errors : error : errors.error ;
errors.error "method must be overridden" ;
}
# Establish necessary relationship between targets, given an actual target
# being scanned and a list of pattern matches in that file.
#
rule process ( target : matches * )
{
import errors : error : errors.error ;
errors.error "method must be overridden" ;
}
}
# Registers a new scanner class, specifying a set of properties relevant to
# this scanner. Constructor for that class should have one parameter: a list of
# properties.
#
rule register ( scanner-class : relevant-properties * )
{
.registered += $(scanner-class) ;
.relevant-properties.$(scanner-class) = $(relevant-properties) ;
}
# Common scanner class, usable when there is only one kind of includes (unlike
# C, where "" and <> includes have different search paths).
#
class common-scanner : scanner
{
import scanner ;
rule __init__ ( includes * )
{
scanner.__init__ ;
self.includes = $(includes) ;
}
rule process ( target : matches * : binding )
{
local target_path = [ NORMALIZE_PATH $(binding:D) ] ;
NOCARE $(matches) ;
INCLUDES $(target) : $(matches) ;
SEARCH on $(matches) = $(target_path) $(self.includes:G=) ;
ISFILE $(matches) ;
scanner.propagate $(__name__) : $(matches) : $(target) ;
}
}
# Returns an instance of a previously registered scanner, with the specified
# properties.
#
rule get ( scanner-class : property-set )
{
if ! $(scanner-class) in $(.registered)
{
import errors ;
errors.error "attempt to get an unregistered scanner" ;
}
local r = $(.rv-cache.$(property-set)) ;
if ! $(r)
{
r = [ property-set.create
[ property.select $(.relevant-properties.$(scanner-class)) :
[ $(property-set).raw ] ] ] ;
.rv-cache.$(property-set) = $(r) ;
}
if ! $(scanner.$(scanner-class).$(r:J=-))
{
local s = [ new $(scanner-class) [ $(r).raw ] ] ;
scanner.$(scanner-class).$(r:J=-) = $(s) ;
}
return $(scanner.$(scanner-class).$(r:J=-)) ;
}
# Installs the specified scanner on the actual target 'target'.
#
rule install ( scanner : target )
{
HDRSCAN on $(target) = [ $(scanner).pattern ] ;
SCANNER on $(target) = $(scanner) ;
HDRRULE on $(target) = scanner.hdrrule ;
# Scanner reflects differences in properties affecting binding of 'target',
    # which will be known when processing includes for it, and gives information
# on how to interpret different include types (e.g. quoted vs. those in
# angle brackets in C files).
HDRGRIST on $(target) = $(scanner) ;
}
# Propagate scanner settings from 'including-target' to 'targets'.
#
rule propagate ( scanner : targets * : including-target )
{
HDRSCAN on $(targets) = [ on $(including-target) return $(HDRSCAN) ] ;
SCANNER on $(targets) = $(scanner) ;
HDRRULE on $(targets) = scanner.hdrrule ;
HDRGRIST on $(targets) = [ on $(including-target) return $(HDRGRIST) ] ;
}
rule hdrrule ( target : matches * : binding )
{
local scanner = [ on $(target) return $(SCANNER) ] ;
$(scanner).process $(target) : $(matches) : $(binding) ;
}
# hdrrule must be available at global scope so it can be invoked by header
# scanning.
#
IMPORT scanner : hdrrule : : scanner.hdrrule ;

View File

@@ -0,0 +1,167 @@
# Status: ported.
# Base revision: 45462
#
# Copyright 2003 Dave Abrahams
# Copyright 2002, 2003, 2004, 2005 Vladimir Prus
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt)
# Implements scanners: objects that compute implicit dependencies for
# files, such as includes in C++.
#
# Scanner has a regular expression used to find dependencies, some
# data needed to interpret those dependencies (for example, include
# paths), and code which actually establishes the needed relationships
# between actual jam targets.
#
# Scanner objects are created by actions, when they try to actualize
# virtual targets, passed to 'virtual-target.actualize' method and are
# then associated with actual targets. It is possible to use
# several scanners for a virtual-target. For example, a single source
# might be used by to compile actions, with different include paths.
# In this case, two different actual targets will be created, each
# having scanner of its own.
#
# Typically, scanners are created from target type and action's
# properties, using the rule 'get' in this module. Directly creating
# scanners is not recommended, because it might create many equivalent
# but different instances, and lead to unneeded duplication of
# actual targets. However, actions can also create scanners in a special
# way, instead of relying on just target type.
import property
import bjam
import os
from b2.manager import get_manager
from b2.util import is_iterable_typed
def reset ():
""" Clear the module state. This is mainly for testing purposes.
"""
global __scanners, __rv_cache, __scanner_cache
# Maps registered scanner classes to relevant properties
__scanners = {}
# A cache of scanners.
# The key is: class_name.properties_tag, where properties_tag is the concatenation
# of all relevant properties, separated by '-'
__scanner_cache = {}
reset ()
def register(scanner_class, relevant_properties):
""" Registers a new generator class, specifying a set of
properties relevant to this scanner. Ctor for that class
should have one parameter: list of properties.
"""
assert issubclass(scanner_class, Scanner)
assert isinstance(relevant_properties, basestring)
__scanners[str(scanner_class)] = relevant_properties
def registered(scanner_class):
""" Returns true iff a scanner of that class is registered
"""
return str(scanner_class) in __scanners
def get(scanner_class, properties):
""" Returns an instance of previously registered scanner
with the specified properties.
"""
assert issubclass(scanner_class, Scanner)
assert is_iterable_typed(properties, basestring)
scanner_name = str(scanner_class)
if not registered(scanner_name):
raise BaseException ("attempt to get unregistered scanner: %s" % scanner_name)
relevant_properties = __scanners[scanner_name]
r = property.select(relevant_properties, properties)
scanner_id = scanner_name + '.' + '-'.join(r)
if scanner_id not in __scanner_cache:
__scanner_cache[scanner_id] = scanner_class(r)
return __scanner_cache[scanner_id]
class Scanner:
""" Base scanner class.
"""
def __init__ (self):
pass
def pattern (self):
""" Returns a pattern to use for scanning.
"""
raise BaseException ("method must be overridden")
def process (self, target, matches, binding):
""" Establish necessary relationship between targets,
given actual target being scanned, and a list of
pattern matches in that file.
"""
raise BaseException ("method must be overridden")
# Common scanner class, which can be used when there's only one
# kind of includes (unlike C, where "" and <> includes have different
# search paths).
class CommonScanner(Scanner):
def __init__ (self, includes):
Scanner.__init__(self)
self.includes = includes
def process(self, target, matches, binding):
target_path = os.path.normpath(os.path.dirname(binding[0]))
bjam.call("mark-included", target, matches)
get_manager().engine().set_target_variable(matches, "SEARCH",
[target_path] + self.includes)
get_manager().scanners().propagate(self, matches)
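# Hypothetical sketch, not part of the original module: a minimal scanner in
# the spirit of the C/C++ include scanners. The regular expression and the use
# of 'include' as the relevant property are illustrative assumptions.
class _ExampleIncludeScanner(CommonScanner):
    def pattern(self):
        # Groups captured here are handed to CommonScanner.process() as matches.
        return r'#[ \t]*include[ ]*(<(.*)>|"(.*)")'
# A toolset would register the class and fetch cached instances roughly so:
#   register(_ExampleIncludeScanner, 'include')
#   s = get(_ExampleIncludeScanner, ['<include>/opt/example/include'])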
class ScannerRegistry:
def __init__ (self, manager):
self.manager_ = manager
self.count_ = 0
self.exported_scanners_ = {}
def install (self, scanner, target, vtarget):
""" Installs the specified scanner on actual target 'target'.
vtarget: virtual target from which 'target' was actualized.
"""
assert isinstance(scanner, Scanner)
assert isinstance(target, basestring)
assert isinstance(vtarget, basestring)
engine = self.manager_.engine()
engine.set_target_variable(target, "HDRSCAN", scanner.pattern())
if scanner not in self.exported_scanners_:
exported_name = "scanner_" + str(self.count_)
self.count_ = self.count_ + 1
self.exported_scanners_[scanner] = exported_name
bjam.import_rule("", exported_name, scanner.process)
else:
exported_name = self.exported_scanners_[scanner]
engine.set_target_variable(target, "HDRRULE", exported_name)
        # The scanner reflects differences in properties affecting the
        # binding of 'target', which will be known when processing
        # includes for it, and gives information on how to
        # interpret quoted includes.
engine.set_target_variable(target, "HDRGRIST", str(id(scanner)))
pass
def propagate(self, scanner, targets):
assert isinstance(scanner, Scanner)
assert is_iterable_typed(targets, basestring) or isinstance(targets, basestring)
engine = self.manager_.engine()
engine.set_target_variable(targets, "HDRSCAN", scanner.pattern())
engine.set_target_variable(targets, "HDRRULE",
self.exported_scanners_[scanner])
engine.set_target_variable(targets, "HDRGRIST", str(id(scanner)))

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@@ -0,0 +1,703 @@
# Copyright 2003 Dave Abrahams
# Copyright 2005 Rene Rivera
# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE.txt or copy at
# https://www.bfgroup.xyz/b2/LICENSE.txt)
# Support for toolset definition.
import errors ;
import feature ;
import generators ;
import numbers ;
import path ;
import property ;
import regex ;
import sequence ;
import set ;
import property-set ;
import order ;
import "class" : new ;
import utility ;
.flag-no = 1 ;
.ignore-requirements = ;
# This is used only for testing, to make sure we do not get random extra
# elements in paths.
if --ignore-toolset-requirements in [ modules.peek : ARGV ]
{
.ignore-requirements = 1 ;
}
# Initializes an additional toolset-like module. First loads the 'toolset-module'
# and then calls its 'init' rule with the trailing arguments.
#
rule using ( toolset-module : * )
{
import $(toolset-module) ;
$(toolset-module).init $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9)
;
}
# Expands subfeatures in each property set, e.g. '<toolset>gcc-3.2' will be
# converted to '<toolset>gcc/<toolset-version>3.2'.
#
local rule normalize-condition ( property-sets * )
{
local result ;
for local p in $(property-sets)
{
        local split = [ feature.split $(p) ] ;
        local expanded = [ feature.expand-subfeatures $(split) ] ;
result += $(expanded:J=/) ;
}
return $(result) ;
}
# Specifies if the 'flags' rule should check that the invoking module is the
# same as the module we are setting the flag for. 'v' can be either 'checked' or
# 'unchecked'. Subsequent call to 'pop-checking-for-flags-module' will restore
# the setting that was in effect before calling this rule.
#
rule push-checking-for-flags-module ( v )
{
.flags-module-checking = $(v) $(.flags-module-checking) ;
}
rule pop-checking-for-flags-module ( )
{
.flags-module-checking = $(.flags-module-checking[2-]) ;
}
# Specifies features that are referenced by the action rule.
# This is necessary in order to detect that these features
# are relevant.
#
rule uses-features ( rule-or-module : features * : unchecked ? )
{
local caller = [ CALLER_MODULE ] ;
if ! [ MATCH ".*([.]).*" : $(rule-or-module) ]
&& [ MATCH "(Jamfile<.*)" : $(caller) ]
{
# Unqualified rule name, used inside Jamfile. Most likely used with
# 'make' or 'notfile' rules. This prevents setting flags on the entire
# Jamfile module (this will be considered as rule), but who cares?
# Probably, 'flags' rule should be split into 'flags' and
# 'flags-on-module'.
rule-or-module = $(caller).$(rule-or-module) ;
}
else
{
local module_ = [ MATCH "([^.]*).*" : $(rule-or-module) ] ;
if $(unchecked) != unchecked
&& $(.flags-module-checking[1]) != unchecked
&& $(module_) != $(caller)
{
errors.error "Module $(caller) attempted to set flags for module $(module_)" ;
}
}
.uses-features.$(rule-or-module) += $(features) ;
}
# Specifies the flags (variables) that must be set on targets under certain
# conditions, described by arguments.
#
rule flags (
rule-or-module # If contains a dot, should be a rule name. The flags will
# be applied when that rule is used to set up build
# actions.
#
# If does not contain dot, should be a module name. The
# flag will be applied for all rules in that module. If
# module for rule is different from the calling module, an
# error is issued.
variable-name # Variable that should be set on target.
condition * : # A condition when this flag should be applied. Should be a
# set of property sets. If one of those property sets is
# contained in the build properties, the flag will be used.
# Implied values are not allowed: "<toolset>gcc" should be
# used, not just "gcc". Subfeatures, like in
# "<toolset>gcc-3.2" are allowed. If left empty, the flag
# will be used unconditionally.
#
# Property sets may use value-less properties ('<a>' vs.
                  # '<a>value') to match absent properties. This makes it
                  # possible to separately match:
#
# <architecture>/<address-model>64
# <architecture>ia64/<address-model>
#
# Where both features are optional. Without this syntax
# we would be forced to define "default" values.
values * : # The value to add to variable. If <feature> is specified,
# then the value of 'feature' will be added.
unchecked ? # If value 'unchecked' is passed, will not test that flags
# are set for the calling module.
: hack-hack ? # For
# flags rule OPTIONS <cxx-abi> : -model ansi
# Treat <cxx-abi> as condition
# FIXME: ugly hack.
)
{
local caller = [ CALLER_MODULE ] ;
if ! [ MATCH ".*([.]).*" : $(rule-or-module) ]
&& [ MATCH "(Jamfile<.*)" : $(caller) ]
{
# Unqualified rule name, used inside Jamfile. Most likely used with
# 'make' or 'notfile' rules. This prevents setting flags on the entire
# Jamfile module (this will be considered as rule), but who cares?
# Probably, 'flags' rule should be split into 'flags' and
# 'flags-on-module'.
rule-or-module = $(caller).$(rule-or-module) ;
}
else
{
local module_ = [ MATCH "([^.]*).*" : $(rule-or-module) ] ;
if $(unchecked) != unchecked
&& $(.flags-module-checking[1]) != unchecked
&& $(module_) != $(caller)
{
errors.error "Module $(caller) attempted to set flags for module $(module_)" ;
}
}
if $(condition) && ! $(condition:G=) && ! $(hack-hack)
{
# We have condition in the form '<feature>', that is, without value.
# That is an older syntax:
# flags gcc.link RPATH <dll-path> ;
# for compatibility, convert it to
# flags gcc.link RPATH : <dll-path> ;
values = $(condition) ;
condition = ;
}
if $(condition)
{
property.validate-property-sets $(condition) ;
condition = [ normalize-condition $(condition) ] ;
}
add-flag $(rule-or-module) : $(variable-name) : $(condition) : $(values) ;
}
# Adds a new flag setting with the specified values. Does no checking.
#
local rule add-flag ( rule-or-module : variable-name : condition * : values * )
{
.$(rule-or-module).flags += $(.flag-no) ;
# Store all flags for a module.
local module_ = [ MATCH "([^.]*).*" : $(rule-or-module) ] ;
.module-flags.$(module_) += $(.flag-no) ;
# Store flag-no -> rule-or-module mapping.
.rule-or-module.$(.flag-no) = $(rule-or-module) ;
.$(rule-or-module).variable.$(.flag-no) += $(variable-name) ;
.$(rule-or-module).values.$(.flag-no) += $(values) ;
.$(rule-or-module).condition.$(.flag-no) += $(condition) ;
.flag-no = [ numbers.increment $(.flag-no) ] ;
}
# Returns the first element of 'property-sets' which is a subset of
# 'properties' or an empty list if no such element exists.
#
rule find-property-subset ( property-sets * : properties * )
{
# Cut property values off.
local prop-keys = $(properties:G) ;
local result ;
for local s in $(property-sets)
{
if ! $(result)
{
# Handle value-less properties like '<architecture>' (compare with
# '<architecture>x86').
local set = [ feature.split $(s) ] ;
# Find the set of features that
# - have no property specified in required property set
# - are omitted in the build property set.
local default-props ;
for local i in $(set)
{
# If $(i) is a value-less property it should match default value
# of an optional property. See the first line in the example
# below:
#
# property set properties result
# <a> <b>foo <b>foo match
# <a> <b>foo <a>foo <b>foo no match
# <a>foo <b>foo <b>foo no match
# <a>foo <b>foo <a>foo <b>foo match
if ! ( $(i:G=) || ( $(i:G) in $(prop-keys) ) )
{
default-props += $(i) ;
}
}
if $(set) in $(properties) $(default-props)
{
result = $(s) ;
}
}
}
return $(result) ;
}
# Returns a value to be added to some flag for some target based on the flag's
# value definition and the given target's property set.
#
rule handle-flag-value ( value * : properties * )
{
local result ;
if $(value:G)
{
local matches = [ property.select $(value) : $(properties) ] ;
local order ;
for local p in $(matches)
{
local att = [ feature.attributes $(p:G) ] ;
if dependency in $(att)
{
# The value of a dependency feature is a target and needs to be
# actualized.
result += [ $(p:G=).actualize ] ;
}
else if path in $(att) || free in $(att)
{
local values ;
# Treat features with && in the value specially -- each
# &&-separated element is considered a separate value. This is
# needed to handle searched libraries or include paths, which
# may need to be in a specific order.
if ! [ MATCH (&&) : $(p:G=) ]
{
values = $(p:G=) ;
}
else
{
values = [ regex.split $(p:G=) "&&" ] ;
}
if path in $(att)
{
values = [ sequence.transform path.native : $(values) ] ;
}
result += $(values) ;
if $(values[2])
{
if ! $(order)
{
order = [ new order ] ;
}
local prev ;
for local v in $(values)
{
if $(prev)
{
$(order).add-pair $(prev) $(v) ;
}
prev = $(v) ;
}
}
}
else
{
result += $(p:G=) ;
}
}
if $(order)
{
result = [ $(order).order [ sequence.unique $(result) : stable ] ] ;
DELETE_MODULE $(order) ;
}
}
else
{
result += $(value) ;
}
return $(result) ;
}
# Given a rule name and a property set, returns a list of interleaved variables
# names and values which must be set on targets for that rule/property-set
# combination.
#
rule set-target-variables-aux ( rule-or-module : property-set )
{
local result ;
properties = [ $(property-set).raw ] ;
for local f in $(.$(rule-or-module).flags)
{
local variable = $(.$(rule-or-module).variable.$(f)) ;
local condition = $(.$(rule-or-module).condition.$(f)) ;
local values = $(.$(rule-or-module).values.$(f)) ;
if ! $(condition) ||
[ find-property-subset $(condition) : $(properties) ]
{
local processed ;
for local v in $(values)
{
# The value might be <feature-name> so needs special treatment.
processed += [ handle-flag-value $(v) : $(properties) ] ;
}
for local r in $(processed)
{
result += $(variable) $(r) ;
}
}
}
# Strip away last dot separated part and recurse.
local next = [ MATCH "^(.+)\\.([^\\.])*" : $(rule-or-module) ] ;
if $(next)
{
result += [ set-target-variables-aux $(next[1]) : $(property-set) ] ;
}
return $(result) ;
}
rule relevant-features ( rule-or-module )
{
local result ;
if ! $(.relevant-features.$(rule-or-module))
{
for local f in $(.$(rule-or-module).flags)
{
local condition = $(.$(rule-or-module).condition.$(f)) ;
local values = $(.$(rule-or-module).values.$(f)) ;
for local c in $(condition)
{
for local p in [ feature.split $(c) ]
{
if $(p:G)
{
result += $(p:G) ;
}
else
{
local temp = [ feature.expand-subfeatures $(p) ] ;
result += $(temp:G) ;
}
}
}
for local v in $(values)
{
if $(v:G)
{
result += $(v:G) ;
}
}
}
# Strip away last dot separated part and recurse.
local next = [ MATCH "^(.+)\\.([^\\.])*" : $(rule-or-module) ] ;
if $(next)
{
result += [ relevant-features $(next[1]) ] ;
}
result = [ sequence.unique $(result) ] ;
if $(result[1]) = ""
{
result = $(result) ;
}
.relevant-features.$(rule-or-module) = $(result) ;
return $(result) ;
}
else
{
return $(.relevant-features.$(rule-or-module)) ;
}
}
# Returns a list of all the features which were
# passed to uses-features.
local rule used-features ( rule-or-module )
{
if ! $(.used-features.$(rule-or-module))
{
local result = $(.uses-features.$(rule-or-module)) ;
# Strip away last dot separated part and recurse.
local next = [ MATCH "^(.+)\\.([^\\.])*" : $(rule-or-module) ] ;
if $(next)
{
result += [ used-features $(next[1]) ] ;
}
result = [ sequence.unique $(result) ] ;
if $(result[1]) = ""
{
result = $(result) ;
}
.used-features.$(rule-or-module) = $(result) ;
return $(result) ;
}
else
{
return $(.used-features.$(rule-or-module)) ;
}
}
rule filter-property-set ( rule-or-module : property-set )
{
local key = .filtered.property-set.$(rule-or-module).$(property-set) ;
if ! $($(key))
{
local relevant = [ relevant-features $(rule-or-module) ] ;
local result ;
for local p in [ $(property-set).raw ]
{
if $(p:G) in $(relevant)
{
result += $(p) ;
}
}
$(key) = [ property-set.create $(result) ] ;
}
return $($(key)) ;
}
rule set-target-variables ( rule-or-module targets + : property-set )
{
property-set = [ filter-property-set $(rule-or-module) : $(property-set) ] ;
local key = .stv.$(rule-or-module).$(property-set) ;
local settings = $($(key)) ;
if ! $(settings)
{
settings = [ set-target-variables-aux $(rule-or-module) :
$(property-set) ] ;
if ! $(settings)
{
settings = none ;
}
$(key) = $(settings) ;
}
if $(settings) != none
{
local var-name = ;
for local name-or-value in $(settings)
{
if $(var-name)
{
$(var-name) on $(targets) += $(name-or-value) ;
var-name = ;
}
else
{
var-name = $(name-or-value) ;
}
}
}
}
# Returns a property-set indicating which features are relevant
# for the given rule.
#
rule relevant ( rule-name )
{
if ! $(.relevant-features-ps.$(rule-name))
{
local features = [ sequence.transform utility.ungrist :
[ relevant-features $(rule-name) ]
[ used-features $(rule-name) ] ] ;
.relevant-features-ps.$(rule-name) =
[ property-set.create <relevant>$(features) ] ;
}
return $(.relevant-features-ps.$(rule-name)) ;
}
# Make toolset 'toolset', defined in a module of the same name, inherit from
# 'base'.
# 1. The 'init' rule from 'base' is imported into 'toolset' with full name.
# Another 'init' is called, which forwards to the base one.
# 2. All generators from 'base' are cloned. The ids are adjusted and <toolset>
# property in requires is adjusted too.
# 3. All flags are inherited.
# 4. All rules are imported.
#
rule inherit ( toolset : base )
{
import $(base) ;
inherit-generators $(toolset) : $(base) ;
inherit-flags $(toolset) : $(base) ;
inherit-rules $(toolset) : $(base) ;
}
rule inherit-generators ( toolset properties * : base : generators-to-ignore * )
{
properties ?= <toolset>$(toolset) ;
local base-generators = [ generators.generators-for-toolset $(base) ] ;
for local g in $(base-generators)
{
local id = [ $(g).id ] ;
if ! $(id) in $(generators-to-ignore)
{
# Some generator names have multiple periods in their name, so
# $(id:B=$(toolset)) does not generate the right new-id name. E.g.
# if id = gcc.compile.c++ then $(id:B=darwin) = darwin.c++, which is
# not what we want. Manually parse the base and suffix. If there is
# a better way to do this, I would love to see it. See also the
# register() rule in the generators module.
local base = $(id) ;
local suffix = "" ;
while $(base:S)
{
suffix = $(base:S)$(suffix) ;
base = $(base:B) ;
}
local new-id = $(toolset)$(suffix) ;
generators.register [ $(g).clone $(new-id) : $(properties) ] ;
}
}
}
# Brings all flag definitions from the 'base' toolset into the 'toolset'
# toolset. Flag definitions whose conditions make use of properties in
# 'prohibited-properties' are ignored. Do not confuse property and feature, for
# example <debug-symbols>on and <debug-symbols>off, so blocking one of them does
# not block the other one.
#
# The flag conditions are not altered at all, so if a condition includes a name,
# or version of a base toolset, it will not ever match the inheriting toolset.
# When such flag settings must be inherited, define a rule in base toolset
# module and call it as needed.
#
rule inherit-flags ( toolset : base : prohibited-properties * : prohibited-vars * )
{
for local f in $(.module-flags.$(base))
{
local rule-or-module = $(.rule-or-module.$(f)) ;
if ( [ set.difference
$(.$(rule-or-module).condition.$(f)) :
$(prohibited-properties) ]
|| ! $(.$(rule-or-module).condition.$(f))
) && ( ! $(.$(rule-or-module).variable.$(f)) in $(prohibited-vars) )
{
local rule_ = [ MATCH "[^.]*\.(.*)" : $(rule-or-module) ] ;
local new-rule-or-module ;
if $(rule_)
{
new-rule-or-module = $(toolset).$(rule_) ;
}
else
{
new-rule-or-module = $(toolset) ;
}
add-flag
$(new-rule-or-module)
: $(.$(rule-or-module).variable.$(f))
: $(.$(rule-or-module).condition.$(f))
: $(.$(rule-or-module).values.$(f)) ;
}
}
}
rule inherit-rules ( toolset : base : localize ? )
{
# It appears that "action" creates a local rule.
local base-generators = [ generators.generators-for-toolset $(base) ] ;
local rules ;
for local g in $(base-generators)
{
rules += [ MATCH "[^.]*\.(.*)" : [ $(g).rule-name ] ] ;
}
rules = [ sequence.unique $(rules) ] ;
IMPORT $(base) : $(rules) : $(toolset) : $(rules) : $(localize) ;
IMPORT $(toolset) : $(rules) : : $(toolset).$(rules) ;
}
.requirements = [ property-set.empty ] ;
# Return the list of global 'toolset requirements'. Those requirements will be
# automatically added to the requirements of any main target.
#
rule requirements ( )
{
return $(.requirements) ;
}
# Adds elements to the list of global 'toolset requirements'. The requirements
# will be automatically added to the requirements for all main targets, as if
# they were specified literally. For best results, all requirements added should
# be conditional or indirect conditional.
#
rule add-requirements ( requirements * )
{
if ! $(.ignore-requirements)
{
requirements = [ property.translate-indirect $(requirements) : [ CALLER_MODULE ] ] ;
requirements = [ property.expand-subfeatures-in-conditions $(requirements) ] ;
requirements = [ property.make $(requirements) ] ;
.requirements = [ $(.requirements).add-raw $(requirements) ] ;
}
}
# Returns the global toolset defaults.
#
.defaults = [ property-set.empty ] ;
rule defaults ( )
{
return $(.defaults) ;
}
# Add elements to the list of global toolset defaults. These properties
# should be conditional and will override the default value of the feature.
# Do not use this for non-conditionals. Use feature.set-default instead.
#
rule add-defaults ( properties * )
{
if ! $(.ignore-requirements)
{
properties = [ property.translate-indirect $(properties) : [ CALLER_MODULE ] ] ;
properties = [ property.expand-subfeatures-in-conditions $(properties) ] ;
properties = [ property.make $(properties) ] ;
.defaults = [ $(.defaults).add-raw $(properties) ] ;
}
}
rule __test__ ( )
{
import assert ;
local p = <b>0 <c>1 <d>2 <e>3 <f>4 ;
assert.result <c>1/<d>2/<e>3 : find-property-subset <c>1/<d>2/<e>3 <a>0/<b>0/<c>1 <d>2/<e>5 <a>9 : $(p) ;
assert.result : find-property-subset <a>0/<b>0/<c>9/<d>9/<e>5 <a>9 : $(p) ;
local p-set = <a>/<b> <a>0/<b> <a>/<b>1 <a>0/<b>1 ;
assert.result <a>/<b> : find-property-subset $(p-set) : ;
assert.result <a>0/<b> : find-property-subset $(p-set) : <a>0 <c>2 ;
assert.result <a>/<b>1 : find-property-subset $(p-set) : <b>1 <c>2 ;
assert.result <a>0/<b>1 : find-property-subset $(p-set) : <a>0 <b>1 ;
}

View File

@@ -0,0 +1,417 @@
# Status: being ported by Vladimir Prus
# Base revision: 40958
#
# Copyright 2003 Dave Abrahams
# Copyright 2005 Rene Rivera
# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt)
""" Support for toolset definition.
"""
import sys
import feature, property, generators, property_set
import b2.util.set
import bjam
from b2.util import cached, qualify_jam_action, is_iterable_typed, is_iterable
from b2.util.utility import *
from b2.util import bjam_signature, sequence
from b2.manager import get_manager
__re_split_last_segment = re.compile (r'^(.+)\.([^\.])*')
__re_two_ampersands = re.compile ('(&&)')
__re_first_segment = re.compile ('([^.]*).*')
__re_first_group = re.compile (r'[^.]*\.(.*)')
_ignore_toolset_requirements = '--ignore-toolset-requirements' not in sys.argv
# A single toolset flag. Specifies that when certain
# properties are in the build property set, certain values
# should be appended to some variable.
#
# A flag applies to a specific action in a specific module.
# The list of all flags for a module is stored, and each
# flag further contains the name of the rule it applies
# to, if any.
class Flag:
def __init__(self, variable_name, values, condition, rule = None):
assert isinstance(variable_name, basestring)
assert is_iterable(values) and all(
isinstance(v, (basestring, type(None))) for v in values)
assert is_iterable_typed(condition, property_set.PropertySet)
assert isinstance(rule, (basestring, type(None)))
self.variable_name = variable_name
self.values = values
self.condition = condition
self.rule = rule
def __str__(self):
return("Flag(" + str(self.variable_name) + ", " + str(self.values) +\
", " + str(self.condition) + ", " + str(self.rule) + ")")
def reset ():
""" Clear the module state. This is mainly for testing purposes.
"""
global __module_flags, __flags, __stv
# Mapping from module name to a list of all flags that apply
# to either that module directly, or to any rule in that module.
# Each element of the list is Flag instance.
# So, for module named xxx this might contain flags for 'xxx',
# for 'xxx.compile', for 'xxx.compile.c++', etc.
__module_flags = {}
# Mapping from specific rule or module name to a list of Flag instances
# that apply to that name.
# Say, it might contain flags for 'xxx.compile.c++'. If there are
# entries for module name 'xxx', they are flags for 'xxx' itself,
# not including any rules in that module.
__flags = {}
# A cache for variable settings. The key is generated from the rule name and the properties.
__stv = {}
reset ()
# FIXME: --ignore-toolset-requirements
def using(toolset_module, *args):
if isinstance(toolset_module, (list, tuple)):
toolset_module = toolset_module[0]
loaded_toolset_module= get_manager().projects().load_module(toolset_module, [os.getcwd()]);
loaded_toolset_module.init(*args)
# FIXME push-checking-for-flags-module ....
# FIXME: investigate existing uses of 'hack-hack' parameter
# in jam code.
@bjam_signature((["rule_or_module", "variable_name", "condition", "*"],
["values", "*"]))
def flags(rule_or_module, variable_name, condition, values = []):
""" Specifies the flags (variables) that must be set on targets under certain
conditions, described by arguments.
rule_or_module: If contains dot, should be a rule name.
The flags will be applied when that rule is
used to set up build actions.
If does not contain dot, should be a module name.
The flags will be applied for all rules in that
module.
If module for rule is different from the calling
module, an error is issued.
variable_name: Variable that should be set on target
condition A condition when this flag should be applied.
Should be set of property sets. If one of
those property sets is contained in build
properties, the flag will be used.
Implied values are not allowed:
"<toolset>gcc" should be used, not just
"gcc". Subfeatures, like in "<toolset>gcc-3.2"
are allowed. If left empty, the flag will
                          always be used.
Property sets may use value-less properties
('<a>' vs. '<a>value') to match absent
                          properties. This makes it possible to separately match
<architecture>/<address-model>64
<architecture>ia64/<address-model>
Where both features are optional. Without this
syntax we'd be forced to define "default" value.
values: The value to add to variable. If <feature>
is specified, then the value of 'feature'
will be added.
"""
assert isinstance(rule_or_module, basestring)
assert isinstance(variable_name, basestring)
assert is_iterable_typed(condition, basestring)
assert is_iterable(values) and all(isinstance(v, (basestring, type(None))) for v in values)
caller = bjam.caller()
if not '.' in rule_or_module and caller and caller[:-1].startswith("Jamfile"):
# Unqualified rule name, used inside Jamfile. Most likely used with
# 'make' or 'notfile' rules. This prevents setting flags on the entire
# Jamfile module (this will be considered as rule), but who cares?
# Probably, 'flags' rule should be split into 'flags' and
# 'flags-on-module'.
rule_or_module = qualify_jam_action(rule_or_module, caller)
else:
# FIXME: revive checking that we don't set flags for a different
# module unintentionally
pass
if condition and not replace_grist (condition, ''):
# We have a condition in the form '<feature>', that is, without a
# value. That is the older syntax:
#
# flags gcc.link RPATH <dll-path> ;
# for compatibility, convert it to
# flags gcc.link RPATH : <dll-path> ;
values = [ condition ]
condition = None
if condition:
transformed = []
for c in condition:
# FIXME: 'split' might be a too raw tool here.
pl = [property.create_from_string(s, False, True) for s in c.split('/')]
pl = feature.expand_subfeatures(pl)
transformed.append(property_set.create(pl))
condition = transformed
property.validate_property_sets(condition)
__add_flag (rule_or_module, variable_name, condition, values)
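# Illustrative usage sketch (the toolset, variable and option names are only
# examples): a toolset module would typically declare flags like
#
#   flags('gcc.compile', 'OPTIONS', ['<optimization>off'], ['-O0'])
#   flags('gcc.compile', 'OPTIONS', ['<debug-symbols>on'], ['-g'])
#
# Later, set_target_variables() consults these declarations and sets OPTIONS
# on every target whose build properties satisfy one of the conditions.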
def set_target_variables (manager, rule_or_module, targets, ps):
"""
"""
assert isinstance(rule_or_module, basestring)
assert is_iterable_typed(targets, basestring)
assert isinstance(ps, property_set.PropertySet)
settings = __set_target_variables_aux(manager, rule_or_module, ps)
if settings:
for s in settings:
for target in targets:
manager.engine().set_target_variable(target, s[0], s[1], True)
def find_satisfied_condition(conditions, ps):
"""Returns the first element of 'conditions' which is a subset of
'ps', or None if no such element exists."""
assert is_iterable_typed(conditions, property_set.PropertySet)
assert isinstance(ps, property_set.PropertySet)
for condition in conditions:
found_all = True
for i in condition.all():
if i.value:
found = i.value in ps.get(i.feature)
else:
# Handle value-less properties like '<architecture>' (compare with
# '<architecture>x86').
# If $(i) is a value-less property it should match default
# value of an optional property. See the first line in the
# example below:
#
#   property set      properties       result
#   <a> <b>foo        <b>foo           match
#   <a> <b>foo        <a>foo <b>foo    no match
#   <a>foo <b>foo     <b>foo           no match
#   <a>foo <b>foo     <a>foo <b>foo    match
found = not ps.get(i.feature)
found_all = found_all and found
if found_all:
return condition
return None
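# Illustrative sketch, reusing the <architecture>/<address-model> example from
# the flags() docstring above (the conversion mirrors the one done in flags()):
#
#   pl = [property.create_from_string(s, False, True)
#         for s in '<architecture>/<address-model>64'.split('/')]
#   cond = property_set.create(feature.expand_subfeatures(pl))
#
# find_satisfied_condition([cond], ps) returns cond when ps contains
# <address-model>64 and no <architecture> property at all: the value-less
# '<architecture>' matches the absent feature, as in the table above.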
def register (toolset):
""" Registers a new toolset.
"""
assert isinstance(toolset, basestring)
feature.extend('toolset', [toolset])
def inherit_generators (toolset, properties, base, generators_to_ignore = []):
assert isinstance(toolset, basestring)
assert is_iterable_typed(properties, basestring)
assert isinstance(base, basestring)
assert is_iterable_typed(generators_to_ignore, basestring)
if not properties:
properties = [replace_grist (toolset, '<toolset>')]
base_generators = generators.generators_for_toolset(base)
for g in base_generators:
id = g.id()
if not id in generators_to_ignore:
# Some generator names have multiple periods in their name, so
# $(id:B=$(toolset)) doesn't generate the right new_id name.
# e.g. if id = gcc.compile.c++, $(id:B=darwin) = darwin.c++,
# which is not what we want. Manually parse the base and suffix
# (if there's a better way to do this, I'd love to see it.)
# See also register in module generators.
(base, suffix) = split_action_id(id)
new_id = toolset + '.' + suffix
generators.register(g.clone(new_id, properties))
def inherit_flags(toolset, base, prohibited_properties = []):
"""Brings all flag definitions from the 'base' toolset into the 'toolset'
toolset. Flag definitions whose conditions make use of properties in
'prohibited-properties' are ignored. Note the difference between a property and
a feature: <debug-symbols>on and <debug-symbols>off are different properties of
the same feature, so prohibiting one of them does not prohibit the other.
The flag conditions are not altered at all, so if a condition includes a name,
or version of a base toolset, it won't ever match the inheriting toolset. When
such flag settings must be inherited, define a rule in base toolset module and
call it as needed."""
assert isinstance(toolset, basestring)
assert isinstance(base, basestring)
assert is_iterable_typed(prohibited_properties, basestring)
for f in __module_flags.get(base, []):
if not f.condition or b2.util.set.difference(f.condition, prohibited_properties):
match = __re_first_group.match(f.rule)
rule_ = None
if match:
rule_ = match.group(1)
new_rule_or_module = ''
if rule_:
new_rule_or_module = toolset + '.' + rule_
else:
new_rule_or_module = toolset
__add_flag (new_rule_or_module, f.variable_name, f.condition, f.values)
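# Illustrative usage sketch (the toolset name and prohibited property are
# hypothetical):
#
#   inherit_flags('mytoolset', 'gcc', ['<threading>multi'])
#
# Per the docstring above, this copies gcc's flag definitions except those
# whose conditions involve <threading>multi, keeping the conditions verbatim.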
def inherit_rules(toolset, base):
engine = get_manager().engine()
new_actions = {}
for action_name, action in engine.actions.iteritems():
module, id = split_action_id(action_name)
if module == base:
new_action_name = toolset + '.' + id
# make sure not to override any existing actions
# that may have been declared already
if new_action_name not in engine.actions:
new_actions[new_action_name] = action
engine.actions.update(new_actions)
######################################################################################
# Private functions
@cached
def __set_target_variables_aux (manager, rule_or_module, ps):
""" Given a rule name and a property set, returns a list of tuples of
variable names and values, which must be set on targets for that
rule/properties combination.
"""
assert isinstance(rule_or_module, basestring)
assert isinstance(ps, property_set.PropertySet)
result = []
for f in __flags.get(rule_or_module, []):
if not f.condition or find_satisfied_condition (f.condition, ps):
processed = []
for v in f.values:
# The value might be <feature-name> so needs special
# treatment.
processed += __handle_flag_value (manager, v, ps)
for r in processed:
result.append ((f.variable_name, r))
# strip away last dot separated part and recurse.
next = __re_split_last_segment.match(rule_or_module)
if next:
result.extend(__set_target_variables_aux(
manager, next.group(1), ps))
return result
def __handle_flag_value (manager, value, ps):
assert isinstance(value, basestring)
assert isinstance(ps, property_set.PropertySet)
result = []
if get_grist (value):
f = feature.get(value)
values = ps.get(f)
for value in values:
if f.dependency:
# the value of a dependency feature is a target
# and must be actualized
result.append(value.actualize())
elif f.path or f.free:
# Treat features with && in the value
# specially -- each &&-separated element is considered a
# separate value. This is needed to handle searched
# libraries, which must be in a specific order.
if not __re_two_ampersands.search(value):
result.append(value)
else:
result.extend(value.split ('&&'))
else:
result.append (value)
else:
result.append (value)
return sequence.unique(result, stable=True)
def __add_flag (rule_or_module, variable_name, condition, values):
""" Adds a new flag setting with the specified values.
Does no checking.
"""
assert isinstance(rule_or_module, basestring)
assert isinstance(variable_name, basestring)
assert is_iterable_typed(condition, property_set.PropertySet)
assert is_iterable(values) and all(
isinstance(v, (basestring, type(None))) for v in values)
f = Flag(variable_name, values, condition, rule_or_module)
# Grab the name of the module
m = __re_first_segment.match (rule_or_module)
assert m
module = m.group(1)
__module_flags.setdefault(module, []).append(f)
__flags.setdefault(rule_or_module, []).append(f)
__requirements = []
def requirements():
"""Return the list of global 'toolset requirements'.
Those requirements will be automatically added to the requirements of any main target."""
return __requirements
def add_requirements(requirements):
"""Adds elements to the list of global 'toolset requirements'. The requirements
will be automatically added to the requirements for all main targets, as if
they were specified literally. For best results, all requirements added should
be conditional or indirect conditional."""
assert is_iterable_typed(requirements, basestring)
if _ignore_toolset_requirements:
__requirements.extend(requirements)
# Make toolset 'toolset', defined in a module of the same name,
# inherit from 'base'
# 1. The 'init' rule from 'base' is imported into 'toolset' with full
# name. Another 'init' is called, which forwards to the base one.
# 2. All generators from 'base' are cloned. The ids are adjusted and
# <toolset> property in requires is adjusted too
# 3. All flags are inherited
# 4. All rules are imported.
def inherit(toolset, base):
assert isinstance(toolset, basestring)
assert isinstance(base, basestring)
get_manager().projects().load_module(base, ['.'])
inherit_generators(toolset, [], base)
inherit_flags(toolset, base)
inherit_rules(toolset, base)
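# Illustrative usage sketch (the toolset name is hypothetical): a new toolset
# module deriving from gcc could run
#
#   register('mytoolset')
#   inherit('mytoolset', 'gcc')
#
# which loads the base module and then clones its generators, flags and
# actions for the new toolset, per steps 2-4 in the comment above.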


@@ -0,0 +1,410 @@
# Copyright 2002, 2003 Dave Abrahams
# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt)
# Deals with target type declaration and defines target class which supports
# typed targets.
import "class" : new ;
import feature ;
import generators : * ;
import os ;
import param ;
import project ;
import property ;
import scanner ;
# The following import would create a circular dependency:
# project -> project-root -> builtin -> type -> targets -> project
# import targets ;
# The feature is optional so it would never get added implicitly. It is used
# only for internal purposes and in all cases we want to use it explicitly.
feature.feature target-type : : composite optional ;
feature.feature main-target-type : : optional incidental ;
feature.feature base-target-type : : composite optional free ;
# Registers a target type, possibly derived from a 'base-type'. Providing a list
# of 'suffixes' here is a shortcut for separately calling the register-suffixes
# rule with the given suffixes and the set-generated-target-suffix rule with the
# first given suffix.
#
rule register ( type : suffixes * : base-type ? )
{
# Type names cannot contain hyphens, because when used as feature-values
# they would be interpreted as composite features which need to be
# decomposed.
switch $(type)
{
case *-* :
import errors ;
errors.error "type name \"$(type)\" contains a hyphen" ;
}
if $(type) in $(.types)
{
import errors ;
errors.error "Type $(type) is already registered." ;
}
if $(base-type) && ! $(base-type) in $(.types)
{
import errors ;
errors.error "Type $(base-type) is not registered." ;
}
{
.types += $(type) ;
.base.$(type) = $(base-type) ;
.derived.$(base-type) += $(type) ;
.bases.$(type) = $(type) $(.bases.$(base-type)) ;
# Store suffixes for generated targets.
.suffixes.$(type) = [ new property-map ] ;
# Store prefixes for generated targets (e.g. "lib" for library).
.prefixes.$(type) = [ new property-map ] ;
if $(suffixes)-is-defined
{
# Specify mapping from suffixes to type.
register-suffixes $(suffixes) : $(type) ;
# By default generated targets of 'type' will use the first of
# 'suffixes'. This may be overridden.
set-generated-target-suffix $(type) : : $(suffixes[1]) ;
}
feature.extend target-type : $(type) ;
feature.extend main-target-type : $(type) ;
feature.extend base-target-type : $(type) ;
feature.compose <target-type>$(type) : $(base-type:G=<base-target-type>) ;
feature.compose <base-target-type>$(type) : <base-target-type>$(base-type) ;
# We used to declare the main target rule only when a 'main' parameter
# has been specified. However, it is hard to decide that a type will
# *never* need a main target rule and so from time to time we needed to
# make yet another type 'main'. So now a main target rule is defined for
# each type.
main-rule-name = [ type-to-rule-name $(type) ] ;
.main-target-type.$(main-rule-name) = $(type) ;
IMPORT $(__name__) : main-target-rule : : $(main-rule-name) ;
# Adding a new derived type affects generator selection so we need to
# make the generator selection module update any of its cached
# information related to a new derived type being defined.
generators.update-cached-information-with-a-new-type $(type) ;
}
}
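# Illustrative usage sketch (the type name and suffixes below are hypothetical):
#
#   type.register MARKDOWN : md markdown ;
#
# After this call files ending in ".md" or ".markdown" are recognized as
# MARKDOWN targets, generated MARKDOWN targets get the ".md" suffix, and a
# main target rule named "markdown" becomes available, as described above.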
# Given a type, returns the name of the main target rule which creates targets
# of that type.
#
rule type-to-rule-name ( type )
{
# Lowercase everything. Convert underscores to dashes.
import regex ;
local n = [ regex.split $(type:L) "_" ] ;
return $(n:J=-) ;
}
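# For example (hypothetical type name): type-to-rule-name CPP_MODULE would
# return "cpp-module".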
# Given a main target rule name, returns the type for which it creates targets.
#
rule type-from-rule-name ( rule-name )
{
return $(.main-target-type.$(rule-name)) ;
}
# Specifies that files with suffix from 'suffixes' be recognized as targets of
# type 'type'. Issues an error if a different type is already specified for any
# of the suffixes.
#
rule register-suffixes ( suffixes + : type )
{
for local s in $(suffixes)
{
if ! $(.type.$(s))
{
.type.$(s) = $(type) ;
}
else if $(.type.$(s)) != $(type)
{
import errors ;
errors.error Attempting to specify multiple types for suffix
\"$(s)\" : "Old type $(.type.$(s)), New type $(type)" ;
}
}
}
# Returns true iff type has been registered.
#
rule registered ( type )
{
if $(type) in $(.types)
{
return true ;
}
}
# Issues an error if 'type' is unknown.
#
rule validate ( type )
{
if ! [ registered $(type) ]
{
import errors ;
errors.error "Unknown target type $(type)" ;
}
}
# Sets a scanner class that will be used for this 'type'.
#
rule set-scanner ( type : scanner )
{
validate $(type) ;
.scanner.$(type) = $(scanner) ;
}
# Returns a scanner instance appropriate to 'type' and 'properties'.
#
rule get-scanner ( type : property-set )
{
if $(.scanner.$(type))
{
return [ scanner.get $(.scanner.$(type)) : $(property-set) ] ;
}
}
# Returns a base type for the given type or nothing in case the given type is
# not derived.
#
rule base ( type )
{
return $(.base.$(type)) ;
}
# Returns the given type and all of its base types in order of their distance
# from type.
#
rule all-bases ( type )
{
return $(.bases.$(type)) ;
}
# Returns the given type and all of its derived types in order of their distance
# from type.
#
rule all-derived ( type )
{
local result = $(type) ;
for local d in $(.derived.$(type))
{
result += [ all-derived $(d) ] ;
}
return $(result) ;
}
# Returns true if 'type' is equal to 'base' or has 'base' as its direct or
# indirect base.
#
rule is-derived ( type base )
{
if $(base) in $(.bases.$(type))
{
return true ;
}
}
# Returns true if 'type' is either derived from or is equal to 'base'.
#
# TODO: It might be that is-derived and is-subtype were meant to be different
# rules - one returning true for type = base and one not, but as currently
# implemented they are actually the same. Clean this up.
#
rule is-subtype ( type base )
{
return [ is-derived $(type) $(base) ] ;
}
# Sets a file suffix to be used when generating a target of 'type' with the
# specified properties. Can be called with no properties if no suffix has
# already been specified for the 'type'. The 'suffix' parameter can be an empty
# string ("") to indicate that no suffix should be used.
#
# Note that this does not cause files with 'suffix' to be automatically
# recognized as being of 'type'. Two different types can use the same suffix for
# their generated files but only one type can be auto-detected for a file with
# that suffix. User should explicitly specify which one using the
# register-suffixes rule.
#
rule set-generated-target-suffix ( type : properties * : suffix )
{
set-generated-target-ps suffix : $(type) : $(properties) : $(suffix) ;
}
# Change the suffix previously registered for this type/properties combination.
# If suffix is not yet specified, sets it.
#
rule change-generated-target-suffix ( type : properties * : suffix )
{
change-generated-target-ps suffix : $(type) : $(properties) : $(suffix) ;
}
# Returns the suffix used when generating a file of 'type' with the given
# properties.
#
rule generated-target-suffix ( type : property-set )
{
return [ generated-target-ps suffix : $(type) : $(property-set) ] ;
}
# Sets a target prefix that should be used when generating targets of 'type'
# with the specified properties. Can be called with empty properties if no
# prefix for 'type' has been specified yet.
#
# The 'prefix' parameter can be empty string ("") to indicate that no prefix
# should be used.
#
# Usage example: library names use the "lib" prefix on unix.
#
rule set-generated-target-prefix ( type : properties * : prefix )
{
set-generated-target-ps prefix : $(type) : $(properties) : $(prefix) ;
}
# Change the prefix previously registered for this type/properties combination.
# If prefix is not yet specified, sets it.
#
rule change-generated-target-prefix ( type : properties * : prefix )
{
change-generated-target-ps prefix : $(type) : $(properties) : $(prefix) ;
}
rule generated-target-prefix ( type : property-set )
{
return [ generated-target-ps prefix : $(type) : $(property-set) ] ;
}
# Common rules for prefix/suffix provisioning follow.
local rule set-generated-target-ps ( ps : type : properties * : psval )
{
$(.$(ps)es.$(type)).insert $(properties) : $(psval) ;
}
local rule change-generated-target-ps ( ps : type : properties * : psval )
{
local prev = [ $(.$(ps)es.$(type)).find-replace $(properties) : $(psval) ] ;
if ! $(prev)
{
set-generated-target-ps $(ps) : $(type) : $(properties) : $(psval) ;
}
}
# Returns either prefix or suffix (as indicated by 'ps') that should be used
# when generating a target of 'type' with the specified properties. Parameter
# 'ps' can be either "prefix" or "suffix". If no prefix/suffix is specified for
# 'type', returns prefix/suffix for base type, if any.
#
local rule generated-target-ps ( ps : type : property-set )
{
local result ;
local found ;
while $(type) && ! $(found)
{
result = [ $(.$(ps)es.$(type)).find $(property-set) ] ;
# If the prefix/suffix is explicitly set to an empty string, we consider
# prefix/suffix to be found. If we were not to compare with "", there
# would be no way to specify an empty prefix/suffix.
if $(result)-is-defined
{
found = true ;
}
type = $(.base.$(type)) ;
}
if $(result) = ""
{
result = ;
}
return $(result) ;
}
# Returns file type given its name. If there are several dots in filename, tries
# each suffix. E.g. for name of "file.so.1.2" suffixes "2", "1", and "so" will
# be tried.
#
rule type ( filename )
{
if [ os.name ] in NT CYGWIN
{
filename = $(filename:L) ;
}
local type ;
while ! $(type) && $(filename:S)
{
local suffix = $(filename:S) ;
type = $(.type$(suffix)) ;
filename = $(filename:S=) ;
}
return $(type) ;
}
# Rule used to construct all main targets. Note that this rule gets imported
# into the global namespace under different alias names and the exact target
# type to construct is selected based on the alias used to actually invoke this
# rule.
#
rule main-target-rule ( name : sources * : requirements * : default-build * :
usage-requirements * )
{
param.handle-named-params
sources requirements default-build usage-requirements ;
# First discover the required target type based on the exact alias used to
# invoke this rule.
local bt = [ BACKTRACE 1 ] ;
local rulename = $(bt[4]) ;
local target-type = [ type-from-rule-name $(rulename) ] ;
# This is a circular module dependency and so must be imported here.
import targets ;
return [ targets.create-typed-target $(target-type) : [ project.current ] :
$(name) : $(sources) : $(requirements) : $(default-build) :
$(usage-requirements) ] ;
}
rule __test__ ( )
{
import assert ;
# TODO: Add tests for all the is-derived, is-base & related type relation
# checking rules.
}


@@ -0,0 +1,381 @@
# Status: ported.
# Base revision: 45462.
# Copyright (C) Vladimir Prus 2002. Permission to copy, use, modify, sell and
# distribute this software is granted provided this copyright notice appears in
# all copies. This software is provided "as is" without express or implied
# warranty, and with no claim as to its suitability for any purpose.
import re
import os
import os.path
from b2.util.utility import replace_grist, os_name
from b2.exceptions import *
from b2.build import feature, property, scanner
from b2.util import bjam_signature, is_iterable_typed
__re_hyphen = re.compile ('-')
def __register_features ():
""" Register features needed by this module.
"""
# The feature is optional so that it is never implicitly added.
# It's used only for internal purposes, and in all cases we
# want to explicitly use it.
feature.feature ('target-type', [], ['composite', 'optional'])
feature.feature ('main-target-type', [], ['optional', 'incidental'])
feature.feature ('base-target-type', [], ['composite', 'optional', 'free'])
def reset ():
""" Clear the module state. This is mainly for testing purposes.
Note that this must be called _after_ resetting the module 'feature'.
"""
global __prefixes_suffixes, __suffixes_to_types, __types, __rule_names_to_types, __target_suffixes_cache
__register_features ()
# Stores suffixes for generated targets.
__prefixes_suffixes = [property.PropertyMap(), property.PropertyMap()]
# Maps suffixes to types
__suffixes_to_types = {}
# A map with all the registered types, indexed by the type name
# Each entry is a dictionary with following values:
# 'base': the name of base type or None if type has no base
# 'derived': a list of names of type which derive from this one
# 'scanner': the scanner class registered for this type, if any
__types = {}
# Caches suffixes for targets with certain properties.
__target_suffixes_cache = {}
reset ()
@bjam_signature((["type"], ["suffixes", "*"], ["base_type", "?"]))
def register (type, suffixes = [], base_type = None):
""" Registers a target type, possibly derived from a 'base-type'.
If 'suffixes' are provided, they list all the suffixes that mean a file is of 'type'.
Also, the first element gives the suffix to be used when constructing an object of
'type'.
type: a string
suffixes: None or a sequence of strings
base_type: None or a string
"""
# Type names cannot contain hyphens, because when used as
# feature-values they will be interpreted as composite features
# which need to be decomposed.
if __re_hyphen.search (type):
raise BaseException ('type name "%s" contains a hyphen' % type)
# It is possible for a type to be registered with a
# base type that has not been registered yet. In the
# check for base_type below and the following calls to setdefault()
# the key `type` will be added to __types. When the base type
# actually gets registered, it would fail the simple check
# of "type in __types"; thus the check for "'base' in __types[type]".
if type in __types and 'base' in __types[type]:
raise BaseException ('Type "%s" is already registered.' % type)
entry = __types.setdefault(type, {})
entry['base'] = base_type
entry.setdefault('derived', [])
entry.setdefault('scanner', None)
if base_type:
__types.setdefault(base_type, {}).setdefault('derived', []).append(type)
if len (suffixes) > 0:
# Generated targets of 'type' will use the first of 'suffixes'
# (this may be overridden)
set_generated_target_suffix (type, [], suffixes [0])
# Specify mapping from suffixes to type
register_suffixes (suffixes, type)
feature.extend('target-type', [type])
feature.extend('main-target-type', [type])
feature.extend('base-target-type', [type])
if base_type:
feature.compose ('<target-type>' + type, [replace_grist (base_type, '<base-target-type>')])
feature.compose ('<base-target-type>' + type, ['<base-target-type>' + base_type])
import b2.build.generators as generators
# Adding a new derived type affects generator selection so we need to
# make the generator selection module update any of its cached
# information related to a new derived type being defined.
generators.update_cached_information_with_a_new_type(type)
# FIXME: resolving recursive dependency.
from b2.manager import get_manager
get_manager().projects().project_rules().add_rule_for_type(type)
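# Illustrative usage sketch (the type name and suffixes are only examples):
#
#   register('MARKDOWN', ['md', 'markdown'])
#
# registers a new MARKDOWN type, makes ".md" and ".markdown" files be
# recognized as that type, and uses ".md" as the suffix for generated targets,
# mirroring the jam-level 'type.register' rule.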
# FIXME: quick hack.
def type_from_rule_name(rule_name):
assert isinstance(rule_name, basestring)
return rule_name.upper().replace("-", "_")
def register_suffixes (suffixes, type):
""" Specifies that targets with suffix from 'suffixes' have the type 'type'.
If a different type is already specified for any of the suffixes, issues an error.
"""
assert is_iterable_typed(suffixes, basestring)
assert isinstance(type, basestring)
for s in suffixes:
if s in __suffixes_to_types:
old_type = __suffixes_to_types [s]
if old_type != type:
raise BaseException ('Attempting to specify type for suffix "%s"\nOld type: "%s", New type "%s"' % (s, old_type, type))
else:
__suffixes_to_types [s] = type
def registered (type):
""" Returns true iff type has been registered.
"""
assert isinstance(type, basestring)
return type in __types
def validate (type):
""" Issues an error if 'type' is unknown.
"""
assert isinstance(type, basestring)
if not registered (type):
raise BaseException ("Unknown target type '%s'" % type)
def set_scanner (type, scanner):
""" Sets a scanner class that will be used for this 'type'.
"""
if __debug__:
from .scanner import Scanner
assert isinstance(type, basestring)
assert issubclass(scanner, Scanner)
validate (type)
__types [type]['scanner'] = scanner
def get_scanner (type, prop_set):
""" Returns a scanner instance appropriate to 'type' and 'property_set'.
"""
if __debug__:
from .property_set import PropertySet
assert isinstance(type, basestring)
assert isinstance(prop_set, PropertySet)
if registered (type):
scanner_type = __types [type]['scanner']
if scanner_type:
return scanner.get (scanner_type, prop_set.raw ())
pass
return None
def base(type):
"""Returns a base type for the given type or nothing in case the given type is
not derived."""
assert isinstance(type, basestring)
return __types[type]['base']
def all_bases (type):
""" Returns type and all of its bases, in the order of their distance from type.
"""
assert isinstance(type, basestring)
result = []
while type:
result.append (type)
type = __types [type]['base']
return result
def all_derived (type):
""" Returns type and all classes that derive from it, in the order of their distance from type.
"""
assert isinstance(type, basestring)
result = [type]
for d in __types [type]['derived']:
result.extend (all_derived (d))
return result
def is_derived (type, base):
""" Returns true if 'type' is 'base' or has 'base' as its direct or indirect base.
"""
assert isinstance(type, basestring)
assert isinstance(base, basestring)
# TODO: this isn't very efficient, especially for bases close to type
if base in all_bases (type):
return True
else:
return False
def is_subtype (type, base):
""" Same as is_derived. Should be removed.
"""
assert isinstance(type, basestring)
assert isinstance(base, basestring)
# TODO: remove this method
return is_derived (type, base)
@bjam_signature((["type"], ["properties", "*"], ["suffix"]))
def set_generated_target_suffix (type, properties, suffix):
""" Sets a target suffix that should be used when generating a target
of 'type' with the specified properties. Can be called with
empty properties if no suffix for 'type' was specified yet.
This does not automatically specify that files with 'suffix' have
the type 'type' --- two different types can use the same suffix for
their generated files, but only one type should be auto-detected for
a file with that suffix. The user should explicitly specify which
one.
The 'suffix' parameter can be an empty string ("") to indicate that
no suffix should be used.
"""
assert isinstance(type, basestring)
assert is_iterable_typed(properties, basestring)
assert isinstance(suffix, basestring)
set_generated_target_ps(1, type, properties, suffix)
def change_generated_target_suffix (type, properties, suffix):
""" Change the suffix previously registered for this type/properties
combination. If suffix is not yet specified, sets it.
"""
assert isinstance(type, basestring)
assert is_iterable_typed(properties, basestring)
assert isinstance(suffix, basestring)
change_generated_target_ps(1, type, properties, suffix)
def generated_target_suffix(type, properties):
if __debug__:
from .property_set import PropertySet
assert isinstance(type, basestring)
assert isinstance(properties, PropertySet)
return generated_target_ps(1, type, properties)
@bjam_signature((["type"], ["properties", "*"], ["prefix"]))
def set_generated_target_prefix(type, properties, prefix):
"""
Sets a file prefix to be used when generating a target of 'type' with the
specified properties. Can be called with no properties if no prefix has
already been specified for the 'type'. The 'prefix' parameter can be an empty
string ("") to indicate that no prefix should be used.
Note that this does not cause files with 'prefix' to be automatically
recognized as being of 'type'. Two different types can use the same prefix for
their generated files but only one type can be auto-detected for a file with
that prefix. User should explicitly specify which one using the
register-prefixes rule.
Usage example: library names use the "lib" prefix on unix.
"""
set_generated_target_ps(0, type, properties, prefix)
# Change the prefix previously registered for this type/properties combination.
# If prefix is not yet specified, sets it.
def change_generated_target_prefix(type, properties, prefix):
assert isinstance(type, basestring)
assert is_iterable_typed(properties, basestring)
assert isinstance(prefix, basestring)
change_generated_target_ps(0, type, properties, prefix)
def generated_target_prefix(type, properties):
if __debug__:
from .property_set import PropertySet
assert isinstance(type, basestring)
assert isinstance(properties, PropertySet)
return generated_target_ps(0, type, properties)
def set_generated_target_ps(is_suffix, type, properties, val):
assert isinstance(is_suffix, (int, bool))
assert isinstance(type, basestring)
assert is_iterable_typed(properties, basestring)
assert isinstance(val, basestring)
properties.append ('<target-type>' + type)
__prefixes_suffixes[is_suffix].insert (properties, val)
def change_generated_target_ps(is_suffix, type, properties, val):
assert isinstance(is_suffix, (int, bool))
assert isinstance(type, basestring)
assert is_iterable_typed(properties, basestring)
assert isinstance(val, basestring)
properties.append ('<target-type>' + type)
prev = __prefixes_suffixes[is_suffix].find_replace(properties, val)
if not prev:
set_generated_target_ps(is_suffix, type, properties, val)
# Returns either prefix or suffix (as indicated by 'is_suffix') that should be used
# when generating a target of 'type' with the specified properties.
# If no prefix/suffix is specified for 'type', returns prefix/suffix for
# base type, if any.
def generated_target_ps_real(is_suffix, type, properties):
assert isinstance(is_suffix, (int, bool))
assert isinstance(type, basestring)
assert is_iterable_typed(properties, basestring)
result = ''
found = False
while type and not found:
result = __prefixes_suffixes[is_suffix].find (['<target-type>' + type] + properties)
# Note that if the string is empty (""), but not null, we consider
# suffix found. Setting prefix or suffix to empty string is fine.
if result is not None:
found = True
type = __types [type]['base']
if not result:
result = ''
return result
def generated_target_ps(is_suffix, type, prop_set):
""" Returns the suffix that should be used when generating a target of 'type'
with the specified properties. If no suffix was specified for
'type', returns the suffix for the base type, if any.
"""
if __debug__:
from .property_set import PropertySet
assert isinstance(is_suffix, (int, bool))
assert isinstance(type, basestring)
assert isinstance(prop_set, PropertySet)
key = (is_suffix, type, prop_set)
v = __target_suffixes_cache.get(key, None)
if not v:
v = generated_target_ps_real(is_suffix, type, prop_set.raw())
__target_suffixes_cache [key] = v
return v
def type(filename):
""" Returns file type given its name. If there are several dots in filename,
tries each suffix. E.g. for name of "file.so.1.2" suffixes "2", "1", and
"so" will be tried.
"""
assert isinstance(filename, basestring)
while 1:
filename, suffix = os.path.splitext (filename)
if not suffix: return None
suffix = suffix[1:]
if suffix in __suffixes_to_types:
return __suffixes_to_types[suffix]
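# For example, type('file.so.1.2') first tries the suffix '2', then '1' and
# finally 'so', returning the type registered for the first suffix found (or
# None when none of them is registered).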
# NOTE: moved from tools/types/register
def register_type (type, suffixes, base_type = None, os = []):
""" Register the given type on the specified OSes, or on remaining OSes
if os is not specified. This rule is injected into each of the type
modules for the sake of convenience.
"""
assert isinstance(type, basestring)
assert is_iterable_typed(suffixes, basestring)
assert isinstance(base_type, basestring) or base_type is None
assert is_iterable_typed(os, basestring)
if registered (type):
return
if not os or os_name () in os:
register (type, suffixes, base_type)


@@ -0,0 +1,225 @@
# Copyright 2021 Nikita Kniazev
# Copyright 2002, 2003, 2004, 2006 Vladimir Prus
# Copyright 2008, 2012 Jurko Gospodnetic
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt)
import numbers ;
# Mirror engine JAM_VERSION
.major = "4" ;
.minor = "7" ;
rule boost-build ( )
{
return "$(.major).$(.minor)-git" ;
}
rule print ( )
{
if [ verify-engine-version ]
{
ECHO "B2" [ boost-build ] ;
}
}
rule verify-engine-version ( )
{
local v = [ modules.peek : JAM_VERSION ] ;
if $(v[1]) != $(.major) || $(v[2]) != $(.minor)
{
local argv = [ modules.peek : ARGV ] ;
local e = $(argv[1]) ;
local l = [ modules.binding version ] ;
l = $(l:D) ;
l = $(l:D) ;
ECHO "warning: mismatched versions of B2 engine and core" ;
ECHO "warning: B2 engine ($(e)) is $(v:J=.)" ;
ECHO "warning: B2 core (at $(l)) is" [ boost-build ] ;
}
else
{
return true ;
}
}
# Utility rule for testing whether all elements in a sequence are equal to 0.
#
local rule is-all-zeroes ( sequence * )
{
local result = "true" ;
for local e in $(sequence)
{
if $(e) != "0"
{
result = "" ;
}
}
return $(result) ;
}
# Returns "true" if the first version is less than the second one.
#
rule version-less ( lhs + : rhs + )
{
numbers.check $(lhs) ;
numbers.check $(rhs) ;
local done ;
local result ;
while ! $(done) && $(lhs) && $(rhs)
{
if [ numbers.less $(lhs[1]) $(rhs[1]) ]
{
done = "true" ;
result = "true" ;
}
else if [ numbers.less $(rhs[1]) $(lhs[1]) ]
{
done = "true" ;
}
else
{
lhs = $(lhs[2-]) ;
rhs = $(rhs[2-]) ;
}
}
if ( ! $(done) && ! $(lhs) && ! [ is-all-zeroes $(rhs) ] )
{
result = "true" ;
}
return $(result) ;
}
# Returns "true" if the required version is compatible with the version we
# have. This uses semantic versioning, where (major.x.y) is compatible with
# (major.n.m) and (major.x.z), and is incompatible with other values.
#
rule version-compatible ( req + : has + )
{
numbers.check $(req) ;
numbers.check $(has) ;
if $(req) = $(has)
{
return true ;
}
while $(req) && [ numbers.equal $(req[1]) $(has[1]:E=0) ]
{
req = $(req[2-]) ;
has = $(has[2-]) ;
}
if $(req)
{
return ;
}
return true ;
}
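# For example (taken from the tests below): [ version-compatible 4 : 4 9 ]
# succeeds, while [ version-compatible 4 9 : 4 ] and
# [ version-compatible 4 8 : 4 9 ] both fail -- every component of the
# required version must match the corresponding component of the version at
# hand (missing trailing components are treated as zero).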
# Returns "true" if the current JAM version is at least the given
# version.
#
rule check-jam-version ( version + )
{
local version-tag = $(version:J=.) ;
if ! $(version-tag)
{
import errors ;
errors.error Invalid version "specifier:" : $(version:E="(undefined)") ;
}
if ! $(.jam-version-check.$(version-tag))-is-defined
{
local jam-version = [ modules.peek : JAM_VERSION ] ;
if ! $(jam-version)
{
import errors ;
errors.error "Unable to deduce Boost Jam version. Your Boost Jam"
"installation is most likely terribly outdated." ;
}
.jam-version-check.$(version-tag) = "true" ;
if [ version-less [ modules.peek : JAM_VERSION ] : $(version) ]
{
.jam-version-check.$(version-tag) = "" ;
}
}
return $(.jam-version-check.$(version-tag)) ;
}
rule __test__ ( )
{
import assert ;
local jam-version = [ modules.peek : JAM_VERSION ] ;
local future-version = $(jam-version) ;
future-version += "1" ;
assert.true check-jam-version $(jam-version) ;
assert.false check-jam-version $(future-version) ;
assert.true version-less 0 : 1 ;
assert.false version-less 0 : 0 ;
assert.true version-less 1 : 2 ;
assert.false version-less 1 : 1 ;
assert.false version-less 2 : 1 ;
assert.true version-less 3 1 20 : 3 4 10 ;
assert.false version-less 3 1 10 : 3 1 10 ;
assert.false version-less 3 4 10 : 3 1 20 ;
assert.true version-less 3 1 20 5 1 : 3 4 10 ;
assert.false version-less 3 1 10 5 1 : 3 1 10 ;
assert.false version-less 3 4 10 5 1 : 3 1 20 ;
assert.true version-less 3 1 20 : 3 4 10 5 1 ;
assert.true version-less 3 1 10 : 3 1 10 5 1 ;
assert.false version-less 3 4 10 : 3 1 20 5 1 ;
assert.false version-less 3 1 10 : 3 1 10 0 0 ;
assert.false version-less 3 1 10 0 0 : 3 1 10 ;
assert.false version-less 3 1 10 0 : 3 1 10 0 0 ;
assert.false version-less 3 1 10 0 : 03 1 10 0 0 ;
assert.false version-less 03 1 10 0 : 3 1 10 0 0 ;
# TODO: Add tests for invalid input data being sent to version-less.
assert.true version-compatible 4 : 4 ;
assert.true version-compatible 4 : 4 9 ;
assert.false version-compatible 4 9 : 4 ;
assert.true version-compatible 4 9 : 4 9 ;
assert.false version-compatible 4 9 1 : 4 9 ;
assert.true version-compatible 4 9 1 : 4 9 1 ;
assert.false version-compatible 4 8 : 4 9 ;
assert.false version-compatible 4 8 1 : 4 9 ;
assert.false version-compatible 4 8 1 : 4 9 1 ;
assert.true version-compatible 5 : 5 ;
assert.false version-compatible 5 : 4 ;
assert.false version-compatible 5 : 4 9 ;
assert.false version-compatible 5 1 : 5 ;
assert.true version-compatible 5 1 : 5 1 ;
assert.false version-compatible 5 1 : 5 2 ;
assert.false version-compatible 5 1 1 : 5 ;
assert.false version-compatible 5 1 1 : 5 1 ;
assert.false version-compatible 5 2 : 5 ;
assert.false version-compatible 5 2 : 5 1 ;
assert.true version-compatible 5 2 : 5 2 ;
assert.true version-compatible 4 : 4 0 ;
assert.true version-compatible 4 0 : 4 ;
assert.true version-compatible 04 : 4 ;
assert.true version-compatible 04 : 04 ;
assert.true version-compatible 04 : 4 ;
assert.true version-compatible 04 00 : 04 ;
assert.true version-compatible 04 : 04 00 ;
}


@@ -0,0 +1,38 @@
import os
import sys
import bjam
from b2.manager import get_manager
MANAGER = get_manager()
ERROR_HANDLER = MANAGER.errors()
_major = "2015"
_minor = "07"
def boost_build():
return "{}.{}-git".format(_major, _minor)
def verify_engine_version():
major, minor, _ = v = bjam.variable('JAM_VERSION')
if major != _major or minor != _minor:
from textwrap import dedent
engine = sys.argv[0]
core = os.path.dirname(os.path.dirname(__file__))
print dedent("""\
warning: mismatched version of Boost.Build engine core
warning: Boost.Build engine "{}" is "{}"
warning: Boost.Build core at {} is {}
""".format(engine, '.'.join(v), core, boost_build()))
return False
return True
def report():
if verify_engine_version():
print "Boost.Build " + boost_build()

File diff suppressed because it is too large

File diff suppressed because it is too large


@@ -0,0 +1,682 @@
# Status: mostly ported. Missing are --out-xml support, 'configure' integration
# and some FIXMEs.
# Base revision: 64351
# Copyright 2003, 2005 Dave Abrahams
# Copyright 2006 Rene Rivera
# Copyright 2003, 2004, 2005, 2006, 2007 Vladimir Prus
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE.txt or copy at
# https://www.bfgroup.xyz/b2/LICENSE.txt)
import os
import sys
import re
import bjam
# set this early on since some of the following modules
# require looking at the sys.argv
sys.argv = bjam.variable("ARGV")
from b2.build.engine import Engine
from b2.manager import Manager
from b2.util.path import glob
from b2.build import feature, property_set
import b2.build.virtual_target
from b2.build.targets import ProjectTarget
import b2.build.build_request
from b2.build.errors import ExceptionWithUserContext
import b2.tools.common
from b2.build.toolset import using
import b2.build.virtual_target as virtual_target
import b2.build.build_request as build_request
import b2.util.regex
from b2.manager import get_manager
from b2.util import cached
from b2.util import option
################################################################################
#
# Module global data.
#
################################################################################
# Flag indicating we should display additional debugging information related to
# locating and loading Boost Build configuration files.
debug_config = False
# Cleaning is tricky. Say, if the user says 'bjam --clean foo' where 'foo' is a
# directory, then we want to clean targets which are in 'foo' as well as those
# in any child Jamfiles under 'foo', but not in any unrelated Jamfiles. To
# achieve this we collect a list of projects under which cleaning is allowed.
project_targets = []
# Virtual targets obtained when building the main targets referenced on the
# command line. When running 'bjam --clean main_target' we want to clean only files
# belonging to that main target so we need to record which targets are produced
# for it.
results_of_main_targets = []
# Was an XML dump requested?
out_xml = False
# Default toolset & version to be used in case no other toolset has been used
# explicitly by either the loaded configuration files, the loaded project build
# scripts or an explicit toolset request on the command line. If not specified,
# an arbitrary default will be used based on the current host OS. This value,
# while not strictly necessary, has been added to allow testing Boost-Build's
# default toolset usage functionality.
default_toolset = None
default_toolset_version = None
################################################################################
#
# Public rules.
#
################################################################################
# Returns the property set with the free features from the currently processed
# build request.
#
def command_line_free_features():
return command_line_free_features
# Sets the default toolset & version to be used in case no other toolset has
# been used explicitly by either the loaded configuration files, the loaded
# project build scripts or an explicit toolset request on the command line. For
# more detailed information see the comment related to used global variables.
#
def set_default_toolset(toolset, version=None):
# Declare the globals so the assignments update the module-level values.
global default_toolset, default_toolset_version
default_toolset = toolset
default_toolset_version = version
pre_build_hook = []
def add_pre_build_hook(callable):
pre_build_hook.append(callable)
post_build_hook = None
def set_post_build_hook(callable):
global post_build_hook
post_build_hook = callable
################################################################################
#
# Local rules.
#
################################################################################
# Returns actual Jam targets to be used for executing a clean request.
#
def actual_clean_targets(targets):
# Construct a list of projects explicitly detected as targets on this build
# system run. These are the projects under which cleaning is allowed.
for t in targets:
if isinstance(t, b2.build.targets.ProjectTarget):
project_targets.append(t.project_module())
# Construct a list of targets explicitly detected on this build system run
# as a result of building main targets.
targets_to_clean = set()
for t in results_of_main_targets:
# Do not include roots or sources.
targets_to_clean.update(virtual_target.traverse(t))
to_clean = []
for t in get_manager().virtual_targets().all_targets():
# Remove only derived targets.
if t.action():
p = t.project()
if t in targets_to_clean or should_clean_project(p.project_module()):
to_clean.append(t)
return [t.actualize() for t in to_clean]
_target_id_split = re.compile("(.*)//(.*)")
# Given a target id, try to find and return the corresponding target. This is
# only invoked when there is no Jamfile in ".". This code somewhat duplicates
# code in project-target.find but we can not reuse that code without a
# project-targets instance.
#
def find_target(target_id):
projects = get_manager().projects()
m = _target_id_split.match(target_id)
if m:
pm = projects.find(m.group(1), ".")
else:
pm = projects.find(target_id, ".")
if pm:
result = projects.target(pm)
if m:
result = result.find(m.group(2))
return result
def initialize_config_module(module_name, location=None):
get_manager().projects().initialize(module_name, location)
# Helper rule used to load configuration files. Loads the first configuration
# file with the given 'filename' at 'path' into module with name 'module-name'.
# Not finding the requested file may or may not be treated as an error depending
# on the must-find parameter. Returns a normalized path to the loaded
# configuration file or nothing if no file was loaded.
#
def load_config(module_name, filename, paths, must_find=False):
if debug_config:
print "notice: Searching '%s' for '%s' configuration file '%s'." \
% (paths, module_name, filename)
where = None
for path in paths:
t = os.path.join(path, filename)
if os.path.exists(t):
where = t
break
if where:
where = os.path.realpath(where)
if debug_config:
print "notice: Loading '%s' configuration file '%s' from '%s'." \
% (module_name, filename, where)
# Set source location so that path-constant in config files
# with relative paths work. This is of most importance
# for project-config.jam, but may be used in other
# config files as well.
attributes = get_manager().projects().attributes(module_name)
attributes.set('source-location', os.path.dirname(where), True)
get_manager().projects().load_standalone(module_name, where)
else:
msg = "Configuration file '%s' not found in '%s'." % (filename, paths)
if must_find:
get_manager().errors()(msg)
elif debug_config:
print msg
return where
# Loads all the configuration files used by Boost Build in the following order:
#
# -- test-config --
# Loaded only if specified on the command-line using the --test-config
# command-line parameter. It is ok for this file not to exist even if
# specified. If this configuration file is loaded, regular site and user
# configuration files will not be. If a relative path is specified, file is
# searched for in the current folder.
#
# -- site-config --
# Always named site-config.jam. Will only be found if located on the system
# root path (Windows), /etc (non-Windows), user's home folder or the Boost
# Build path, in that order. Not loaded in case the test-config configuration
# file is loaded or the --ignore-site-config command-line option is specified.
#
# -- user-config --
# Named user-config.jam by default or may be named explicitly using the
# --user-config command-line option or the BOOST_BUILD_USER_CONFIG environment
# variable. If named explicitly the file is looked for from the current working
# directory and if the default one is used then it is searched for in the
# user's home directory and the Boost Build path, in that order. Not loaded in
# case either the test-config configuration file is loaded or an empty file
# name is explicitly specified. If the file name has been given explicitly then
# the file must exist.
#
# Test configurations have been added primarily for use by Boost Build's
# internal unit testing system but may be used freely in other places as well.
#
def load_configuration_files():
# Flag indicating that site configuration should not be loaded.
ignore_site_config = "--ignore-site-config" in sys.argv
initialize_config_module("test-config")
test_config = None
for a in sys.argv:
m = re.match("--test-config=(.*)$", a)
if m:
test_config = b2.util.unquote(m.group(1))
break
if test_config:
where = load_config("test-config", os.path.basename(test_config), [os.path.dirname(test_config)])
if where:
if debug_config:
print "notice: Regular site and user configuration files will"
print "notice: be ignored due to the test configuration being loaded."
user_path = [os.path.expanduser("~")] + bjam.variable("BOOST_BUILD_PATH")
site_path = ["/etc"] + user_path
if os.name in ["nt"]:
site_path = [os.getenv("SystemRoot")] + user_path
if debug_config and not test_config and ignore_site_config:
print "notice: Site configuration files will be ignored due to the"
print "notice: --ignore-site-config command-line option."
initialize_config_module("site-config")
if not test_config and not ignore_site_config:
load_config('site-config', 'site-config.jam', site_path)
initialize_config_module('user-config')
if not test_config:
# Here, user_config has value of None if nothing is explicitly
# specified, and value of '' if user explicitly does not want
# to load any user config.
user_config = None
for a in sys.argv:
m = re.match("--user-config=(.*)$", a)
if m:
user_config = m.group(1)
break
if user_config is None:
user_config = os.getenv("BOOST_BUILD_USER_CONFIG")
# Special handling for the case when the OS does not strip the quotes
# around the file name, as is the case when using Cygwin bash.
user_config = b2.util.unquote(user_config)
explicitly_requested = user_config
if user_config is None:
user_config = "user-config.jam"
if user_config:
if explicitly_requested:
user_config = os.path.abspath(user_config)
if debug_config:
print "notice: Loading explicitly specified user configuration file:"
print " " + user_config
load_config('user-config', os.path.basename(user_config), [os.path.dirname(user_config)], True)
else:
load_config('user-config', os.path.basename(user_config), user_path)
else:
if debug_config:
print "notice: User configuration file loading explicitly disabled."
# We look for project-config.jam from "." upward. I am not sure this is
# 100% right decision, we might as well check for it only alongside the
# Jamroot file. However:
# - We need to load project-config.jam before Jamroot
# - We probably need to load project-config.jam even if there is no Jamroot
# - e.g. to implement automake-style out-of-tree builds.
if os.path.exists("project-config.jam"):
file = ["project-config.jam"]
else:
file = b2.util.path.glob_in_parents(".", ["project-config.jam"])
if file:
initialize_config_module('project-config', os.path.dirname(file[0]))
load_config('project-config', "project-config.jam", [os.path.dirname(file[0])], True)
get_manager().projects().end_load()
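# Illustrative summary of the search order implemented above (paths are only
# examples for a POSIX system): with no --test-config and no --user-config
# option, site-config.jam is looked for in /etc, the user's home directory and
# the Boost Build path; user-config.jam in the home directory and the Boost
# Build path; and project-config.jam from the current directory upward.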
# Autoconfigure toolsets based on any instances of --toolset=xx,yy,...zz or
# toolset=xx,yy,...zz in the command line. May return additional properties to
# be processed as if they had been specified by the user.
#
def process_explicit_toolset_requests():
extra_properties = []
option_toolsets = [e for option in b2.util.regex.transform(sys.argv, "^--toolset=(.*)$")
for e in option.split(',')]
feature_toolsets = [e for option in b2.util.regex.transform(sys.argv, "^toolset=(.*)$")
for e in option.split(',')]
for t in option_toolsets + feature_toolsets:
# Parse toolset-version/properties.
(toolset_version, toolset, version) = re.match("(([^-/]+)-?([^/]+)?)/?.*", t).groups()
if debug_config:
print "notice: [cmdline-cfg] Detected command-line request for '%s': toolset= %s version=%s" \
% (toolset_version, toolset, version)
# If the toolset is not known, configure it now.
known = False
if toolset in feature.values("toolset"):
known = True
if known and version and not feature.is_subvalue("toolset", toolset, "version", version):
known = False
# TODO: we should do 'using $(toolset)' in case no version has been
# specified and there are no versions defined for the given toolset to
# allow the toolset to configure its default version. For this we need
# to know how to detect whether a given toolset has any versions
# defined. An alternative would be to do this whenever version is not
# specified but that would require that toolsets correctly handle the
# case when their default version is configured multiple times which
# should be checked for all existing toolsets first.
if not known:
if debug_config:
print "notice: [cmdline-cfg] toolset '%s' not previously configured; attempting to auto-configure now" % toolset_version
if version is not None:
using(toolset, version)
else:
using(toolset)
else:
if debug_config:
print "notice: [cmdline-cfg] toolset '%s' already configured" % toolset_version
# Make sure we get an appropriate property into the build request in
# case toolset has been specified using the "--toolset=..." command-line
# option form.
if not t in sys.argv and not t in feature_toolsets:
if debug_config:
print "notice: [cmdline-cfg] adding toolset=%s to the build request." % t
extra_properties.append("toolset=%s" % t)
return extra_properties
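# Illustrative sketch (toolset names are only examples): a command line with
# "--toolset=gcc-7,clang" is split into the requests "gcc-7" and "clang";
# "gcc-7" is parsed into toolset "gcc" and version "7" by the regex above, any
# toolset not yet configured is configured via using(), and "toolset=gcc-7"
# and "toolset=clang" are returned as extra properties for the build request.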
# Returns 'true' if the given 'project' is equal to or is a (possibly indirect)
# child of any of the projects requested to be cleaned in this build system run.
# Returns 'false' otherwise. Expects the .project-targets list to have already
# been constructed.
#
@cached
def should_clean_project(project):
if project in project_targets:
return True
else:
parent = get_manager().projects().attribute(project, "parent-module")
if parent and parent != "user-config":
return should_clean_project(parent)
else:
return False
################################################################################
#
# main()
# ------
#
################################################################################
def main():
# FIXME: document this option.
if "--profiling" in sys.argv:
import cProfile
r = cProfile.runctx('main_real()', globals(), locals(), "stones.prof")
import pstats
stats = pstats.Stats("stones.prof")
stats.strip_dirs()
stats.sort_stats('time', 'calls')
stats.print_callers(20)
return r
else:
try:
return main_real()
except ExceptionWithUserContext, e:
e.report()
def main_real():
global debug_config, out_xml
debug_config = "--debug-configuration" in sys.argv
out_xml = any(re.match("^--out-xml=(.*)$", a) for a in sys.argv)
engine = Engine()
global_build_dir = option.get("build-dir")
manager = Manager(engine, global_build_dir)
import b2.build.configure as configure
if "--version" in sys.argv:
from b2.build import version
version.report()
return
# This module defines types and generator and what not,
# and depends on manager's existence
import b2.tools.builtin
b2.tools.common.init(manager)
load_configuration_files()
# Load explicitly specified toolset modules.
extra_properties = process_explicit_toolset_requests()
# Load the actual project build script modules. We always load the project
# in the current folder so 'use-project' directives have any chance of
# being seen. Otherwise, we would not be able to refer to subprojects using
# target ids.
current_project = None
projects = get_manager().projects()
if projects.find(".", "."):
current_project = projects.target(projects.load("."))
# Load the default toolset module if no other has already been specified.
if not feature.values("toolset"):
dt = default_toolset
dtv = None
if default_toolset:
dtv = default_toolset_version
else:
dt = "gcc"
if os.name == 'nt':
dt = "msvc"
# FIXME:
#else if [ os.name ] = MACOSX
#{
# default-toolset = darwin ;
#}
print "warning: No toolsets are configured."
print "warning: Configuring default toolset '%s'." % dt
print "warning: If the default is wrong, your build may not work correctly."
print "warning: Use the \"toolset=xxxxx\" option to override our guess."
print "warning: For more configuration options, please consult"
print "warning: https://www.bfgroup.xyz/b2/manual/release/index.html#bbv2.overview.configuration"
using(dt, dtv)
# Parse command line for targets and properties. Note that this requires
# that all project files already be loaded.
(target_ids, properties) = build_request.from_command_line(sys.argv[1:] + extra_properties)
# Check that we actually found something to build.
if not current_project and not target_ids:
get_manager().errors()("no Jamfile in current directory found, and no target references specified.")
# FIXME:
# EXIT
# Flags indicating that this build system run has been started in order to
# clean existing instead of create new targets. Note that these are not the
# final flag values as they may get changed later on due to some special
# targets being specified on the command line.
clean = "--clean" in sys.argv
cleanall = "--clean-all" in sys.argv
# List of explicitly requested files to build. Any target references read
# from the command line parameter not recognized as one of the targets
# defined in the loaded Jamfiles will be interpreted as an explicitly
# requested file to build. If any such files are explicitly requested then
# only those files and the targets they depend on will be built and they
# will be searched for among targets that would have been built had there
# been no explicitly requested files.
explicitly_requested_files = []
# List of Boost Build meta-targets, virtual-targets and actual Jam targets
# constructed in this build system run.
targets = []
virtual_targets = []
actual_targets = []
explicitly_requested_files = []
# Process each target specified on the command-line and convert it into
# internal Boost Build target objects. Detect special clean target. If no
# main Boost Build targets were explicitly requested use the current project
# as the target.
for id in target_ids:
if id == "clean":
clean = 1
else:
t = None
if current_project:
t = current_project.find(id, no_error=1)
else:
t = find_target(id)
if not t:
print "notice: could not find main target '%s'" % id
print "notice: assuming it is the name of a file to create."
explicitly_requested_files.append(id)
else:
targets.append(t)
if not targets:
targets = [projects.target(projects.module_name("."))]
# FIXME: put this BACK.
## if [ option.get dump-generators : : true ]
## {
## generators.dump ;
## }
# We wish to put config.log in the build directory corresponding
# to Jamroot, so that the location does not differ depending on
# directory where we do build. The amount of indirection necessary
# here is scary.
first_project = targets[0].project()
first_project_root_location = first_project.get('project-root')
first_project_root_module = manager.projects().load(first_project_root_location)
first_project_root = manager.projects().target(first_project_root_module)
first_build_build_dir = first_project_root.build_dir()
configure.set_log_file(os.path.join(first_build_build_dir, "config.log"))
virtual_targets = []
global results_of_main_targets
# Expand properties specified on the command line into multiple property
# sets consisting of all legal property combinations. Each expanded property
# set will be used for a single build run. E.g. if multiple toolsets are
# specified then requested targets will be built with each of them.
# The expansion is being performed as late as possible so that the feature
# validation is performed after all necessary modules (including project targets
# on the command line) have been loaded.
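# For example (illustrative), "b2 toolset=gcc,clang variant=release" expands
# into two property sets, one per toolset, and each requested target is
# generated once with every one of them.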
if properties:
expanded = []
for p in properties:
expanded.extend(build_request.convert_command_line_element(p))
expanded = build_request.expand_no_defaults(expanded)
else:
expanded = [property_set.empty()]
# Now that we have a set of targets to build and a set of property sets to
# build the targets with, we can start the main build process by using each
# property set to generate virtual targets from all of our listed targets
# and any of their dependants.
for p in expanded:
manager.set_command_line_free_features(property_set.create(p.free()))
for t in targets:
try:
g = t.generate(p)
if not isinstance(t, ProjectTarget):
results_of_main_targets.extend(g.targets())
virtual_targets.extend(g.targets())
except ExceptionWithUserContext, e:
e.report()
except Exception:
raise
# Convert collected virtual targets into actual raw Jam targets.
for t in virtual_targets:
actual_targets.append(t.actualize())
j = option.get("jobs")
if j:
bjam.call("set-variable", 'PARALLELISM', j)
k = option.get("keep-going", "true", "true")
if k in ["on", "yes", "true"]:
bjam.call("set-variable", "KEEP_GOING", "1")
elif k in ["off", "no", "false"]:
bjam.call("set-variable", "KEEP_GOING", "0")
else:
print "error: Invalid value for the --keep-going option"
sys.exit()
# The 'all' pseudo target is not strictly needed except in the case when we
# use it below, but people often assume they always have this target
# available and do not declare it themselves before use, which may cause
# build failures with an error message about not being able to build the
# 'all' target.
bjam.call("NOTFILE", "all")
# And now that all the actual raw Jam targets and all the dependencies
# between them have been prepared all that is left is to tell Jam to update
# those targets.
if explicitly_requested_files:
# Note that this case cannot be joined with the regular one when only
# exact Boost Build targets are requested, as here we do not build those
# requested targets but only use them to construct the dependency tree
# needed to build the explicitly requested files.
# FIXME: add $(.out-xml)
bjam.call("UPDATE", ["<e>%s" % x for x in explicitly_requested_files])
elif cleanall:
bjam.call("UPDATE", "clean-all")
elif clean:
manager.engine().set_update_action("common.Clean", "clean",
actual_clean_targets(targets))
bjam.call("UPDATE", "clean")
else:
# FIXME:
#configure.print-configure-checks-summary ;
if pre_build_hook:
for h in pre_build_hook:
h()
bjam.call("DEPENDS", "all", actual_targets)
ok = bjam.call("UPDATE_NOW", "all") # FIXME: add out-xml
if post_build_hook:
post_build_hook(ok)
# Prevent automatic update of the 'all' target, now that
# we have explicitly updated what we wanted.
bjam.call("UPDATE")
if manager.errors().count() == 0:
return ["ok"]
else:
return []

View File

@@ -0,0 +1,309 @@
# Copyright 2008 - 2013 Roland Schwarz
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE.txt or copy at
# https://www.bfgroup.xyz/b2/LICENSE.txt)
# Boost library support module.
#
# This module allows using the Boost library from boost-build projects. The
# location of a boost source tree or the path to a pre-built version of the
# library can be configured from either site-config.jam or user-config.jam. If
# no location is configured the module looks for a BOOST_ROOT environment
# variable, which should point to a boost source tree. As a last resort it tries
# to use pre-built libraries from the standard search path of the compiler.
#
# If the location to a source tree is known, the module can be configured from
# the *-config.jam files:
#
# using boost : 1.35 : <root>/path-to-boost-root ;
#
# If the location to a pre-built version is known:
#
# using boost : 1.34
# : <include>/usr/local/include/boost_1_34
# <library>/usr/local/lib
# ;
#
# It is legal to configure more than one boost library version in the config
# files. The version identifier is used to disambiguate between them. The first
# configured version becomes the default.
#
# To use a boost library you need to put a 'use' statement into your Jamfile:
#
# import boost ;
#
# boost.use-project 1.35 ;
#
# If you do not care about a specific version you can just omit the version
# part, in which case the default is picked up:
#
# boost.use-project ;
#
# The library can be referenced with the project identifier '/boost'. To
# reference the program_options library you would specify:
#
# exe myexe : mysrc.cpp : <library>/boost//program_options ;
#
# Note that the requirements are automatically transformed into suitable tags to
# find the correct pre-built library.
#
import common ;
import modules ;
import numbers ;
import project ;
import property-set ;
import regex ;
import toolset ;
.boost.auto_config = [ property-set.create <layout>system ] ;
if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
{
.debug-configuration = true ;
}
# Configuration of the boost library to use.
#
# This can either be a boost source tree or pre-built libraries. The 'version'
# parameter must be a valid boost version number, e.g. 1.35, if specifying a
# pre-built version with versioned layout. It may be a symbolic name, e.g.
# 'trunk' if specifying a source tree. The options are specified as named
# parameters (like properties). The following parameters are available:
#
# <root>/path-to-boost-root : Specify a source tree.
# <include>/path-to-include : The include directory to search.
# <library>/path-to-library : The library directory to search.
# <layout>system or <layout>versioned : Built library layout.
# <build-id>my_build_id : The custom build id to use.
#
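# Example configuration entries (version numbers and paths are illustrative):
#
#   using boost : 1.70 : <root>/opt/src/boost_1_70_0 ;
#
#   using boost : 1.66
#     : <include>/opt/boost_1_66_0/include
#       <library>/opt/boost_1_66_0/lib
#       <layout>versioned
#     ;
#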
rule init
(
version # Version identifier.
: options * # Set the option properties.
)
{
if $(.boost.$(version))
{
import errors ;
errors.user-error Boost $(version) already configured. ;
}
else
{
if $(.debug-configuration)
{
if ! $(.boost_default)
{
echo notice\: configuring default boost library $(version) ;
}
echo notice\: configuring boost library $(version) ;
}
.boost_default ?= $(version) ; # the first configured is default
.boost.$(version) = [ property-set.create $(options) ] ;
}
}
# Use a certain version of the library.
#
# The use-project rule causes the module to define a boost project of searchable
# pre-built boost libraries, or references a source tree of the boost library.
# If the 'version' parameter is omitted either the configured default (first in
# config files) is used or an auto configuration will be attempted.
#
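# Example (version and target are illustrative):
#
#   import boost ;
#   boost.use-project 1.70 ;  # omit the version to pick up the default
#   exe myapp : myapp.cpp : <library>/boost//program_options ;
#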
rule use-project
(
version ? # The version of the library to use.
)
{
project.push-current [ project.current ] ;
version ?= $(.boost_default) ;
version ?= auto_config ;
if $(.initialized)
{
if $(.initialized) != $(version)
{
import errors ;
errors.user-error Attempt to use $(__name__) with different
parameters. ;
}
}
else
{
if $(.boost.$(version))
{
local opt = $(.boost.$(version)) ;
local root = [ $(opt).get <root> ] ;
local inc = [ $(opt).get <include> ] ;
local lib = [ $(opt).get <library> ] ;
if $(.debug-configuration)
{
echo notice\: using boost library $(version) [ $(opt).raw ] ;
}
.layout = [ $(opt).get <layout> ] ;
.layout ?= versioned ;
.build_id = [ $(opt).get <build-id> ] ;
.version_tag = [ regex.replace $(version) "[*\\/:.\"\' ]" "_" ] ;
.initialized = $(version) ;
if ( $(root) && $(inc) )
|| ( $(root) && $(lib) )
|| ( $(lib) && ! $(inc) )
|| ( ! $(lib) && $(inc) )
{
import errors ;
errors.user-error Ambiguous parameters, use either <root> or
<include> with <library>. ;
}
else if ! $(root) && ! $(inc)
{
root = [ modules.peek : BOOST_ROOT ] ;
}
local prj = [ project.current ] ;
local mod = [ $(prj).project-module ] ;
if $(root)
{
modules.call-in $(mod) : use-project boost : $(root) ;
}
else
{
project.initialize $(__name__) ;
# It is possible to override the setup of the searched libraries
# per version. The (unlikely) 0.0.1 tag is meant as an example
# template only.
switch $(version)
{
case 0.0.1 : boost_0_0_1 $(inc) $(lib) ;
case * : boost_std $(inc) $(lib) ;
}
}
}
else
{
import errors ;
errors.user-error Reference to unconfigured boost version. ;
}
}
project.pop-current ;
}
local rule boost_lib_std ( id : shared-lib-define )
{
lib $(id) : : : : <link>shared:<define>$(shared-lib-define) ;
}
rule boost_std ( inc ? lib ? )
{
# The default definitions for pre-built libraries.
project boost
: usage-requirements <include>$(inc) <define>BOOST_ALL_NO_LIB
: requirements <tag>@tag_std <search>$(lib)
;
alias headers ;
boost_lib_std chrono : BOOST_CHRONO_DYN_LINK ;
boost_lib_std container : BOOST_CONTAINER_DYN_LINK ;
boost_lib_std date_time : BOOST_DATE_TIME_DYN_LINK ;
boost_lib_std filesystem : BOOST_FILE_SYSTEM_DYN_LINK ;
boost_lib_std graph : BOOST_GRAPH_DYN_LINK ;
boost_lib_std graph_parallel : BOOST_GRAPH_DYN_LINK ;
boost_lib_std iostreams : BOOST_IOSTREAMS_DYN_LINK ;
boost_lib_std json : BOOST_JSON_DYN_LINK ;
boost_lib_std locale : BOOST_LOCALE_DYN_LINK ;
boost_lib_std log : BOOST_LOG_DYN_LINK ;
boost_lib_std log_setup : BOOST_LOG_SETUP_DYN_LINK ;
boost_lib_std math_c99 : BOOST_MATH_TR1_DYN_LINK ;
boost_lib_std math_c99f : BOOST_MATH_TR1_DYN_LINK ;
boost_lib_std math_c99l : BOOST_MATH_TR1_DYN_LINK ;
boost_lib_std math_tr1 : BOOST_MATH_TR1_DYN_LINK ;
boost_lib_std math_tr1f : BOOST_MATH_TR1_DYN_LINK ;
boost_lib_std math_tr1l : BOOST_MATH_TR1_DYN_LINK ;
boost_lib_std mpi : BOOST_MPI_DYN_LINK ;
boost_lib_std prg_exec_monitor : BOOST_TEST_DYN_LINK ;
boost_lib_std program_options : BOOST_PROGRAM_OPTIONS_DYN_LINK ;
boost_lib_std python : BOOST_PYTHON_DYN_LINK ;
boost_lib_std python3 : BOOST_PYTHON_DYN_LINK ;
boost_lib_std random : BOOST_RANDOM_DYN_LINK ;
boost_lib_std regex : BOOST_REGEX_DYN_LINK ;
boost_lib_std serialization : BOOST_SERIALIZATION_DYN_LINK ;
boost_lib_std signals : BOOST_SIGNALS_DYN_LINK ;
boost_lib_std system : BOOST_SYSTEM_DYN_LINK ;
boost_lib_std test_exec_monitor : BOOST_TEST_DYN_LINK ;
boost_lib_std thread : BOOST_THREAD_DYN_DLL ;
boost_lib_std timer : BOOST_TIMER_DYN_DLL ;
boost_lib_std unit_test_framework : BOOST_TEST_DYN_LINK ;
boost_lib_std wave : BOOST_WAVE_DYN_LINK ;
boost_lib_std wserialization : BOOST_SERIALIZATION_DYN_LINK ;
}
# Example placeholder for rules defining Boost library project & library targets
# for a specific Boost library version. Copy under a different name and model
# after the boost_std rule. Please note that it is also possible to have a per
# version tagging rule in case the tagging algorithm changes between versions.
#
rule boost_0_0_1 ( inc ? lib ? )
{
echo "You are trying to use an example placeholder for boost libs." ;
}
rule tag_std ( name : type ? : property-set )
{
name = boost_$(name) ;
if ( [ $(property-set).get <link> ] in static ) &&
( [ $(property-set).get <target-os> ] in windows )
{
name = lib$(name) ;
}
local result ;
if $(.layout) = system
{
local version = [ MATCH "^([0-9]+)_([0-9]+)" : $(.version_tag) ] ;
if $(version[1]) = "1" && [ numbers.less $(version[2]) 39 ]
{
result = [ tag_tagged $(name) : $(type) : $(property-set) ] ;
}
else
{
result = [ tag_system $(name) : $(type) : $(property-set) ] ;
}
}
else if $(.layout) = tagged
{
result = [ tag_tagged $(name) : $(type) : $(property-set) ] ;
}
else if $(.layout) = versioned
{
result = [ tag_versioned $(name) : $(type) : $(property-set) ] ;
}
else
{
import errors ;
errors.error Missing layout. ;
}
return $(result) ;
}
rule tag_system ( name : type ? : property-set )
{
return [ common.format-name <base> -$(.build_id) : $(name) : $(type) :
$(property-set) ] ;
}
rule tag_tagged ( name : type ? : property-set )
{
return [ common.format-name <base> <threading> <runtime> -$(.build_id) :
$(name) : $(type) : $(property-set) ] ;
}
rule tag_versioned ( name : type ? : property-set )
{
return [ common.format-name <base> <toolset> <threading> <runtime>
-$(.version_tag) -$(.build_id) : $(name) : $(type) : $(property-set) ] ;
}
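# Note (illustrative): with the default 'versioned' layout the helpers above
# decorate the base library name with toolset, threading, runtime, version and
# build id tags, yielding names along the lines of boost_system-gcc8-mt-1_70.
# The exact decoration depends on the properties passed to common.format-name.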

View File

@@ -0,0 +1,280 @@
# $Id: boost.jam 62249 2010-05-26 19:05:19Z steven_watanabe $
# Copyright 2008 Roland Schwarz
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt)
# Boost library support module.
#
# This module allows using the Boost library from boost-build projects.
# The location of a boost source tree or the path to a pre-built
# version of the library can be configured from either site-config.jam
# or user-config.jam. If no location is configured the module looks for
# a BOOST_ROOT environment variable, which should point to a boost source
# tree. As a last resort it tries to use pre-built libraries from the standard
# search path of the compiler.
#
# If the location to a source tree is known, the module can be configured
# from the *-config.jam files:
#
# using boost : 1.35 : <root>/path-to-boost-root ;
#
# If the location to a pre-built version is known:
#
# using boost : 1.34
# : <include>/usr/local/include/boost_1_34
# <library>/usr/local/lib
# ;
#
# It is legal to configure more than one boost library version in the config
# files. The version identifier is used to disambiguate between them.
# The first configured version becomes the default.
#
# To use a boost library you need to put a 'use' statement into your
# Jamfile:
#
# import boost ;
#
# boost.use-project 1.35 ;
#
# If you don't care about a specific version you can just omit the version
# part, in which case the default is picked up:
#
# boost.use-project ;
#
# The library can be referenced with the project identifier '/boost'. To
# reference the program_options library you would specify:
#
# exe myexe : mysrc.cpp : <library>/boost//program_options ;
#
# Note that the requirements are automatically transformed into suitable
# tags to find the correct pre-built library.
#
import re
import bjam
from b2.build import alias, property, property_set, feature
from b2.manager import get_manager
from b2.tools import builtin, common
from b2.util import bjam_signature, regex
# TODO: This is currently necessary in Python Port, but was not in Jam.
feature.feature('layout', ['system', 'versioned', 'tagged'], ['optional'])
feature.feature('root', [], ['optional', 'free'])
feature.feature('build-id', [], ['optional', 'free'])
__initialized = None
__boost_auto_config = property_set.create([property.Property('layout', 'system')])
__boost_configured = {}
__boost_default = None
__build_id = None
__debug = None
def debug():
global __debug
if __debug is None:
__debug = "--debug-configuration" in bjam.variable("ARGV")
return __debug
# Configuration of the boost library to use.
#
# This can either be a boost source tree or
# pre-built libraries. The 'version' parameter must be a valid boost
# version number, e.g. 1.35, if specifying a pre-built version with
# versioned layout. It may be a symbolic name, e.g. 'trunk' if specifying
# a source tree. The options are specified as named parameters (like
# properties). The following parameters are available:
#
# <root>/path-to-boost-root: Specify a source tree.
#
# <include>/path-to-include: The include directory to search.
#
# <library>/path-to-library: The library directory to search.
#
# <layout>system or <layout>versioned.
#
# <build-id>my_build_id: The custom build id to use.
#
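# For example (illustrative), a user-config.jam entry such as
#   using boost : 1.66 : <include>/usr/include <library>/usr/lib <layout>system ;
# reaches this module as
#   init(['1.66'], ['<include>/usr/include', '<library>/usr/lib', '<layout>system'])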
def init(version, options = None):
assert(isinstance(version,list))
assert(len(version)==1)
version = version[0]
if version in __boost_configured:
get_manager().errors()("Boost {} already configured.".format(version));
else:
global __boost_default
if debug():
if not __boost_default:
print "notice: configuring default boost library {}".format(version)
print "notice: configuring boost library {}".format(version)
if not __boost_default:
__boost_default = version
properties = []
for option in options:
properties.append(property.create_from_string(option))
__boost_configured[ version ] = property_set.PropertySet(properties)
projects = get_manager().projects()
rules = projects.project_rules()
# Use a certain version of the library.
#
# The use-project rule causes the module to define a boost project of
# searchable pre-built boost libraries, or references a source tree
# of the boost library. If the 'version' parameter is omitted either
# the configured default (first in config files) is used or an auto
# configuration will be attempted.
#
@bjam_signature(([ "version", "?" ], ))
def use_project(version = None):
projects.push_current( projects.current() )
if not version:
version = __boost_default
if not version:
version = "auto_config"
global __initialized
if __initialized:
if __initialized != version:
get_manager().errors()('Attempt to use {} with different parameters'.format('boost'))
else:
if version in __boost_configured:
opts = __boost_configured[ version ]
root = opts.get('<root>' )
inc = opts.get('<include>')
lib = opts.get('<library>')
if debug():
print "notice: using boost library {} {}".format( version, opt.raw() )
global __layout
global __version_tag
global __build_id  # needed so the build id configured here is visible to the tag_* helpers
__layout = opts.get('<layout>')
if not __layout:
__layout = 'versioned'
__build_id = opts.get('<build-id>')
__version_tag = re.sub("[*\\/:.\"\' ]", "_", version)
__initialized = version
if ( root and inc ) or \
( root and lib ) or \
( lib and not inc ) or \
( not lib and inc ):
get_manager().errors()("Ambiguous parameters, use either <root> or <include> with <library>.")
elif not root and not inc:
root = bjam.variable("BOOST_ROOT")
module = projects.current().project_module()
if root:
bjam.call('call-in-module', module, 'use-project', ['boost', root])
else:
projects.initialize(__name__)
if version == '0.0.1':
boost_0_0_1( inc, lib )
else:
boost_std( inc, lib )
else:
get_manager().errors()("Reference to unconfigured boost version.")
projects.pop_current()
rules.add_rule( 'boost.use-project', use_project )
def boost_std(inc = None, lib = None):
# The default definitions for pre-built libraries.
rules.project(
['boost'],
['usage-requirements'] + ['<include>{}'.format(i) for i in inc] + ['<define>BOOST_ALL_NO_LIB'],
['requirements'] + ['<search>{}'.format(l) for l in lib])
# TODO: There should be a better way to add a Python function into a
# project requirements property set.
tag_prop_set = property_set.create([property.Property('<tag>', tag_std)])
attributes = projects.attributes(projects.current().project_module())
attributes.requirements = attributes.requirements.refine(tag_prop_set)
alias('headers')
def boost_lib(lib_name, dyn_link_macro):
if (isinstance(lib_name,str)):
lib_name = [lib_name]
builtin.lib(lib_name, usage_requirements=['<link>shared:<define>{}'.format(dyn_link_macro)])
boost_lib('container' , 'BOOST_CONTAINER_DYN_LINK' )
boost_lib('date_time' , 'BOOST_DATE_TIME_DYN_LINK' )
boost_lib('filesystem' , 'BOOST_FILE_SYSTEM_DYN_LINK' )
boost_lib('graph' , 'BOOST_GRAPH_DYN_LINK' )
boost_lib('graph_parallel' , 'BOOST_GRAPH_DYN_LINK' )
boost_lib('iostreams' , 'BOOST_IOSTREAMS_DYN_LINK' )
boost_lib('locale' , 'BOOST_LOCALE_DYN_LINK' )
boost_lib('log' , 'BOOST_LOG_DYN_LINK' )
boost_lib('log_setup' , 'BOOST_LOG_DYN_LINK' )
boost_lib('math_tr1' , 'BOOST_MATH_TR1_DYN_LINK' )
boost_lib('math_tr1f' , 'BOOST_MATH_TR1_DYN_LINK' )
boost_lib('math_tr1l' , 'BOOST_MATH_TR1_DYN_LINK' )
boost_lib('math_c99' , 'BOOST_MATH_TR1_DYN_LINK' )
boost_lib('math_c99f' , 'BOOST_MATH_TR1_DYN_LINK' )
boost_lib('math_c99l' , 'BOOST_MATH_TR1_DYN_LINK' )
boost_lib('mpi' , 'BOOST_MPI_DYN_LINK' )
boost_lib('program_options' , 'BOOST_PROGRAM_OPTIONS_DYN_LINK')
boost_lib('python' , 'BOOST_PYTHON_DYN_LINK' )
boost_lib('python3' , 'BOOST_PYTHON_DYN_LINK' )
boost_lib('random' , 'BOOST_RANDOM_DYN_LINK' )
boost_lib('regex' , 'BOOST_REGEX_DYN_LINK' )
boost_lib('serialization' , 'BOOST_SERIALIZATION_DYN_LINK' )
boost_lib('wserialization' , 'BOOST_SERIALIZATION_DYN_LINK' )
boost_lib('signals' , 'BOOST_SIGNALS_DYN_LINK' )
boost_lib('system' , 'BOOST_SYSTEM_DYN_LINK' )
boost_lib('unit_test_framework' , 'BOOST_TEST_DYN_LINK' )
boost_lib('prg_exec_monitor' , 'BOOST_TEST_DYN_LINK' )
boost_lib('test_exec_monitor' , 'BOOST_TEST_DYN_LINK' )
boost_lib('thread' , 'BOOST_THREAD_DYN_DLL' )
boost_lib('wave' , 'BOOST_WAVE_DYN_LINK' )
def boost_0_0_1( inc, lib ):
print "You are trying to use an example placeholder for boost libs." ;
# Copy this template to another place (in the file boost.jam)
# and define a project and libraries modelled after the
# boost_std rule. Please note that it is also possible to have
# a per version tagging rule in case they are different between
# versions.
def tag_std(name, type, prop_set):
name = 'boost_' + name
if 'static' in prop_set.get('<link>') and 'windows' in prop_set.get('<target-os>'):
name = 'lib' + name
result = None
if __layout == 'system':
versionRe = re.search('^([0-9]+)_([0-9]+)', __version_tag)
if versionRe and versionRe.group(1) == '1' and int(versionRe.group(2)) < 39:
result = tag_tagged(name, type, prop_set)
else:
result = tag_system(name, type, prop_set)
elif __layout == 'tagged':
result = tag_tagged(name, type, prop_set)
elif __layout == 'versioned':
result = tag_versioned(name, type, prop_set)
else:
get_manager().errors()("Missing layout")
return result
def tag_maybe(param):
return ['-{}'.format(param)] if param else []
def tag_system(name, type, prop_set):
return common.format_name(['<base>'] + tag_maybe(__build_id), name, type, prop_set)
def tag_tagged(name, type, prop_set):
return common.format_name(['<base>', '<threading>', '<runtime>'] + tag_maybe(__build_id), name, type, prop_set)
def tag_versioned(name, type, prop_set):
return common.format_name(['<base>', '<toolset>', '<threading>', '<runtime>'] + tag_maybe(__version_tag) + tag_maybe(__build_id),
name, type, prop_set)

View File

@@ -0,0 +1,288 @@
# Copyright Rene Rivera 2015
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE.txt or copy at
# https://www.bfgroup.xyz/b2/LICENSE.txt)
import path ;
import project ;
import modules ;
import regex ;
import type ;
# Add a location, i.e. directory, where to search for libraries.
# The optional 'prefix' indicates which rooted-prefixes the new
# search dir applies to. The prefix defaults to '/'.
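# Example (directory and prefix are illustrative):
#
#   import modular ;
#   modular.add-location ../libs /boost ; # resolve /boost/... references in ../libs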
rule add-location ( dir prefix ? : base-dir ? )
{
process-args ;
prefix ?= "/" ;
# Dir path of caller to base paths from.
caller-module ?= [ CALLER_MODULE ] ;
local caller-dir = [ modules.peek $(caller-module) : __file__ ] ;
caller-dir = $(caller-dir:D) ;
base-dir ?= $(caller-dir) ;
.search-path-prefix += $(prefix) ;
.search-path.$(prefix) += [ path.root [ path.root $(dir) $(base-dir) ] [ path.pwd ] ] ;
}
# Declares additional definitions of a modular library target external
# to the modular library build itself. This makes it possible to externally
# define modular libraries without modifying the library. The passed in
# values are added on demand when the named library is first declared.
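# Example (library name and usage requirements are illustrative):
#
#   import modular ;
#   modular.external /boost/config : : : : <include>include ;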
rule external (
name : sources * : requirements * : default-build * :
usage-requirements * )
{
.external.($(name)).sources = $(sources) ;
.external.($(name)).requirements = $(requirements) ;
.external.($(name)).default-build = $(default-build) ;
.external.($(name)).usage-requirements = $(usage-requirements) ;
}
# Find, and declare, any modular libraries referenced in the target-refs.
# This will both load the modular libraries, and declare/manufacture
# the modular libraries as needed.
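# Example (target reference is illustrative):
#
#   import modular ;
#   local refs = [ modular.find /boost/system//boost_system ] ;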
rule find ( target-refs + )
{
process-args ;
local caller-mod = [ CALLER_MODULE ] ;
local caller-dir = [ modules.peek $(caller-mod) : __file__ ] ;
caller-dir = $(caller-dir:D) ;
caller-dir = [ path.root $(caller-dir) [ path.pwd ] ] ;
local result-refs ;
for local target-ref in $(target-refs)
{
result-refs += [ resolve-reference $(target-ref)
: $(caller-mod) $(caller-dir) ] ;
}
return $(result-refs) ;
}
##############################################################################
local rule resolve-reference ( target-ref : caller-mod caller-dir ? )
{
# ECHO %%% modular.resolve-target-ref $(target-ref) :: $(caller-mod) $(caller-dir) ;
if ! $(caller-dir)
{
caller-dir = [ modules.peek $(caller-mod) : __file__ ] ;
caller-dir = $(caller-dir:D) ;
caller-dir = [ path.root $(caller-dir) [ path.pwd ] ] ;
}
local result-ref = $(target-ref) ;
local ref = [ MATCH ^(.*)//.* : $(target-ref:G=) ] ;
# if ! ( $(ref) in $(.target-refs) )
{
# .target-refs += $(ref) ;
local search-prefix ;
local search-sub ;
for local prefix in $(.search-path-prefix)
{
if ! $(search-prefix)
{
local search-match = [ MATCH ^($(prefix))/(.*)$ : $(ref) ] ;
search-prefix = $(search-match[1]) ;
search-sub = $(search-match[2]) ;
}
}
if $(search-prefix)
{
local found = [ path.glob $(.search-path.$(search-prefix)) : $(search-sub) ] ;
found = $(found[1]) ;
if $(found)
{
local lib-ref = [ regex.split $(search-sub) / ] ;
lib-ref = $(search-prefix)/$(lib-ref[1]) ;
local lib-path = [ path.relative-to $(caller-dir) $(found) ] ;
define-library $(lib-ref) $(caller-mod) : $(lib-path) ;
}
}
}
return $(result-ref) ;
}
local rule define-library ( name caller-module ? : root )
{
# ECHO ~~~ modular.library $(name) $(caller-module) :: $(root) :: $(depends) ;
process-args ;
# Dir path of caller to base paths from.
caller-module ?= [ CALLER_MODULE ] ;
local caller-dir = [ modules.peek $(caller-module) : __file__ ] ;
caller-dir = $(caller-dir:D) ;
# Find the various parts of the library.
local lib-dir = [ path.root [ path.root $(root) $(caller-dir) ] [ path.pwd ] ] ;
local lib-contents = [ path.glob $(lib-dir) : "include" "build" ] ;
lib-contents = $(lib-contents:D=) ;
# "include" dir for library..
local include-dir ;
if "include" in $(lib-contents)
{
include-dir = $(root)/include ;
}
# If it has a build dir, i.e. it has targets to build,
# we root the project at the build dir to make it easy
# to refer to the build targets. This mirrors the regular
# Boost organization of the project aliases.
if "build" in $(lib-contents)
{
root = $(root)/build ;
build-dir = "." ;
}
# Shadow target declarations so that we can alter build targets
# to work in the standalone modular structure.
local lib-location = [ path.root [ path.make $(root) ] $(caller-dir) ] ;
local lib-module-name = [ project.module-name $(lib-location) ] ;
local modular-rules = [ RULENAMES modular-rules ] ;
IMPORT modular-rules : $(modular-rules) : $(lib-module-name) : $(modular-rules) ;
# Load/create/declare library project.
local lib-module = [ project.find $(root) : $(caller-dir) ] ;
if ! $(lib-module)
{
# If the find was unable to load the project we synthesize it.
lib-module = [ project.load $(lib-location) : synthesize ] ;
}
local lib-target = [ project.target $(lib-module) ] ;
if ! [ modules.peek $(lib-module) : __library__ ]
{
modules.poke $(lib-module) : __library__ : $(name) ;
for local type in [ modules.peek type : .types ]
{
main-rule-name = [ type.type-to-rule-name $(type) ] ;
IMPORT modular-rules : main-target-rule : $(lib-module-name) : $(main-rule-name) ;
}
}
# Declare project alternate ID.
modules.call-in $(caller-module) : use-project $(name) : $(root) ;
# Create a "library" target that has basic usage info if needed.
if ! [ $(lib-target).has-alternative-for-target library ]
{
include-dir = [ path.relative-to $(root) $(include-dir) ] ;
project.push-current $(lib-target) ;
# Declare the library alias.
modules.call-in $(lib-module) : library
: # Sources
: # Requirements
: # Default Build
: # Usage Requirements
<include>$(include-dir)
;
project.pop-current ;
}
}
local rule process-args ( )
{
if ! $(.did-process-args)
{
.did-process-args = yes ;
local argv = [ modules.peek : ARGV ] ;
local dirs = [ MATCH ^--modular-search-dir=(.*)$ : $(argv) ] ;
for local dir in $(dirs)
{
add-location $(dir) : [ path.pwd ] ;
}
}
}
rule apply-external (
mod : field : values * )
{
local result ;
local name = [ modules.peek $(mod) : __library__ ] ;
values += $(.external.($(name)).$(field)) ;
for local value in $(values)
{
result += [ resolve-reference $(value) : $(mod) ] ;
}
return $(result) ;
}
module modular-rules
{
import type ;
import targets ;
import builtin ;
import alias ;
# Avoids any form of installation for Boost modules.
rule boost-install ( libraries * ) { }
# Generic typed target rule to pre-process main target
# declarations to make them work within the standalone
# modular structure.
rule main-target-rule (
name : sources * : requirements * : default-build * :
usage-requirements * )
{
local mod = [ CALLER_MODULE ] ;
# ECHO @@@ [[$(mod)]] modular-rules.main-target-rule $(name) :: $(sources) :: $(requirements) :: $(default-build) :: $(usage-requirements) ;
# First discover the required target type based on the exact alias used to
# invoke this rule.
local bt = [ BACKTRACE 1 ] ;
local rulename = $(bt[4]) ;
local target-type = [ type.type-from-rule-name $(rulename) ] ;
return [ targets.create-typed-target $(target-type) : [ project.current ] :
$(name) : $(sources) : $(requirements) : $(default-build) :
$(usage-requirements) ] ;
}
rule lib ( names + : sources * : requirements * : default-build * :
usage-requirements * )
{
local mod = [ CALLER_MODULE ] ;
requirements += <use>library ;
usage-requirements += <use>library ;
# ECHO @@@ [[$(mod)]] modular-rules.lib $(names) :: $(sources) :: $(requirements) :: $(default-build) :: $(usage-requirements) ;
return [ builtin.lib $(names) : $(sources) : $(requirements) : $(default-build) : $(usage-requirements) ] ;
}
rule alias ( name : sources * : requirements * : default-build * :
usage-requirements * )
{
local mod = [ CALLER_MODULE ] ;
# ECHO @@@ [[$(mod)]] modular-rules.alias $(name) :: $(sources) :: $(requirements) :: $(default-build) :: $(usage-requirements) ;
return [ alias.alias $(name) : $(sources) : $(requirements) : $(default-build) : $(usage-requirements) ] ;
}
rule library ( name ? : sources * : requirements * : default-build * :
usage-requirements * )
{
import modular ;
local mod = [ CALLER_MODULE ] ;
sources = [ modular.apply-external $(mod) : sources : $(sources) ] ;
requirements = [ modular.apply-external $(mod) : requirements : $(requirements) ] ;
default-build = [ modular.apply-external $(mod) : default-build : $(default-build) ] ;
usage-requirements = [ modular.apply-external $(mod) : usage-requirements : $(usage-requirements) ] ;
name ?= library ;
# ECHO @@@ [[$(mod)]] modular-rules.library $(name) :: $(sources) :: $(requirements) :: $(default-build) :: $(usage-requirements) ;
return [ alias.alias $(name) : $(sources) : $(requirements) : $(default-build) : $(usage-requirements) ] ;
}
}

View File

@@ -0,0 +1,208 @@
# Copyright 2008 Eduardo Gurgel
#
# Distributed under the Boost Software License, Version 1.0. (See
# accompanying file LICENSE.txt or copy at
# https://www.bfgroup.xyz/b2/LICENSE.txt)
#
# Support for creating components for the Tntnet web application
# server (http://tntnet.org)
#
# Example:
#
# using tntnet : /usr ;
# lib index : index.png index.js index.css index.ecpp otherclass.cpp
# /tntnet//tntnet /tntnet//cxxtools ;
#
#
import modules ;
import feature ;
import errors ;
import "class" : new ;
import generators ;
import project ;
import toolset : flags ;
import os ;
import virtual-target ;
import scanner ;
import type ;
type.register ECPP : ecpp ;
type.register JPEG : jpeg ;
type.register JPG : jpg ;
type.register PNG : png ;
type.register JS : js ;
type.register CSS : css ;
type.register GIF : gif ;
project.initialize $(__name__) ;
project tntnet ;
# Save the project so that we tolerate 'import + using' combo.
.project = [ project.current ] ;
# Initializes the Tntnet support module. The 'prefix' parameter
# tells where Tntnet is installed.
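# Example (installation prefix is illustrative):
#
#   using tntnet : /usr/local ;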
rule init ( prefix : full_bin ? : full_inc ? : full_lib ? )
{
project.push-current $(.project) ;
# Pre-compute the paths so that reinitialization with different parameters can be detected.
local inc_prefix lib_prefix bin_prefix ;
if $(full_inc)
{
inc_prefix = $(full_inc) ;
}
else
{
inc_prefix = $(prefix)/include ;
}
if $(full_lib)
{
lib_prefix = $(full_lib) ;
}
else
{
lib_prefix = $(prefix)/lib ;
}
if $(full_bin)
{
bin_prefix = $(full_bin) ;
}
else
{
bin_prefix = $(prefix)/bin ;
}
if $(.initialized)
{
if $(prefix) != $(.prefix)
{
errors.error
"Attempt the reinitialize Tntnet with different installation prefix" ;
}
if $(inc_prefix) != $(.incprefix)
{
errors.error
"Attempt the reinitialize Tntnet with different include path" ;
}
if $(lib_prefix) != $(.libprefix)
{
errors.error
"Attempt the reinitialize Tntnet with different library path" ;
}
if $(bin_prefix) != $(.binprefix)
{
errors.error
"Attempt the reinitialize Tntnet with different bin path" ;
}
}
else
{
.initialized = true ;
.prefix = $(prefix) ;
# Setup prefixes for include, binaries and libs.
.incprefix = $(.prefix)/include ;
.libprefix = $(.prefix)/lib ;
.binprefix = $(.prefix)/bin ;
# Generates cpp files from ecpp files using "ecppc" tool
generators.register-standard tntnet.ecpp : ECPP : CPP ;
# Generates cpp files from jpeg files using "ecppc" tool
generators.register-standard tntnet.jpeg : JPEG : CPP ;
# Generates cpp files from jpg files using "ecppc" tool
generators.register-standard tntnet.jpg : JPG : CPP ;
# Generates cpp files from png files using "ecppc" tool
generators.register-standard tntnet.png : PNG : CPP ;
# Generates cpp files from js files using "ecppc" tool
generators.register-standard tntnet.js : JS : CPP ;
# Generates cpp files from gif files using "ecppc" tool
generators.register-standard tntnet.gif : GIF : CPP ;
# Generates cpp files from css files using "ecppc" tool
generators.register-standard tntnet.css : CSS : CPP ;
# Scanner for ecpp includes
type.set-scanner ECPP : ecpp-scanner ;
local usage-requirements =
<include>$(.incprefix)
<library-path>$(.libprefix)
<dll-path>$(.libprefix)
<threading>multi
<allow>tntnet ;
lib cxxtools : $(main)
:
:
:
<include>$(.incprefix)/cxxtools
$(usage-requirements)
;
lib tntnet : $(main)
:
:
:
<include>$(.incprefix)/tntnet
$(usage-requirements)
;
}
project.pop-current ;
}
rule directory
{
return $(.prefix) ;
}
rule initialized ( )
{
return $(.initialized) ;
}
# Get <include> from current toolset.
flags tntnet.ecpp INCLUDES <include> ;
actions ecpp
{
$(.binprefix)/ecppc -I " $(INCLUDES) " -o $(<) $(>)
}
actions jpeg
{
$(.binprefix)/ecppc -b -m image/jpeg -o $(<) $(>)
}
actions jpg
{
$(.binprefix)/ecppc -b -m image/jpeg -o $(<) $(>)
}
actions js
{
$(.binprefix)/ecppc -b -m application/x-javascript -o $(<) $(>)
}
actions png
{
$(.binprefix)/ecppc -b -m image/png -o $(<) $(>)
}
actions gif
{
$(.binprefix)/ecppc -b -m image/gif -o $(<) $(>)
}
actions css
{
$(.binprefix)/ecppc -b -m text/css -o $(<) $(>)
}
class ecpp-scanner : common-scanner
{
rule pattern ( )
{
return "<%include.*>(.*)</%include>" ;
}
}
scanner.register ecpp-scanner : include ;

View File

@@ -0,0 +1,195 @@
################################################################################
#
# Copyright (c) 2007-2008 Dario Senic, Jurko Gospodnetic.
#
# Use, modification and distribution is subject to the Boost Software
# License Version 1.0. (See accompanying file LICENSE.txt or
# https://www.bfgroup.xyz/b2/LICENSE.txt)
#
################################################################################
################################################################################
#
# Boost Build wxFormBuilder generator tool module.
#
# wxFormBuilder is a GUI designer tool for the wxWidgets library. It can then
# generate C++ sources modeling the designed GUI using the wxWidgets library
# APIs.
#
# This module defines a wxFormBuilder project file type and rules needed to
# generate C++ source files from those projects. With it you can simply list
# wxFormBuilder projects as sources for some target and Boost Build will
# automatically convert them to C++ sources and process from there.
#
# The wxFormBuilder executable location may be provided as a parameter when
# configuring this toolset. Otherwise the default wxFormBuilder.exe executable
# name is used, looked up in the folder pointed to by the WXFORMBUILDER
# environment variable.
#
# Current limitations:
#
# * Works only on Windows.
# * Works only when run via Boost Jam using the native Windows cmd.exe command
# interpreter, i.e. the default native Windows Boost Jam build.
# * Used wxFormBuilder projects need to have their output file names defined
# consistently with target names assumed by this build script. This means
#   that their target names must use the prefix 'wxFormBuilderGenerated_', have
#   no output folder defined, and use a base name equal to the .fbp project
#   file's name.
#
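# Example (target and file names are illustrative):
#
#   exe gui-app : main.cpp gui.fbp ;
#
#   where the gui.fbp project is set up to generate
#   wxFormBuilderGenerated_gui.cpp and wxFormBuilderGenerated_gui.h.
#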
################################################################################
################################################################################
#
# Implementation note:
#
# Avoiding the limitation on the generated target file names can be done but
# would require depending on external tools to copy the wxFormBuilder project to
# a temp location and then modify it in-place to set its target file names. On
# the other hand wxFormBuilder is expected to add command-line options for
# choosing the target file names from the command line which will allow us to
# remove this limitation in a much cleaner way.
# (23.08.2008.) (Jurko)
#
################################################################################
import generators ;
import os ;
import path ;
import toolset ;
import type ;
################################################################################
#
# wxFormBuilder.generate()
# ------------------------
#
# Action for processing WX_FORM_BUILDER_PROJECT types.
#
################################################################################
#
# Implementation notes:
#
# wxFormBuilder generated CPP and H files need to be moved to the location
# where the Boost Build target system expects them so that the generated CPP
# file can be included into the compile process and that the clean rule
# successfully deletes both CPP and H files. We expect wxFormBuilder to generate
# files in the same location where the provided WX_FORM_BUILDER_PROJECT file is
# located.
# (15.05.2007.) (Dario)
#
################################################################################
actions generate
{
start "" /wait "$(EXECUTABLE)" /g "$(2)"
move "$(1[1]:BSR=$(2:P))" "$(1[1]:P)"
move "$(1[2]:BSR=$(2:P))" "$(1[2]:P)"
}
################################################################################
#
# wxFormBuilder.init()
# --------------------
#
# Main toolset initialization rule called via the toolset.using rule.
#
################################################################################
rule init ( executable ? )
{
if $(.initialized)
{
if $(.debug-configuration)
{
ECHO notice: [wxFormBuilder-cfg] Repeated initialization request
(executable \"$(executable:E="")\") detected and ignored. ;
}
}
else
{
local environmentVariable = WXFORMBUILDER ;
if $(.debug-configuration)
{
ECHO notice: [wxFormBuilder-cfg] Configuring wxFormBuilder... ;
}
# Deduce the path to the used wxFormBuilder executable.
if ! $(executable)
{
executable = "wxFormBuilder.exe" ;
local executable-path = [ os.environ $(environmentVariable) ] ;
if $(executable-path)-is-not-empty
{
executable = [ path.root $(executable) $(executable-path) ] ;
}
else if $(.debug-configuration)
{
ECHO notice: [wxFormBuilder-cfg] No wxFormBuilder path
configured either explicitly or using the
$(environmentVariable) environment variable. ;
ECHO notice: [wxFormBuilder-cfg] To avoid complications please
update your configuration to include a correct path to the
wxFormBuilder executable. ;
ECHO notice: [wxFormBuilder-cfg] wxFormBuilder executable will
be searched for on the system path. ;
}
}
if $(.debug-configuration)
{
ECHO notice: [wxFormBuilder-cfg] Will use wxFormBuilder executable
\"$(executable)\". ;
}
# Now we are sure we have everything we need to initialize this toolset.
.initialized = true ;
# Store the path to the used wxFormBuilder executable.
.executable = $(executable) ;
# Type registration.
type.register WX_FORM_BUILDER_PROJECT : fbp ;
# Parameters to be forwarded to the action rule.
toolset.flags wxFormBuilder.generate EXECUTABLE : $(.executable) ;
# Generator definition and registration.
generators.register-standard wxFormBuilder.generate :
WX_FORM_BUILDER_PROJECT : CPP(wxFormBuilderGenerated_%)
H(wxFormBuilderGenerated_%) ;
}
}
################################################################################
#
# wxFormBuilder.is-initialized()
# ------------------------------
#
# Returns whether this toolset has been initialized.
#
################################################################################
rule is-initialized ( )
{
return $(.initialized) ;
}
################################################################################
#
# Startup code executed when loading this module.
#
################################################################################
# Global variables for this module.
.executable = ;
.initialized = ;
if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
{
.debug-configuration = true ;
}

View File

@@ -0,0 +1,9 @@
bin.*
bootstrap
*.obj
*.ilk
*.pdb
b2
b2.*
bjam
bjam.*

View File

@@ -0,0 +1,64 @@
Name: boost-jam
Version: 3.1.19
Summary: Build tool
Release: 1
Source: %{name}-%{version}.tgz
License: Boost Software License, Version 1.0
Group: Development/Tools
URL: http://www.boost.org
Packager: Rene Rivera <grafik@redshift-software.com>
BuildRoot: /var/tmp/%{name}-%{version}.root
%description
Boost Jam is a build tool based on FTJam, which in turn is based on
Perforce Jam. It contains significant improvements made to facilitate
its use in the Boost Build System, but should be backward compatible
with Perforce Jam.
Authors:
Perforce Jam : Christopher Seiwald
FT Jam : David Turner
Boost Jam : David Abrahams
Copyright:
/+\
+\ Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.
\+/
License is hereby granted to use this software and distribute it
freely, as long as this copyright notice is retained and modifications
are clearly marked.
ALL WARRANTIES ARE HEREBY DISCLAIMED.
Also:
Copyright 2001-2006 David Abrahams.
Copyright 2002-2006 Rene Rivera.
Copyright 2003-2006 Vladimir Prus.
Distributed under the Boost Software License, Version 1.0.
(See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
%prep
%setup -n %{name}-%{version}
%build
LOCATE_TARGET=bin ./build.sh $BOOST_JAM_TOOLSET
%install
rm -rf $RPM_BUILD_ROOT
mkdir -p $RPM_BUILD_ROOT%{_bindir}
mkdir -p $RPM_BUILD_ROOT%{_docdir}/%{name}-%{version}
install -m 755 bin/bjam $RPM_BUILD_ROOT%{_bindir}/bjam-%{version}
ln -sf bjam-%{version} $RPM_BUILD_ROOT%{_bindir}/bjam
cp -R *.html *.png *.css LICENSE*.txt images jam $RPM_BUILD_ROOT%{_docdir}/%{name}-%{version}
find $RPM_BUILD_ROOT -name CVS -type d -exec rm -r {} \;
%files
%defattr(-,root,root)
%attr(755,root,root) /usr/bin/*
%doc %{_docdir}/%{name}-%{version}
%clean
rm -rf $RPM_BUILD_ROOT

View File

@@ -0,0 +1 @@
this is really out of our hands, so tell inspect to ignore this directory

View File

@@ -0,0 +1,195 @@
@ECHO OFF
REM ~ Copyright 2002-2007 Rene Rivera.
REM ~ Distributed under the Boost Software License, Version 1.0.
REM ~ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
setlocal
goto Start
:Set_Error
color 00
goto :eof
:Clear_Error
ver >nul
goto :eof
:Error_Print
REM Output an error message and set the errorlevel to indicate failure.
setlocal
ECHO ###
ECHO ### %1
ECHO ###
ECHO ### You can specify the toolset as the argument, i.e.:
ECHO ### .\build.bat msvc
ECHO ###
ECHO ### Toolsets supported by this script are: borland, como, gcc,
ECHO ### gcc-nocygwin, intel-win32, mingw,
ECHO ### vc12, vc14, vc141, vc142, vc143
ECHO ###
ECHO ### If you have Visual Studio 2017 installed you will need to either update
ECHO ### the Visual Studio 2017 installer or run from VS 2017 Command Prompt
ECHO ### as we were unable to detect your toolset installation.
ECHO ###
call :Set_Error
endlocal
goto :eof
:Test_Option
REM Tests whether the given string is in the form of an option: "--*"
call :Clear_Error
setlocal
set test=%1
if not defined test (
call :Set_Error
goto Test_Option_End
)
set test=###%test%###
set test=%test:"###=%
set test=%test:###"=%
set test=%test:###=%
if not "-" == "%test:~1,1%" call :Set_Error
:Test_Option_End
endlocal
goto :eof
:Test_Empty
REM Tests whether the given string is not empty
call :Clear_Error
setlocal
set test=%1
if not defined test (
call :Clear_Error
goto Test_Empty_End
)
set test=###%test%###
set test=%test:"###=%
set test=%test:###"=%
set test=%test:###=%
if not "" == "%test%" call :Set_Error
:Test_Empty_End
endlocal
goto :eof
:Guess_Toolset
set local
REM Try and guess the toolset to bootstrap the build with...
REM Sets B2_TOOLSET to the first found toolset.
REM May also set B2_TOOLSET_ROOT to the
REM location of the found toolset.
call :Clear_Error
call :Test_Empty "%ProgramFiles%"
if not errorlevel 1 set "ProgramFiles=C:\Program Files"
REM Visual Studio is by default installed to %ProgramFiles% on 32-bit machines and
REM %ProgramFiles(x86)% on 64-bit machines. Making a common variable for both.
call :Clear_Error
call :Test_Empty "%ProgramFiles(x86)%"
if errorlevel 1 (
set "VS_ProgramFiles=%ProgramFiles(x86)%"
) else (
set "VS_ProgramFiles=%ProgramFiles%"
)
call guess_toolset.bat
if errorlevel 1 (
call :Error_Print "Could not find a suitable toolset.")
goto :eof
endlocal
goto :eof
:Start
set B2_TOOLSET=
set B2_BUILD_ARGS=
REM If no arguments guess the toolset;
REM or if first argument is an option guess the toolset;
REM otherwise the argument is the toolset to use.
call :Clear_Error
call :Test_Empty %1
if not errorlevel 1 (
call :Guess_Toolset
if not errorlevel 1 ( goto Setup_Toolset ) else ( goto Finish )
)
call :Clear_Error
call :Test_Option %1
if not errorlevel 1 (
call :Guess_Toolset
if not errorlevel 1 ( goto Setup_Toolset ) else ( goto Finish )
)
call :Clear_Error
set B2_TOOLSET=%1
shift
goto Setup_Toolset
:Setup_Toolset
REM Setup the toolset command and options. This bit of code
REM needs to be flexible enough to handle both when
REM the toolset was guessed at and found, or when the toolset
REM was indicated in the command arguments.
REM NOTE: The strange multiple "if ?? == _toolset_" tests are that way
REM because in BAT variables are substituted only once during a single
REM command. A complete "if ... else ..."
REM is a single command, even though it's in multiple lines here.
:Setup_Args
call :Clear_Error
call :Test_Empty %1
if not errorlevel 1 goto Config_Toolset
call :Clear_Error
call :Test_Option %1
if errorlevel 1 (
set B2_BUILD_ARGS=%B2_BUILD_ARGS% %1
shift
goto Setup_Args
)
:Config_Toolset
call config_toolset.bat
if "_%_known_%_" == "__" (
call :Error_Print "Unknown toolset: %B2_TOOLSET%"
)
if errorlevel 1 goto Finish
echo ###
echo ### Using '%B2_TOOLSET%' toolset.
echo ###
set B2_SOURCES=
set B2_SOURCES=%B2_SOURCES% builtins.cpp class.cpp
set B2_SOURCES=%B2_SOURCES% command.cpp compile.cpp constants.cpp cwd.cpp
set B2_SOURCES=%B2_SOURCES% debug.cpp debugger.cpp
set B2_SOURCES=%B2_SOURCES% execcmd.cpp execnt.cpp execunix.cpp filent.cpp filesys.cpp fileunix.cpp frames.cpp function.cpp
set B2_SOURCES=%B2_SOURCES% glob.cpp hash.cpp hcache.cpp hdrmacro.cpp headers.cpp jam.cpp
set B2_SOURCES=%B2_SOURCES% jamgram.cpp lists.cpp make.cpp make1.cpp md5.cpp mem.cpp modules.cpp
set B2_SOURCES=%B2_SOURCES% native.cpp object.cpp option.cpp output.cpp parse.cpp pathnt.cpp
set B2_SOURCES=%B2_SOURCES% pathsys.cpp pathunix.cpp regexp.cpp rules.cpp scan.cpp search.cpp jam_strings.cpp
set B2_SOURCES=%B2_SOURCES% startup.cpp subst.cpp sysinfo.cpp
set B2_SOURCES=%B2_SOURCES% timestamp.cpp variable.cpp w32_getreg.cpp
set B2_SOURCES=%B2_SOURCES% modules/order.cpp
set B2_SOURCES=%B2_SOURCES% modules/path.cpp
set B2_SOURCES=%B2_SOURCES% modules/property-set.cpp
set B2_SOURCES=%B2_SOURCES% modules/regex.cpp
set B2_SOURCES=%B2_SOURCES% modules/sequence.cpp
set B2_SOURCES=%B2_SOURCES% modules/set.cpp
set B2_CXXFLAGS=%B2_CXXFLAGS% -DNDEBUG
@echo ON
%B2_CXX% %CXXFLAGS% %B2_CXXFLAGS% %B2_SOURCES% %B2_CXX_LINK%
dir *.exe
copy /b .\b2.exe .\bjam.exe
:Finish
@exit /b %ERRORLEVEL%

View File

@@ -0,0 +1,511 @@
#!/bin/sh
#~ Copyright 2002-2020 Rene Rivera.
#~ Distributed under the Boost Software License, Version 1.0.
#~ (See accompanying file LICENSE_1_0.txt or copy at
#~ http://www.boost.org/LICENSE_1_0.txt)
FALSE=1
TRUE=0
# Reset the toolset.
B2_TOOLSET=
B2_SETUP=
# Internal options.
B2_VERBOSE_OPT=${B2_VERBOSE_OPT:=${FALSE}}
B2_DEBUG_OPT=${B2_DEBUG_OPT:=${FALSE}}
B2_GUESS_TOOLSET_OPT=${FALSE}
B2_HELP_OPT=${FALSE}
B2_CXX_OPT=
B2_CXXFLAGS_OPT=
# We need to calculate and set SCRIPT_PATH and SCRIPT_DIR to reference this
# script so that we can refer to file relative to it.
SCRIPT_PATH=""
if test "${BASH_SOURCE}" ; then
SCRIPT_PATH=${BASH_SOURCE}
fi
if test "${SCRIPT_PATH}" = "" ; then
SCRIPT_PATH=$0
fi
SCRIPT_DIR="$( cd "$( dirname "${SCRIPT_PATH}" )" && pwd )"
# This script needs to operate at engine source directory.
SAVED_PWD="${PWD}"
cd "${SCRIPT_DIR}"
test_true ()
{
if test $1 -eq ${TRUE} ; then
return ${TRUE}
fi
return ${FALSE}
}
# Run a command, and echo before doing so. Also checks the exit status and quits
# if there was an error.
echo_run ()
{
if test_true ${B2_VERBOSE_OPT} ; then echo "> $@" ; fi
$@
r=$?
if test $r -ne ${TRUE} ; then
exit $r
fi
}
# Print an error message, and exit with a status of 1.
error_exit ()
{
echo "
${@}
You can specify the toolset as the argument, i.e.:
./build.sh [options] gcc
Toolsets supported by this script are:
acc, clang, como, gcc, intel-darwin, intel-linux, kcc, kylix, mipspro,
pathscale, pgi, qcc, sun, sunpro, tru64cxx, vacpp
For any toolset you can override the path to the compiler with the '--cxx'
option. You can also use additional flags for the compiler with the
'--cxxflags' option.
A special toolset, cxx, is available which is used as a fallback when a more
specific toolset is not found and the cxx command is detected. The 'cxx'
toolset will use the '--cxx' and '--cxxflags' options, if present.
Options:
--help Show this help message.
--verbose Show messages about what this script is doing.
--debug Build b2 with debug information, and no
optimizations.
--guess-toolset Print the toolset we can detect for building.
--cxx=CXX The compiler exec to use instead of the detected
compiler exec.
--cxxflags=CXXFLAGS The compiler flags to use in addition to the
flags for the detected compiler.
" 1>&2
exit 1
}
# Check that a command is in the PATH.
test_path ()
{
if `command -v command 1>/dev/null 2>/dev/null`; then
command -v $1 1>/dev/null 2>/dev/null
else
hash $1 1>/dev/null 2>/dev/null
fi
}
# Check that the OS name, as returned by "uname", is as given.
test_uname ()
{
if test_path uname; then
test `uname` = $*
fi
}
test_compiler ()
{
EXE="${B2_CXX_OPT:-$1}"
shift
CMD="${EXE} $@ ${B2_CXXFLAGS_OPT:-}"
SETUP=${B2_SETUP:-true}
if test_true ${B2_VERBOSE_OPT} ; then
echo "> ${CMD} check_cxx11.cpp"
( ${SETUP} ; ${CMD} check_clib.cpp check_cxx11.cpp )
else
( ${SETUP} ; ${CMD} check_clib.cpp check_cxx11.cpp ) 1>/dev/null 2>/dev/null
fi
CHECK_RESULT=$?
if test_true ${CHECK_RESULT} ; then
B2_CXX=${CMD}
fi
rm -rf check_cxx11.o* a.out a.exe 1>/dev/null 2>/dev/null
return ${CHECK_RESULT}
}
test_toolset ()
{
if test "${TOOLSET}" = "" ; then return ${TRUE} ; fi
if test "${TOOLSET}" = "$1" -o "${TOOLSET}" = "$2" -o "${TOOLSET}" = "$3" ; then return ${TRUE} ; fi
return 1
}
# Check the toolset to bootstrap the build with. The one optional argument to
# the function is a toolset name. This operates as follows based on these
# contextual vars, if set, and if an arg is given:
#
# No vars set:
# Checks, in some priority order, possible toolset commands. Upon finding the
# first working command sets B2_TOOLSET to the toolset and B2_CXX to the
# compile command with any base options.
#
# B2_TOOLSET set:
# Checks that toolset for possible compile commands and sets B2_CXX to the
# command that works for the toolset.
#
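# For example (illustrative):
#
#   ./build.sh                  # no toolset given: guess one, set B2_TOOLSET and B2_CXX
#   ./build.sh gcc              # B2_TOOLSET=gcc: only probe gcc compile commands
#   ./build.sh --cxx=g++-12     # generic 'cxx' toolset with an explicit compiler
#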
check_toolset ()
{
TOOLSET=${B2_TOOLSET%%-[0-9]*}
TOOLSET_SUFFIX=${B2_TOOLSET##$TOOLSET}
# Prefer Clang (clang) on macOS..
if test_toolset clang && test_uname Darwin && test_compiler clang++$TOOLSET_SUFFIX -x c++ -std=c++11 ; then B2_TOOLSET=clang$TOOLSET_SUFFIX ; return ${TRUE} ; fi
# GCC (gcc)..
if test_toolset gcc && test_compiler g++$TOOLSET_SUFFIX -x c++ -std=c++11 ; then B2_TOOLSET=gcc$TOOLSET_SUFFIX ; return ${TRUE} ; fi
if test_toolset gcc && test_compiler g++$TOOLSET_SUFFIX -x c++ -std=c++11 -D_GNU_SOURCE ; then B2_TOOLSET=gcc$TOOLSET_SUFFIX ; return ${TRUE} ; fi
# GCC (gcc) with -pthread arg (for AIX)..
if test_toolset gcc && test_compiler g++$TOOLSET_SUFFIX -x c++ -std=c++11 -pthread ; then B2_TOOLSET=gcc$TOOLSET_SUFFIX ; return ${TRUE} ; fi
# Clang (clang)..
if test_toolset clang && test_compiler clang++$TOOLSET_SUFFIX -x c++ -std=c++11 ; then B2_TOOLSET=clang$TOOLSET_SUFFIX ; return ${TRUE} ; fi
# Intel macOS (intel-darwin)
if test_toolset intel-darwin && test -r "${HOME}/intel/oneapi/setvars.sh" && test_uname Darwin ; then
B2_SETUP="source ${HOME}/intel/oneapi/setvars.sh"
if test_toolset intel-darwin && test_compiler icpx -x c++ -std=c++11 ; then B2_TOOLSET=intel-darwin ; return ${TRUE} ; fi
if test_toolset intel-darwin && test_compiler icc -x c++ -std=c++11 ; then B2_TOOLSET=intel-darwin ; return ${TRUE} ; fi
B2_SETUP=
fi
if test_toolset intel-darwin && test -r "/opt/intel/oneapi/setvars.sh" && test_uname Darwin ; then
B2_SETUP="source /opt/intel/oneapi/setvars.sh"
if test_toolset intel-darwin && test_compiler icpx -x c++ -std=c++11 ; then B2_TOOLSET=intel-darwin ; return ${TRUE} ; fi
if test_toolset intel-darwin && test_compiler icc -x c++ -std=c++11 ; then B2_TOOLSET=intel-darwin ; return ${TRUE} ; fi
B2_SETUP=
fi
# Intel oneAPI (intel-linux)
if test_toolset intel-linux && test_path icpx ; then
if test_compiler icpx -x c++ -std=c++11 ; then B2_TOOLSET=intel-linux ; return ${TRUE} ; fi
fi
if test_toolset intel-linux && test_path icc ; then
if test_compiler icc -x c++ -std=c++11 ; then B2_TOOLSET=intel-linux ; return ${TRUE} ; fi
fi
if test_toolset intel-linux && test -r "${HOME}/intel/oneapi/setvars.sh" ; then
B2_SETUP="source ${HOME}/intel/oneapi/setvars.sh"
if test_toolset intel-linux && test_compiler icpx -x c++ -std=c++11 ; then B2_TOOLSET=intel-linux ; return ${TRUE} ; fi
if test_toolset intel-linux && test_compiler icc -x c++ -std=c++11 ; then B2_TOOLSET=intel-linux ; return ${TRUE} ; fi
B2_SETUP=
fi
if test_toolset intel-linux && test -r "/opt/intel/oneapi/setvars.sh" ; then
B2_SETUP="source /opt/intel/oneapi/setvars.sh"
if test_toolset intel-linux && test_compiler icpx -x c++ -std=c++11 ; then B2_TOOLSET=intel-linux ; return ${TRUE} ; fi
if test_toolset intel-linux && test_compiler icc -x c++ -std=c++11 ; then B2_TOOLSET=intel-linux ; return ${TRUE} ; fi
B2_SETUP=
fi
# Intel Pro (intel-linux)
if test_toolset intel-linux && test_path icpc ; then
if test_compiler icpc -x c++ -std=c++11 ; then B2_TOOLSET=intel-linux ; return ${TRUE} ; fi
fi
if test_toolset intel-linux && test -r "/opt/intel/inteloneapi/setvars.sh" ; then
B2_SETUP="source /opt/intel/inteloneapi/setvars.sh"
if test_compiler icpc -x c++ -std=c++11 ; then B2_TOOLSET=intel-linux ; return ${TRUE} ; fi
B2_SETUP=
fi
if test_toolset intel-linux && test -r "/opt/intel/cc/9.0/bin/iccvars.sh" ; then
B2_SETUP="source /opt/intel/cc/9.0/bin/iccvars.sh"
if test_compiler icpc -x c++ -std=c++11 ; then B2_TOOLSET=intel-linux ; return ${TRUE} ; fi
B2_SETUP=
fi
if test_toolset intel-linux && test -r "/opt/intel_cc_80/bin/iccvars.sh" ; then
B2_SETUP="source /opt/intel_cc_80/bin/iccvars.sh"
if test_compiler icpc -x c++ -std=c++11 ; then B2_TOOLSET=intel-linux ; return ${TRUE} ; fi
B2_SETUP=
fi
# Mips Pro (mipspro)
if test_toolset mipspro && test_uname IRIX && test_compiler CC -FE:template_in_elf_section -ptused ; then B2_TOOLSET=mipspro ; return ${TRUE} ; fi
if test_toolset mipspro && test_uname IRIX64 && test_compiler CC -FE:template_in_elf_section -ptused ; then B2_TOOLSET=mipspro ; return ${TRUE} ; fi
# OSF Tru64 C++ (tru64cxx)
if test_toolset tru64cxx && test_uname OSF1 && test_compiler cc ; then B2_TOOLSET=tru64cxx ; return ${TRUE} ; fi
# QNX (qcc)
if test_toolset qcc && test_uname QNX && test_compiler QCC ; then B2_TOOLSET=qcc ; return ${TRUE} ; fi
# Linux XL/VA C++ (xlcpp, vacpp)
if test_toolset xlcpp vacpp && test_uname Linux && test_compiler xlC_r ; then
if /usr/bin/lscpu | grep Byte | grep Little > /dev/null 2>&1 ; then
# Little endian linux
B2_TOOLSET=xlcpp
return ${TRUE}
else
# Big endian linux
B2_TOOLSET=vacpp
return ${TRUE}
fi
fi
# AIX VA C++ (vacpp)
if test_toolset vacpp && test_uname AIX && test_compiler xlC_r ; then B2_TOOLSET=vacpp ; return ${TRUE} ; fi
# PGI (pgi)
if test_toolset pgi && test_compiler pgc++ -std=c++11 ; then B2_TOOLSET=pgi ; return ${TRUE} ; fi
# Pathscale C++ (pathscale)
if test_toolset pathscale && test_compiler pathCC ; then B2_TOOLSET=pathscale ; return ${TRUE} ; fi
# Como (como)
if test_toolset como && test_compiler como ; then B2_TOOLSET=como ; return ${TRUE} ; fi
# Borland C++ (kylix)
if test_toolset kylix && test_compiler bc++ -tC -q ; then B2_TOOLSET=kylix ; return ${TRUE} ; fi
# aCC (acc)
if test_toolset acc && test_compiler aCC -AA ; then B2_TOOLSET=acc ; return ${TRUE} ; fi
# Sun Pro C++ (sunpro)
if test_toolset sunpro && test_compiler /opt/SUNWspro/bin/CC -std=c++11 ; then B2_TOOLSET=sunpro ; return ${TRUE} ; fi
# Generic (cxx)
if test_toolset cxx && test_compiler cxx ; then B2_TOOLSET=cxx ; return ${TRUE} ; fi
if test_toolset cxx && test_compiler cpp ; then B2_TOOLSET=cxx ; return ${TRUE} ; fi
if test_toolset cxx && test_compiler CC ; then B2_TOOLSET=cxx ; return ${TRUE} ; fi
# Nothing found.
if test "${B2_TOOLSET}" = "" ; then
error_exit "Could not find a suitable toolset."
fi
return ${FALSE}
}
# Handle command options and args.
while test $# -gt 0
do
case "$1" in
--verbose) B2_VERBOSE_OPT=${TRUE} ;;
--debug) B2_DEBUG_OPT=${TRUE} ;;
--guess-toolset) B2_GUESS_TOOLSET_OPT=${TRUE} ;;
--help) B2_HELP_OPT=${TRUE} ;;
--cxx=*) B2_CXX_OPT=`expr "x$1" : "x--cxx=\(.*\)"` ;;
--cxxflags=*) B2_CXXFLAGS_OPT=`expr "x$1" : "x--cxxflags=\(.*\)"` ;;
-*) ;;
?*) B2_TOOLSET=$1 ;;
esac
shift
done
# Show some help, if requested.
if test_true ${B2_HELP_OPT} ; then
error_exit
fi
# If we have a CXX but no B2_TOOLSET specified by the user we assume they meant
# "cxx" as the toolset.
if test "${B2_CXX_OPT}" != "" -a "${B2_TOOLSET}" = "" ; then
B2_TOOLSET=cxx
fi
# If we have B2_TOOLSET=cxx but no B2_CXX_OPT nor B2_CXXFLAGS_OPT specified by the user
# we assume they meant $CXX and $CXXFLAGS.
if test "${B2_TOOLSET}" = "cxx" -a "${B2_CXX_OPT}" = "" -a "${B2_CXXFLAGS_OPT}" = "" ; then
B2_CXX_OPT="${CXX}"
B2_CXXFLAGS_OPT="${CXXFLAGS}"
fi
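# Example invocations (a sketch; assuming this script is invoked as ./build.sh):
#   ./build.sh                    # guess a toolset and build the engine
#   ./build.sh --verbose clang    # force the clang toolset and echo each command
#   ./build.sh --guess-toolset    # print the guessed toolset and exit
#   ./build.sh --cxx=g++-12       # implies the generic 'cxx' toolset
#   CXX=g++-12 ./build.sh cxx     # 'cxx' picks up $CXX and $CXXFLAGS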
# Guess toolset, or toolset commands.
check_toolset
TOOLSET_CHECK=$?
# We can bail from the rest of the checks and build if we are just guessing
# the toolset.
if test_true ${B2_GUESS_TOOLSET_OPT} ; then
echo "${B2_TOOLSET}"
exit 0
fi
# We need a viable compiler. Check here and give some feedback about it.
if ! test_true ${TOOLSET_CHECK} ; then
echo "
A C++11 capable compiler is required for building the B2 engine.
Toolset '${B2_TOOLSET}' does not appear to support C++11.
"
(B2_VERBOSE_OPT=${TRUE} check_toolset)
error_exit "
** Note: a C++11-capable compiler is _only_ required for building the B2
** engine. The B2 build system allows using any C++ level, and any other
** supported language and resource, in your projects.
"
fi
# Set the additional options needed to build the engine based on the toolset.
case "${B2_TOOLSET}" in
gcc|gcc-*)
CXX_VERSION_OPT=${CXX_VERSION_OPT:---version}
B2_CXXFLAGS_RELEASE="-O2 -s"
B2_CXXFLAGS_DEBUG="-O0 -g"
;;
intel-*)
CXX_VERSION_OPT=${CXX_VERSION_OPT:---version}
B2_CXXFLAGS_RELEASE="-O3 -s -static"
B2_CXXFLAGS_DEBUG="-O0 -g -p -static"
;;
vacpp)
CXX_VERSION_OPT=${CXX_VERSION_OPT:--qversion}
B2_CXXFLAGS_RELEASE="-O3 -s -qstrict -qinline"
B2_CXXFLAGS_DEBUG="-g -qNOOPTimize -qnoinline -pg"
;;
xlcpp)
CXX_VERSION_OPT=${CXX_VERSION_OPT:--qversion}
B2_CXXFLAGS_RELEASE="-s -O3 -qstrict -qinline"
B2_CXXFLAGS_DEBUG="-g -qNOOPTimize -qnoinline -pg"
;;
como)
CXX_VERSION_OPT=${CXX_VERSION_OPT:---version}
B2_CXXFLAGS_RELEASE="-O3 --inlining"
B2_CXXFLAGS_DEBUG="-O0 -g --no_inlining --long_long"
;;
kcc)
CXX_VERSION_OPT=${CXX_VERSION_OPT:---version}
B2_CXXFLAGS_RELEASE="+K2 -s"
B2_CXXFLAGS_DEBUG="+K0 -g"
;;
kylix)
CXX_VERSION_OPT=${CXX_VERSION_OPT:---version}
B2_CXXFLAGS_RELEASE="-O2 -vi -w-inl -s"
B2_CXXFLAGS_DEBUG="-Od -v -vi-"
;;
mipspro)
CXX_VERSION_OPT=${CXX_VERSION_OPT:---version}
B2_CXXFLAGS_RELEASE="-Ofast -g0 \"-INLINE:none\" -s"
B2_CXXFLAGS_DEBUG="-O0 -INLINE -g"
;;
pathscale)
CXX_VERSION_OPT=${CXX_VERSION_OPT:---version}
B2_CXXFLAGS_RELEASE="-O3 -inline -s"
B2_CXXFLAGS_DEBUG="-O0 -noinline -ggdb"
;;
pgi)
CXX_VERSION_OPT=${CXX_VERSION_OPT:---version}
B2_CXXFLAGS_RELEASE="-fast -s"
B2_CXXFLAGS_DEBUG="-O0 -gopt"
;;
sun*)
CXX_VERSION_OPT=${CXX_VERSION_OPT:--V}
B2_CXXFLAGS_RELEASE="-xO4 -s"
B2_CXXFLAGS_DEBUG="-g"
;;
clang|clang-*)
CXX_VERSION_OPT=${CXX_VERSION_OPT:---version}
B2_CXXFLAGS_RELEASE="-O3 -s"
B2_CXXFLAGS_DEBUG="-O0 -fno-inline -g"
;;
tru64cxx)
CXX_VERSION_OPT=${CXX_VERSION_OPT:---version}
B2_CXXFLAGS_RELEASE="-O5 -inline speed -s"
B2_CXXFLAGS_DEBUG="-O0 -pg -g"
;;
acc)
CXX_VERSION_OPT=${CXX_VERSION_OPT:---version}
B2_CXXFLAGS_RELEASE="-O3 -s"
B2_CXXFLAGS_DEBUG="+d -g"
;;
qcc)
CXX_VERSION_OPT=${CXX_VERSION_OPT:---version}
B2_CXXFLAGS_RELEASE="-O3 -Wc,-finline-functions"
B2_CXXFLAGS_DEBUG="O0 -Wc,-fno-inline -gstabs+"
;;
cxx)
CXX_VERSION_OPT=${CXX_VERSION_OPT:---version}
;;
*)
error_exit "Unknown toolset: ${B2_TOOLSET}"
;;
esac
build_b2 ()
{
echo "
###
###
### Using '${B2_TOOLSET}' toolset.
###
###
"
echo_run ${B2_CXX} ${CXX_VERSION_OPT}
echo "
###
###
"
B2_SOURCES="\
builtins.cpp \
class.cpp \
command.cpp \
compile.cpp \
constants.cpp \
cwd.cpp \
debug.cpp \
debugger.cpp \
execcmd.cpp \
execnt.cpp \
execunix.cpp \
filesys.cpp \
filent.cpp \
fileunix.cpp \
frames.cpp \
function.cpp \
glob.cpp \
hash.cpp \
hcache.cpp \
hdrmacro.cpp \
headers.cpp \
jam_strings.cpp \
jam.cpp \
jamgram.cpp \
lists.cpp \
make.cpp \
make1.cpp \
md5.cpp \
mem.cpp \
modules.cpp \
native.cpp \
object.cpp \
option.cpp \
output.cpp \
parse.cpp \
pathnt.cpp \
pathsys.cpp \
pathunix.cpp \
regexp.cpp \
rules.cpp \
scan.cpp \
search.cpp \
startup.cpp \
subst.cpp \
sysinfo.cpp \
timestamp.cpp \
variable.cpp \
w32_getreg.cpp \
modules/order.cpp \
modules/path.cpp \
modules/property-set.cpp \
modules/regex.cpp \
modules/sequence.cpp \
modules/set.cpp \
"
if test_true ${B2_DEBUG_OPT} ; then B2_CXXFLAGS="${B2_CXXFLAGS_DEBUG}"
else B2_CXXFLAGS="${B2_CXXFLAGS_RELEASE} -DNDEBUG"
fi
( B2_VERBOSE_OPT=${TRUE} echo_run ${B2_CXX} ${B2_CXXFLAGS} ${B2_SOURCES} -o b2 )
( B2_VERBOSE_OPT=${TRUE} echo_run cp b2 bjam )
}
if test_true ${B2_VERBOSE_OPT} ; then
(
${B2_SETUP}
build_b2
)
else
(
${B2_SETUP} 1>/dev/null 2>/dev/null
build_b2
)
fi

View File

@@ -0,0 +1,153 @@
$ ! Copyright 2002-2003 Rene Rivera, Johan Nilsson.
$ !
$ ! 8-APR-2004 Boris Gubenko
$ ! Miscellaneous improvements.
$ !
$ ! 20-JAN-2015 Artur Shepilko
$ ! Adapt for jam 3.1.19
$ !
$ ! Distributed under the Boost Software License, Version 1.0.
$ ! (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
$ !
$ ! bootstrap build script for Jam
$ !
$ THIS_FACILITY = "BUILDJAM"
$
$ verify = f$trnlnm("VERIFY_''THIS_FACILITY'")
$ save_verify = f$verify(verify)
$
$ SAY := WRITE SYS$OUTPUT
$ !
$ ON WARNING THEN CONTINUE
$ ON ERROR THEN GOTO EXIT
$
$ BOOST_JAM_TOOLSET = "vmsdecc"
$ BOOST_JAM_CC = "CC"
$ BJAM_UPDATE = ""
$
$ ARGS = F$EDIT("''p1' ''p2' ''p3' ''p4'","TRIM,LOWERCASE")
$ ARGS_LEN = F$LENGTH(ARGS)
$
$ IF F$LOCATE("--update", ARGS) .NE. F$LENGTH(ARGS) THEN BJAM_UPDATE = "update"
$ IF BJAM_UPDATE .EQS. "update" -
.AND. F$SEARCH("[.bootstrap_vms]jam0.exe") .EQS. "" THEN BJAM_UPDATE = ""
$
$ IF BJAM_UPDATE .NES. "update"
$ THEN
$ GOSUB CLEAN
$
$ SAY "I|Creating bootstrap directory..."
$ CREATE /DIR [.bootstrap_vms]
$
$ !------------------
$ ! NOTE: Assume jamgram and jambase have been generated (true for fresh release).
$ ! Otherwise these need to be re-generated manually.
$ !------------------
$
$ SAY "I|Building bootstrap jam..."
$ !
$ CC_FLAGS = "/DEFINE=VMS /STANDARD=VAXC " + -
"/PREFIX_LIBRARY_ENTRIES=(ALL_ENTRIES) " + -
"/WARNING=DISABLE=(LONGEXTERN)" + -
"/OBJ=[.bootstrap_vms] "
$
$ CC_INCLUDE=""
$
$ SAY "I|Using compile flags: ", CC_FLAGS
$
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE command.c
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE compile.c
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE constants.c
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE debug.c
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE execcmd.c
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE frames.c
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE function.c
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE glob.c
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE hash.c
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE hdrmacro.c
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE headers.c
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE jam.c
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE jambase.c
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE jamgram.c
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE lists.c
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE make.c
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE make1.c
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE object.c
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE option.c
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE output.c
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE parse.c
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE pathsys.c
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE regexp.c
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE rules.c
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE scan.c
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE search.c
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE subst.c
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE timestamp.c
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE variable.c
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE modules.c
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE strings.c
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE filesys.c
$
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE execvms.c
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE pathvms.c
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE filevms.c
$
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE builtins.c
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE class.c
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE cwd.c
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE native.c
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE md5.c
$
$ CC_INCLUDE = "/INCLUDE=(""./modules"")"
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE [.modules]set.c
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE [.modules]path.c
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE [.modules]regex.c
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE [.modules]property-set.c
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE [.modules]sequence.c
$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE [.modules]order.c
$
$ LIB /CREATE [.bootstrap_vms]jam0.olb [.bootstrap_vms]*.obj
$ LINK /EXEC=[.bootstrap_vms]jam0.exe -
[.bootstrap_vms]jam0.olb/INCLUDE=JAM/LIB
$
$ IF F$SEARCH("[.bootstrap_vms]*.obj") .NES. "" THEN -
DELETE /NOCONF /NOLOG [.bootstrap_vms]*.obj;*, *.olb;*
$ ENDIF
$
$ IF F$SEARCH("[.bootstrap_vms]jam0.exe") .NES. ""
$ THEN
$ IF BJAM_UPDATE .NES. "update"
$ THEN
$ SAY "I|Cleaning previous build..."
$ MCR [.bootstrap_vms]jam0.exe -f build.jam --toolset='BOOST_JAM_TOOLSET' 'ARGS' clean
$ ENDIF
$
$ SAY "I|Building Boost.Jam..."
$ MCR [.bootstrap_vms]jam0.exe -f build.jam --toolset='BOOST_JAM_TOOLSET' 'ARGS'
$ ENDIF
$
$
$EXIT:
$ sts = $STATUS
$ exit 'sts' + (0 * f$verify(save_verify))
$CLEAN: !GOSUB
$ !
$ IF F$SEARCH("[.bootstrap_vms]*.*") .NES. ""
$ THEN
$ SAY "I|Cleaning previous bootstrap files..."
$ !
$ SET FILE /PROT=(W:RWED) [.bootstrap_vms]*.*;*
$ DELETE /NOCONF /NOLOG [.bootstrap_vms]*.*;*
$ ENDIF
$ !
$ IF F$SEARCH("bootstrap_vms.dir") .NES. ""
$ THEN
$ SAY "I|Removing previous bootstrap directory..."
$ !
$ SET FILE /PROT=(W:RWED) bootstrap_vms.dir
$ DELETE /NOCONF /NOLOG bootstrap_vms.dir;
$ ENDIF
$ !
$ RETURN

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,74 @@
/*
* Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.
*
* This file is part of Jam - see jam.c for Copyright information.
*/
#ifndef JAM_BUILTINS_H
# define JAM_BUILTINS_H
# include "config.h"
# include "frames.h"
/*
* builtins.h - compile parsed jam statements
*/
void load_builtins();
void init_set();
void init_path();
void init_regex();
void init_property_set();
void init_sequence();
void init_order();
void property_set_done();
LIST *builtin_calc( FRAME * frame, int flags );
LIST *builtin_depends( FRAME * frame, int flags );
LIST *builtin_rebuilds( FRAME * frame, int flags );
LIST *builtin_echo( FRAME * frame, int flags );
LIST *builtin_exit( FRAME * frame, int flags );
LIST *builtin_flags( FRAME * frame, int flags );
LIST *builtin_glob( FRAME * frame, int flags );
LIST *builtin_glob_recursive( FRAME * frame, int flags );
LIST *builtin_subst( FRAME * frame, int flags );
LIST *builtin_match( FRAME * frame, int flags );
LIST *builtin_split_by_characters( FRAME * frame, int flags );
LIST *builtin_hdrmacro( FRAME * frame, int flags );
LIST *builtin_rulenames( FRAME * frame, int flags );
LIST *builtin_varnames( FRAME * frame, int flags );
LIST *builtin_delete_module( FRAME * frame, int flags );
LIST *builtin_import( FRAME * frame, int flags );
LIST *builtin_export( FRAME * frame, int flags );
LIST *builtin_caller_module( FRAME * frame, int flags );
LIST *builtin_backtrace( FRAME * frame, int flags );
LIST *builtin_pwd( FRAME * frame, int flags );
LIST *builtin_update( FRAME * frame, int flags );
LIST *builtin_update_now( FRAME * frame, int flags );
LIST *builtin_import_module( FRAME * frame, int flags );
LIST *builtin_imported_modules( FRAME * frame, int flags );
LIST *builtin_instance( FRAME * frame, int flags );
LIST *builtin_sort( FRAME * frame, int flags );
LIST *builtin_normalize_path( FRAME * frame, int flags );
LIST *builtin_native_rule( FRAME * frame, int flags );
LIST *builtin_has_native_rule( FRAME * frame, int flags );
LIST *builtin_user_module( FRAME * frame, int flags );
LIST *builtin_nearest_user_location( FRAME * frame, int flags );
LIST *builtin_check_if_file( FRAME * frame, int flags );
LIST *builtin_python_import_rule( FRAME * frame, int flags );
LIST *builtin_shell( FRAME * frame, int flags );
LIST *builtin_md5( FRAME * frame, int flags );
LIST *builtin_file_open( FRAME * frame, int flags );
LIST *builtin_pad( FRAME * frame, int flags );
LIST *builtin_precious( FRAME * frame, int flags );
LIST *builtin_self_path( FRAME * frame, int flags );
LIST *builtin_makedir( FRAME * frame, int flags );
LIST *builtin_readlink( FRAME * frame, int flags );
LIST *builtin_glob_archive( FRAME * frame, int flags );
LIST *builtin_debug_print_helper( FRAME * frame, int flags );
void backtrace( FRAME *frame );
extern int last_update_now_status;
#endif

View File

@@ -0,0 +1,98 @@
#!/usr/bin/python
# This script is used to bump the bjam version. It takes a single argument, e.g.
#
# ./bump_version.py 3.1.9
#
# and updates all the necessary files.
#
# Copyright 2006 Rene Rivera.
# Copyright 2005-2006 Vladimir Prus.
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt)
import os
import os.path
import re
import string
import sys
srcdir = os.path.abspath(os.path.dirname(__file__))
docdir = os.path.abspath(os.path.join(srcdir, "..", "doc"))
def edit(file, *replacements):
print(" '%s'..." % file)
f = open(file, 'r')
text = f.read()
f.close()
for (source, target) in replacements:
text, n = re.compile(source, re.MULTILINE).subn(target, text)
assert n > 0
f = open(file, 'w')
f.write(text)
f.close()
def make_edits(ver):
ver03 = (list(ver) + [0] * 3)[0:3]
ver02 = ver03[0:2]
join = lambda v, s : s.join(str(x) for x in v)
dotJoin = lambda v : join(v, ".")
print("Setting version to %s" % str(ver03))
edit(os.path.join(srcdir, "boost-jam.spec"),
('^(Version:) .*$', '\\1 %s' % dotJoin(ver03)))
edit(os.path.join(srcdir, "build.jam"),
('^(_VERSION_ =).* ;$', '\\1 %s ;' % join(ver03, " ")))
edit(os.path.join(docdir, "bjam.qbk"),
('(\[version).*(\])', '\\1: %s\\2' % dotJoin(ver03)),
('(\[def :version:).*(\])', '\\1 %s\\2' % dotJoin(ver03)))
edit(os.path.join(srcdir, "patchlevel.h"),
('^(#define VERSION_MAJOR) .*$', '\\1 %s' % ver03[0]),
('^(#define VERSION_MINOR) .*$', '\\1 %s' % ver03[1]),
('^(#define VERSION_PATCH) .*$', '\\1 %s' % ver03[2]),
('^(#define VERSION_MAJOR_SYM) .*$', '\\1 "%02d"' % ver03[0]),
('^(#define VERSION_MINOR_SYM) .*$', '\\1 "%02d"' % ver03[1]),
('^(#define VERSION_PATCH_SYM) .*$', '\\1 "%02d"' % ver03[2]),
('^(#define VERSION) .*$', '\\1 "%s"' % dotJoin(ver)),
('^(#define JAMVERSYM) .*$', '\\1 "JAMVERSION=%s"' % dotJoin(ver02)))
def main():
if len(sys.argv) < 2:
print("Expect new version as argument.")
sys.exit(1)
if len(sys.argv) > 3:
print("Too many arguments.")
sys.exit(1)
version = sys.argv[1].split(".")
if len(version) > 3:
print("Expect version argument in the format: <MAJOR>.<MINOR>.<PATCH>")
sys.exit(1)
try:
version = list(int(x) for x in version)
except ValueError:
print("Version values must be valid integers.")
sys.exit(1)
while version and version[-1] == 0:
version.pop()
if not version:
print("At least one of the version values must be positive.")
sys.exit(1)
make_edits(version)
if __name__ == '__main__':
main()

View File

@@ -0,0 +1,19 @@
/* Copyright 2021 Rene Rivera
* Distributed under the Boost Software License, Version 1.0.
* (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt)
*/
/*
This program is a compile test for the availability of C runtime library (clib) functions.
It is used by the build script to guess and check the compiler used to build the engine.
*/
// Some headers we depend on..
#include <string.h>
int check_clib()
{
{ auto _ = strdup("-"); }
return 0;
}

View File

@@ -0,0 +1,31 @@
/* Copyright 2020 Rene Rivera
* Distributed under the Boost Software License, Version 1.0.
* (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt)
*/
/*
This program is a compile test for support of C++11. If it compiles
successfully, the key parts of C++11 that the B2 engine requires are
available. It is used by the build script to guess and check the
compiler used to build the engine.
*/
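// For reference, the build script's probes invoke the candidate compiler on this
// file roughly as follows (a sketch; the exact test_compiler wrapper and file
// name live in the build script, not here):
//
//   g++ -x c++ -std=c++11 check_cxx11.cpp -o check_cxx11
//
// A zero exit status marks that compiler as usable for building the engine.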
// Some headers we test...
#include <thread>
#include <memory>
int main()
{
// Check for basic thread calls.
// [2020-08-19] Mingw-w64 with win32 threading model (as opposed to posix
// threading model) does not really have std::thread etc. Please see comments
// in sysinfo.cpp.
#ifndef _WIN32
{ auto _ = std::thread::hardware_concurrency(); }
#endif
// [2021-08-07] We check the following C++11 features: brace initialization,
// unique_ptr. Plus the author's ability to memorize some digits.
{ const std::unique_ptr <float> pf {new float {3.14159f}}; }
}

View File

@@ -0,0 +1,191 @@
/*
* Copyright Vladimir Prus 2003.
* Distributed under the Boost Software License, Version 1.0.
* (See accompanying file LICENSE.txt or copy at
* https://www.bfgroup.xyz/b2/LICENSE.txt)
*/
#include "class.h"
#include "constants.h"
#include "frames.h"
#include "hash.h"
#include "lists.h"
#include "object.h"
#include "rules.h"
#include "jam_strings.h"
#include "variable.h"
#include "output.h"
#include <stdio.h>
#include <stdlib.h>
static struct hash * classes = 0;
static void check_defined( LIST * class_names )
{
LISTITER iter = list_begin( class_names );
LISTITER const end = list_end( class_names );
for ( ; iter != end; iter = list_next( iter ) )
{
if ( !hash_find( classes, list_item( iter ) ) )
{
out_printf( "Class %s is not defined\n", object_str( list_item( iter ) )
);
abort();
}
}
}
static OBJECT * class_module_name( OBJECT * declared_name )
{
string name[ 1 ];
OBJECT * result;
string_new( name );
string_append( name, "class@" );
string_append( name, object_str( declared_name ) );
result = object_new( name->value );
string_free( name );
return result;
}
struct import_base_data
{
OBJECT * base_name;
module_t * base_module;
module_t * class_module;
};
static void import_base_rule( void * r_, void * d_ )
{
RULE * r = (RULE *)r_;
RULE * ir1;
RULE * ir2;
struct import_base_data * d = (struct import_base_data *)d_;
OBJECT * qname;
string qualified_name[ 1 ];
string_new ( qualified_name );
string_append ( qualified_name, object_str( d->base_name ) );
string_push_back( qualified_name, '.' );
string_append ( qualified_name, object_str( r->name ) );
qname = object_new( qualified_name->value );
string_free( qualified_name );
ir1 = import_rule( r, d->class_module, r->name );
ir2 = import_rule( r, d->class_module, qname );
object_free( qname );
/* Copy 'exported' flag. */
ir1->exported = ir2->exported = r->exported;
/* If we are importing a class method, localize it. */
if ( ( r->module == d->base_module ) || ( r->module->class_module &&
( r->module->class_module == d->base_module ) ) )
{
rule_localize( ir1, d->class_module );
rule_localize( ir2, d->class_module );
}
}
/*
* For each exported rule 'n', declared in class module for base, imports that
* rule in 'class' as 'n' and as 'base.n'. Imported rules are localized and
* marked as exported.
*/
static void import_base_rules( module_t * class_, OBJECT * base )
{
OBJECT * module_name = class_module_name( base );
module_t * base_module = bindmodule( module_name );
LIST * imported;
struct import_base_data d;
d.base_name = base;
d.base_module = base_module;
d.class_module = class_;
object_free( module_name );
if ( base_module->rules )
hashenumerate( base_module->rules, import_base_rule, &d );
imported = imported_modules( base_module );
import_module( imported, class_ );
list_free( imported );
}
OBJECT * make_class_module( LIST * xname, LIST * bases, FRAME * frame )
{
OBJECT * name = class_module_name( list_front( xname ) );
OBJECT * * pp;
module_t * class_module = 0;
int found;
if ( !classes )
classes = hashinit( sizeof( OBJECT * ), "classes" );
pp = (OBJECT * *)hash_insert( classes, list_front( xname ), &found );
if ( !found )
{
*pp = object_copy( list_front( xname ) );
}
else
{
out_printf( "Class %s already defined\n", object_str( list_front( xname ) )
);
abort();
}
check_defined( bases );
class_module = bindmodule( name );
{
/*
Initialize variables that B2 inserts in every object.
We want to avoid creating the object's hash if it isn't needed.
*/
int num = class_module->num_fixed_variables;
module_add_fixed_var( class_module, constant_name, &num );
module_add_fixed_var( class_module, constant_class, &num );
module_set_fixed_variables( class_module, num );
}
var_set( class_module, constant_name, xname, VAR_SET );
var_set( class_module, constant_bases, bases, VAR_SET );
{
LISTITER iter = list_begin( bases );
LISTITER const end = list_end( bases );
for ( ; iter != end; iter = list_next( iter ) )
import_base_rules( class_module, list_item( iter ) );
}
return name;
}
static void free_class( void * xclass, void * data )
{
object_free( *(OBJECT * *)xclass );
}
void class_done( void )
{
if ( classes )
{
hashenumerate( classes, free_class, (void *)0 );
hashdone( classes );
classes = 0;
}
}

View File

@@ -0,0 +1,15 @@
/* Copyright Vladimir Prus 2003. Distributed under the Boost */
/* Software License, Version 1.0. (See accompanying */
/* file LICENSE.txt or copy at https://www.bfgroup.xyz/b2/LICENSE.txt) */
#ifndef CLASS_H_VP_2003_08_01
#define CLASS_H_VP_2003_08_01
#include "config.h"
#include "lists.h"
#include "frames.h"
OBJECT * make_class_module( LIST * xname, LIST * bases, FRAME * frame );
void class_done( void );
#endif

View File

@@ -0,0 +1,121 @@
/*
* Copyright 1993, 1995 Christopher Seiwald.
*
* This file is part of Jam - see jam.c for Copyright information.
*/
/* This file is ALSO:
* Copyright 2001-2004 David Abrahams.
* Distributed under the Boost Software License, Version 1.0.
* (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt)
*/
/*
* command.c - maintain lists of commands
*/
#include "jam.h"
#include "command.h"
#include "lists.h"
#include "rules.h"
#include <assert.h>
/*
* cmdlist_append_cmd
*/
CMDLIST * cmdlist_append_cmd( CMDLIST * l, CMD * cmd )
{
CMDLIST * result = (CMDLIST *)BJAM_MALLOC( sizeof( CMDLIST ) );
result->iscmd = 1;
result->next = l;
result->impl.cmd = cmd;
return result;
}
CMDLIST * cmdlist_append_target( CMDLIST * l, TARGET * t )
{
CMDLIST * result = (CMDLIST *)BJAM_MALLOC( sizeof( CMDLIST ) );
result->iscmd = 0;
result->next = l;
result->impl.t = t;
return result;
}
void cmdlist_free( CMDLIST * l )
{
while ( l )
{
CMDLIST * tmp = l->next;
BJAM_FREE( l );
l = tmp;
}
}
/*
* cmd_new() - return a new CMD.
*/
CMD * cmd_new( RULE * rule, LIST * targets, LIST * sources, LIST * shell )
{
CMD * cmd = (CMD *)BJAM_MALLOC( sizeof( CMD ) );
FRAME frame[ 1 ];
assert( cmd );
cmd->rule = rule;
cmd->shell = shell;
cmd->next = 0;
cmd->noop = 0;
cmd->asynccnt = 1;
cmd->status = 0;
cmd->lock = NULL;
cmd->unlock = NULL;
lol_init( &cmd->args );
lol_add( &cmd->args, targets );
lol_add( &cmd->args, sources );
string_new( cmd->buf );
frame_init( frame );
frame->module = rule->module;
lol_init( frame->args );
lol_add( frame->args, list_copy( targets ) );
lol_add( frame->args, list_copy( sources ) );
function_run_actions( rule->actions->command, frame, stack_global(),
cmd->buf );
frame_free( frame );
return cmd;
}
/*
* cmd_free() - free a CMD
*/
void cmd_free( CMD * cmd )
{
cmdlist_free( cmd->next );
lol_free( &cmd->args );
list_free( cmd->shell );
string_free( cmd->buf );
freetargets( cmd->unlock );
BJAM_FREE( (void *)cmd );
}
/*
* cmd_release_targets_and_shell()
*
* Makes the CMD release its hold on its targets & shell lists and forget
* about them. Useful in case caller still has references to those lists and
* wants to reuse them after freeing the CMD object.
*/
void cmd_release_targets_and_shell( CMD * cmd )
{
cmd->args.list[ 0 ] = L0; /* targets */
cmd->shell = L0; /* shell */
}
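/*
 * A minimal usage sketch (hypothetical caller code): keep the targets and shell
 * lists usable after the CMD is freed.
 *
 *   CMD * c = cmd_new( rule, targets, sources, shell );
 *   ...
 *   cmd_release_targets_and_shell( c );  // c drops ownership of targets/shell
 *   cmd_free( c );                       // 'targets' and 'shell' lists survive
 */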

View File

@@ -0,0 +1,101 @@
/*
* Copyright 1994 Christopher Seiwald.
*
* This file is part of Jam - see jam.c for Copyright information.
*/
/*
* command.h - the CMD structure and routines to manipulate them
*
* Both ACTION and CMD contain a rule, targets, and sources. An
* ACTION describes a rule to be applied to the given targets and
* sources; a CMD is what actually gets executed by the shell. The
* differences are due to:
*
* ACTIONS must be combined if 'actions together' is given.
* ACTIONS must be split if 'actions piecemeal' is given.
* ACTIONS must have current sources omitted for 'actions updated'.
*
* The CMD datatype holds a single command that is to be executed
* against a target, and they can chain together to represent the
* full collection of commands used to update a target.
*
* Structures:
*
* CMD - an action, ready to be formatted into a buffer and executed.
*
* External routines:
*
* cmd_new() - return a new CMD or 0 if too many args.
* cmd_free() - delete CMD and its parts.
* cmd_next() - walk the CMD chain.
* cmd_release_targets_and_shell() - CMD forgets about its targets & shell.
*/
/*
* CMD - an action, ready to be formatted into a buffer and executed.
*/
#ifndef COMMAND_SW20111118_H
#define COMMAND_SW20111118_H
#include "config.h"
#include "lists.h"
#include "rules.h"
#include "jam_strings.h"
typedef struct _cmd CMD;
/*
* A list whose elements are either TARGETS or CMDS.
* CMDLIST is used only by CMD. A TARGET means that
* the CMD is the last updating action required to
* build the target. A CMD is the next CMD required
* to build the same target. (Note that a single action
* can update more than one target, so the CMDs form
* a DAG, not a straight linear list.)
*/
typedef struct _cmdlist {
struct _cmdlist * next;
union {
CMD * cmd;
TARGET * t;
} impl;
char iscmd;
} CMDLIST;
CMDLIST * cmdlist_append_cmd( CMDLIST *, CMD * );
CMDLIST * cmdlist_append_target( CMDLIST *, TARGET * );
void cmdlist_free( CMDLIST * );
struct _cmd
{
CMDLIST * next;
RULE * rule; /* rule->actions contains shell script */
LIST * shell; /* $(JAMSHELL) value */
LOL args; /* LISTs for $(<), $(>) */
string buf[ 1 ]; /* actual commands */
int noop; /* no-op commands should be faked instead of executed */
int asynccnt; /* number of outstanding dependencies */
TARGETS * lock; /* semaphores that are required by this cmd. */
TARGETS * unlock; /* semaphores that are released when this cmd finishes. */
char status; /* the command status */
};
CMD * cmd_new
(
RULE * rule, /* rule (referenced) */
LIST * targets, /* $(<) (ownership transferred) */
LIST * sources, /* $(>) (ownership transferred) */
LIST * shell /* $(JAMSHELL) (ownership transferred) */
);
void cmd_release_targets_and_shell( CMD * );
void cmd_free( CMD * );
#define cmd_next( c ) ((c)->next)
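/*
 * A minimal traversal sketch (helper names hypothetical): walk a command's
 * follow-up list, distinguishing chained commands from finished targets.
 *
 *   CMDLIST * n;
 *   for ( n = cmd_next( c ); n; n = n->next )
 *   {
 *       if ( n->iscmd )
 *           run_next_cmd( n->impl.cmd );  // another CMD updating the same target
 *       else
 *           mark_updated( n->impl.t );    // c was the last action for this target
 *   }
 */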
#endif

View File

@@ -0,0 +1,233 @@
/*
* Copyright 1993, 2000 Christopher Seiwald.
*
* This file is part of Jam - see jam.c for Copyright information.
*/
/* This file is ALSO:
* Copyright 2001-2004 David Abrahams.
* Distributed under the Boost Software License, Version 1.0.
* (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt)
*/
/*
* compile.c - compile parsed jam statements
*
* External routines:
* evaluate_rule() - execute a rule invocation
*
* Internal routines:
* debug_compile() - printf with indent to show rule expansion
*/
#include "jam.h"
#include "compile.h"
#include "builtins.h"
#include "class.h"
#include "constants.h"
#include "hash.h"
#include "hdrmacro.h"
#include "make.h"
#include "modules.h"
#include "parse.h"
#include "rules.h"
#include "search.h"
#include "jam_strings.h"
#include "variable.h"
#include "output.h"
#include <assert.h>
#include <stdarg.h>
#include <string.h>
static void debug_compile( int which, char const * s, FRAME * );
/* Internal functions from builtins.c */
void backtrace( FRAME * );
void backtrace_line( FRAME * );
void print_source_line( FRAME * );
void unknown_rule( FRAME *, char const * key, module_t *, OBJECT * rule_name );
/*
* evaluate_rule() - execute a rule invocation
*/
LIST * evaluate_rule( RULE * rule, OBJECT * rulename, FRAME * frame )
{
LIST * result = L0;
profile_frame prof[ 1 ];
module_t * prev_module = frame->module;
if ( DEBUG_COMPILE )
{
/* Try hard to indicate in which module the rule is going to execute. */
char buf[ 256 ] = "";
if ( rule->module->name )
{
strncat( buf, object_str( rule->module->name ), sizeof( buf ) -
1 );
strncat( buf, ".", sizeof( buf ) - 1 );
if ( strncmp( buf, object_str( rule->name ), strlen( buf ) ) == 0 )
{
buf[ 0 ] = 0;
}
}
strncat( buf, object_str( rule->name ), sizeof( buf ) - 1 );
debug_compile( 1, buf, frame );
lol_print( frame->args );
out_printf( "\n" );
}
if ( rule->procedure && rule->module != prev_module )
{
/* Propagate current module to nested rule invocations. */
frame->module = rule->module;
}
/* Record current rule name in frame. */
if ( rule->procedure )
{
frame->rulename = object_str( rulename );
/* And enter record profile info. */
if ( DEBUG_PROFILE )
profile_enter( function_rulename( rule->procedure ), prof );
}
/* Check traditional targets $(<) and sources $(>). */
if ( !rule->actions && !rule->procedure )
unknown_rule( frame, NULL, frame->module, rulename );
/* If this rule will be executed for updating the targets then construct the
* action for make().
*/
if ( rule->actions )
{
TARGETS * t;
/* The action is associated with this instance of this rule. */
ACTION * const action = (ACTION *)BJAM_MALLOC( sizeof( ACTION ) );
memset( (char *)action, '\0', sizeof( *action ) );
action->rule = rule;
action->targets = targetlist( (TARGETS *)0, lol_get( frame->args, 0 ) );
action->sources = targetlist( (TARGETS *)0, lol_get( frame->args, 1 ) );
action->refs = 1;
/* If we have a group of targets all being built using the same action
* and any of these targets is updated, then we have to consider them
* all to be out-dated. We do this by adding a REBUILDS in both directions
* between the first target and all the other targets.
*/
if ( action->targets )
{
TARGET * const t0 = action->targets->target;
for ( t = action->targets->next; t; t = t->next )
{
t->target->rebuilds = targetentry( t->target->rebuilds, t0 );
t0->rebuilds = targetentry( t0->rebuilds, t->target );
}
}
/* Append this action to the actions of each target. */
for ( t = action->targets; t; t = t->next )
t->target->actions = actionlist( t->target->actions, action );
action_free( action );
}
/* Now recursively compile any parse tree associated with this rule.
* function_refer()/function_free() call pair added to ensure the rule does
* not get freed while in use.
*/
if ( rule->procedure )
{
FUNCTION * const function = rule->procedure;
function_refer( function );
result = function_run( function, frame, stack_global() );
function_free( function );
}
if ( DEBUG_PROFILE && rule->procedure )
profile_exit( prof );
if ( DEBUG_COMPILE )
debug_compile( -1, 0, frame );
return result;
}
/*
* Call the given rule with the specified parameters. The parameters should be
* of type LIST* and end with a NULL pointer. This differs from 'evaluate_rule'
* in that frame for the called rule is prepared inside 'call_rule'.
*
* This function is useful when a builtin rule (in C) wants to call another rule
* which might be implemented in Jam.
*/
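/*
 * A minimal usage sketch (rule and argument names hypothetical): call a
 * Jam-level rule from builtin C code, terminating the variable argument
 * list with a null pointer.
 *
 *   OBJECT * rulename = object_new( "my-jam-rule" );
 *   LIST * result = call_rule( rulename, frame,
 *       list_new( object_new( "arg1" ) ), (LIST *)0 );
 *   list_free( result );
 *   object_free( rulename );
 */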
LIST * call_rule( OBJECT * rulename, FRAME * caller_frame, ... )
{
va_list va;
LIST * result;
FRAME inner[ 1 ];
frame_init( inner );
inner->prev = caller_frame;
inner->prev_user = caller_frame->module->user_module
? caller_frame
: caller_frame->prev_user;
inner->module = caller_frame->module;
va_start( va, caller_frame );
for ( ; ; )
{
LIST * const l = va_arg( va, LIST * );
if ( !l )
break;
lol_add( inner->args, l );
}
va_end( va );
result = evaluate_rule( bindrule( rulename, inner->module ), rulename, inner );
frame_free( inner );
return result;
}
/*
* debug_compile() - printf with indent to show rule expansion
*/
static void debug_compile( int which, char const * s, FRAME * frame )
{
static int level = 0;
static char indent[ 36 ] = ">>>>|>>>>|>>>>|>>>>|>>>>|>>>>|>>>>|";
if ( which >= 0 )
{
int i;
print_source_line( frame );
i = ( level + 1 ) * 2;
while ( i > 35 )
{
out_puts( indent );
i -= 35;
}
out_printf( "%*.*s ", i, i, indent );
}
if ( s )
out_printf( "%s ", s );
level += which;
}

View File

@@ -0,0 +1,60 @@
/*
* Copyright 1993, 2000 Christopher Seiwald.
*
* This file is part of Jam - see jam.c for Copyright information.
*/
/* This file is ALSO:
* Copyright 2001-2004 David Abrahams.
* Distributed under the Boost Software License, Version 1.0.
* (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt)
*/
/*
* compile.h - compile parsed jam statements
*/
#ifndef COMPILE_DWA20011022_H
#define COMPILE_DWA20011022_H
#include "config.h"
#include "frames.h"
#include "lists.h"
#include "object.h"
#include "rules.h"
void compile_builtins();
LIST * evaluate_rule( RULE * rule, OBJECT * rulename, FRAME * );
LIST * call_rule( OBJECT * rulename, FRAME * caller_frame, ... );
/* Flags for compile_set(), etc */
#define ASSIGN_SET 0x00 /* = assign variable */
#define ASSIGN_APPEND 0x01 /* += append variable */
#define ASSIGN_DEFAULT 0x02 /* set only if unset */
/* Flags for compile_setexec() */
#define EXEC_UPDATED 0x01 /* executes updated */
#define EXEC_TOGETHER 0x02 /* executes together */
#define EXEC_IGNORE 0x04 /* executes ignore */
#define EXEC_QUIETLY 0x08 /* executes quietly */
#define EXEC_PIECEMEAL 0x10 /* executes piecemeal */
#define EXEC_EXISTING 0x20 /* executes existing */
/* Conditions for compile_if() */
#define EXPR_NOT 0 /* ! cond */
#define EXPR_AND 1 /* cond && cond */
#define EXPR_OR 2 /* cond || cond */
#define EXPR_EXISTS 3 /* arg */
#define EXPR_EQUALS 4 /* arg = arg */
#define EXPR_NOTEQ 5 /* arg != arg */
#define EXPR_LESS 6 /* arg < arg */
#define EXPR_LESSEQ 7 /* arg <= arg */
#define EXPR_MORE 8 /* arg > arg */
#define EXPR_MOREEQ 9 /* arg >= arg */
#define EXPR_IN 10 /* arg in arg */
#endif

View File

@@ -0,0 +1,59 @@
#ifndef B2_CONFIG_H
#define B2_CONFIG_H
/*
Copyright 2002-2021 Rene Rivera.
Distributed under the Boost Software License, Version 1.0.
(See accompanying file LICENSE.txt or copy at
https://www.bfgroup.xyz/b2/LICENSE.txt)
*/
#define OPT_HEADER_CACHE_EXT 1
#define OPT_GRAPH_DEBUG_EXT 1
#define OPT_SEMAPHORE 1
#define OPT_AT_FILES 1
#define OPT_DEBUG_PROFILE 1
#define JAM_DEBUGGER 1
#define OPT_FIX_TARGET_VARIABLES_EXT 1
#define OPT_IMPROVED_PATIENCE_EXT 1
// Autodetect various operating systems..
#if defined(_WIN32) || defined(_WIN64) || \
defined(__WIN32__) || defined(__TOS_WIN__) || \
defined(__WINDOWS__)
#define NT 1
#endif
#if defined(__VMS) || defined(__VMS_VER)
#if !defined(VMS)
#define VMS 1
#endif
#endif
// To work around QEMU failures on mixed mode situations (32 vs 64) we need to
// enable partial LFS support in system headers. And we need to do this before
// any system headers are included.
#if !defined(NT) && !defined(VMS)
# define _FILE_OFFSET_BITS 64
#endif
// Correct missing types in some earlier compilers..
#include <stdint.h>
#ifndef INT32_MIN
// VS 2013 is barely C++11/C99 and opts not to provide specific sized int types.
// Provide a generic implementation of the sizes we use.
#if UINT_MAX == 0xffffffff
typedef int int32_t;
#elif (USHRT_MAX == 0xffffffff)
typedef short int32_t;
#elif ULONG_MAX == 0xffffffff
typedef long int32_t;
#endif
#endif
#endif

View File

@@ -0,0 +1,238 @@
@ECHO OFF
REM ~ Copyright 2002-2018 Rene Rivera.
REM ~ Distributed under the Boost Software License, Version 1.0.
REM ~ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
:Start
REM Setup the toolset command and options.
if "_%B2_TOOLSET%_" == "_msvc_" call :Config_MSVC
if "_%B2_TOOLSET%_" == "_vc12_" call :Config_VC12
if "_%B2_TOOLSET%_" == "_vc14_" call :Config_VC14
if "_%B2_TOOLSET%_" == "_vc141_" call :Config_VC141
if "_%B2_TOOLSET%_" == "_vc142_" call :Config_VC142
if "_%B2_TOOLSET%_" == "_vc143_" call :Config_VC143
if "_%B2_TOOLSET%_" == "_borland_" call :Config_BORLAND
if "_%B2_TOOLSET%_" == "_como_" call :Config_COMO
if "_%B2_TOOLSET%_" == "_gcc_" call :Config_GCC
if "_%B2_TOOLSET%_" == "_clang_" call :Config_CLANG
if "_%B2_TOOLSET%_" == "_gcc-nocygwin_" call :Config_GCC_NOCYGWIN
if "_%B2_TOOLSET%_" == "_intel-win32_" call :Config_INTEL_WIN32
if "_%B2_TOOLSET%_" == "_mingw_" call :Config_MINGW
exit /b %errorlevel%
:Call_If_Exists
ECHO Call_If_Exists %*
if EXIST %1 call %*
goto :eof
:Config_MSVC
if not defined CXX ( set "CXX=cl" )
if NOT "_%MSVCDir%_" == "__" (
set "B2_TOOLSET_ROOT=%MSVCDir%\"
)
call :Call_If_Exists "%B2_TOOLSET_ROOT%bin\VCVARS32.BAT"
if not "_%B2_TOOLSET_ROOT%_" == "__" (
set "PATH=%B2_TOOLSET_ROOT%bin;%PATH%"
)
set "B2_CXX="%CXX%" /nologo /MP /MT /TP /Feb2 /wd4996 /O2 /GL /EHsc"
set "B2_CXX_LINK=/link kernel32.lib advapi32.lib user32.lib"
set "_known_=1"
goto :eof
:Config_VC11
if not defined CXX ( set "CXX=cl" )
if NOT "_%VS110COMNTOOLS%_" == "__" (
set "B2_TOOLSET_ROOT=%VS110COMNTOOLS%..\..\VC\"
)
if "_%VCINSTALLDIR%_" == "__" call :Call_If_Exists "%B2_TOOLSET_ROOT%VCVARSALL.BAT" %B2_BUILD_ARGS%
if NOT "_%B2_TOOLSET_ROOT%_" == "__" (
if "_%VCINSTALLDIR%_" == "__" (
set "PATH=%B2_TOOLSET_ROOT%bin;%PATH%"
) )
set "B2_CXX="%CXX%" /nologo /MP /MT /TP /Feb2 /wd4996 /O2 /GL /EHsc"
set "B2_CXX_LINK=/link kernel32.lib advapi32.lib user32.lib"
set "_known_=1"
goto :eof
:Config_VC12
if not defined CXX ( set "CXX=cl" )
if NOT "_%VS120COMNTOOLS%_" == "__" (
set "B2_TOOLSET_ROOT=%VS120COMNTOOLS%..\..\VC\"
)
if "_%B2_ARCH%_" == "__" set B2_ARCH=%PROCESSOR_ARCHITECTURE%
set B2_BUILD_ARGS=%B2_BUILD_ARGS% %B2_ARCH%
if "_%VCINSTALLDIR%_" == "__" call :Call_If_Exists "%B2_TOOLSET_ROOT%VCVARSALL.BAT" %B2_BUILD_ARGS%
if NOT "_%B2_TOOLSET_ROOT%_" == "__" (
if "_%VCINSTALLDIR%_" == "__" (
set "PATH=%B2_TOOLSET_ROOT%bin;%PATH%"
) )
set "B2_CXX="%CXX%" /nologo /MP /MT /TP /Feb2 /wd4996 /O2 /GL /EHsc"
set "B2_CXX_LINK=/link kernel32.lib advapi32.lib user32.lib"
set "_known_=1"
goto :eof
:Config_VC14
if not defined CXX ( set "CXX=cl" )
if "_%B2_TOOLSET_ROOT%_" == "__" (
if NOT "_%VS140COMNTOOLS%_" == "__" (
set "B2_TOOLSET_ROOT=%VS140COMNTOOLS%..\..\VC\"
))
if "_%B2_ARCH%_" == "__" set B2_ARCH=%PROCESSOR_ARCHITECTURE%
set B2_BUILD_ARGS=%B2_BUILD_ARGS% %B2_ARCH%
if "_%VCINSTALLDIR%_" == "__" call :Call_If_Exists "%B2_TOOLSET_ROOT%VCVARSALL.BAT" %B2_BUILD_ARGS%
if NOT "_%B2_TOOLSET_ROOT%_" == "__" (
if "_%VCINSTALLDIR%_" == "__" (
set "PATH=%B2_TOOLSET_ROOT%bin;%PATH%"
) )
set "B2_CXX="%CXX%" /nologo /MP /MT /TP /Feb2 /wd4996 /O2 /GL /EHsc"
set "B2_CXX_LINK=/link kernel32.lib advapi32.lib user32.lib"
set "_known_=1"
goto :eof
:Config_VC141
if not defined CXX ( set "CXX=cl" )
call vswhere_usability_wrapper.cmd
REM Reset ERRORLEVEL since from now on it's all based on ENV vars
ver > nul 2> nul
if "_%B2_TOOLSET_ROOT%_" == "__" (
if NOT "_%VS150COMNTOOLS%_" == "__" (
set "B2_TOOLSET_ROOT=%VS150COMNTOOLS%..\..\VC\"
))
if "_%B2_ARCH%_" == "__" set B2_ARCH=%PROCESSOR_ARCHITECTURE%
set B2_BUILD_ARGS=%B2_BUILD_ARGS% %B2_ARCH%
REM return to current directory as vsdevcmd_end.bat switches to %USERPROFILE%\Source if it exists.
pushd %CD%
if "_%VSINSTALLDIR%_" == "__" call :Call_If_Exists "%B2_TOOLSET_ROOT%Auxiliary\Build\vcvarsall.bat" %B2_BUILD_ARGS%
popd
set "B2_CXX="%CXX%" /nologo /MP /MT /TP /Feb2 /wd4996 /O2 /GL /EHsc"
set "B2_CXX_LINK=/link kernel32.lib advapi32.lib user32.lib"
set "_known_=1"
goto :eof
:Config_VC142
if not defined CXX ( set "CXX=cl" )
call vswhere_usability_wrapper.cmd
REM Reset ERRORLEVEL since from now on it's all based on ENV vars
ver > nul 2> nul
if "_%B2_TOOLSET_ROOT%_" == "__" (
if NOT "_%VS160COMNTOOLS%_" == "__" (
set "B2_TOOLSET_ROOT=%VS160COMNTOOLS%..\..\VC\"
))
if "_%B2_ARCH%_" == "__" set B2_ARCH=%PROCESSOR_ARCHITECTURE%
set B2_BUILD_ARGS=%B2_BUILD_ARGS% %B2_ARCH%
REM return to current directory as vsdevcmd_end.bat switches to %USERPROFILE%\Source if it exists.
pushd %CD%
if "_%VSINSTALLDIR%_" == "__" call :Call_If_Exists "%B2_TOOLSET_ROOT%Auxiliary\Build\vcvarsall.bat" %B2_BUILD_ARGS%
popd
set "B2_CXX="%CXX%" /nologo /MP /MT /TP /Feb2 /wd4996 /O2 /GL /EHsc"
set "B2_CXX_LINK=/link kernel32.lib advapi32.lib user32.lib"
set "_known_=1"
goto :eof
:Config_VC143
if not defined CXX ( set "CXX=cl" )
call vswhere_usability_wrapper.cmd
REM Reset ERRORLEVEL since from now on it's all based on ENV vars
ver > nul 2> nul
if "_%B2_TOOLSET_ROOT%_" == "__" (
if NOT "_%VS170COMNTOOLS%_" == "__" (
set "B2_TOOLSET_ROOT=%VS170COMNTOOLS%..\..\VC\"
))
if "_%B2_ARCH%_" == "__" set B2_ARCH=%PROCESSOR_ARCHITECTURE%
set B2_BUILD_ARGS=%B2_BUILD_ARGS% %B2_ARCH%
REM return to current directory as vsdevcmd_end.bat switches to %USERPROFILE%\Source if it exists.
pushd %CD%
if "_%VSINSTALLDIR%_" == "__" call :Call_If_Exists "%B2_TOOLSET_ROOT%Auxiliary\Build\vcvarsall.bat" %B2_BUILD_ARGS%
popd
set "B2_CXX="%CXX%" /nologo /MP /MT /TP /Feb2 /wd4996 /O2 /GL /EHsc"
set "B2_CXX_LINK=/link kernel32.lib advapi32.lib user32.lib"
set "_known_=1"
goto :eof
:Config_VCUNK
if NOT "_%B2_TOOLSET%_" == "_vcunk_" goto Skip_VCUNK
call vswhere_usability_wrapper.cmd
REM Reset ERRORLEVEL since from now on it's all based on ENV vars
ver > nul 2> nul
if "_%B2_TOOLSET_ROOT%_" == "__" (
if NOT "_%VSUNKCOMNTOOLS%_" == "__" (
set "B2_TOOLSET_ROOT=%VSUNKCOMNTOOLS%..\..\VC\"
))
if "_%B2_ARCH%_" == "__" set B2_ARCH=%PROCESSOR_ARCHITECTURE%
set B2_BUILD_ARGS=%B2_BUILD_ARGS% %B2_ARCH%
REM return to current directory as vsdevcmd_end.bat switches to %USERPROFILE%\Source if it exists.
pushd %CD%
if "_%VSINSTALLDIR%_" == "__" call :Call_If_Exists "%B2_TOOLSET_ROOT%Auxiliary\Build\vcvarsall.bat" %B2_BUILD_ARGS%
popd
set "B2_CXX="%CXX%" /nologo /MP /MT /TP /Feb2 /wd4996 /O2 /GL /EHsc"
set "B2_CXX_LINK=/link kernel32.lib advapi32.lib user32.lib"
set "_known_=1"
goto :eof
:Config_BORLAND
if not defined CXX ( set "CXX=bcc32c" )
if "_%B2_TOOLSET_ROOT%_" == "__" (
call guess_toolset.bat test_path bcc32c.exe )
if "_%B2_TOOLSET_ROOT%_" == "__" (
if not errorlevel 1 (
set "B2_TOOLSET_ROOT=%FOUND_PATH%..\"
) )
if not "_%B2_TOOLSET_ROOT%_" == "__" (
set "PATH=%B2_TOOLSET_ROOT%Bin;%PATH%"
)
set "B2_CXX="%CXX%" -tC -P -O2 -w- -I"%B2_TOOLSET_ROOT%Include" -L"%B2_TOOLSET_ROOT%Lib" -eb2"
set "_known_=1"
goto :eof
:Config_COMO
if not defined CXX ( set "CXX=como" )
set "B2_CXX="%CXX%" --inlining -o b2.exe"
set "_known_=1"
goto :eof
:Config_GCC
if not defined CXX ( set "CXX=g++" )
set "B2_CXX="%CXX%" -x c++ -std=c++11 -s -O3 -o b2.exe -D_GNU_SOURCE"
set "_known_=1"
goto :eof
:Config_CLANG
if not defined CXX ( set "CXX=clang++" )
set "B2_CXX="%CXX%" -x c++ -std=c++11 -s -O3 -o b2.exe"
set "_known_=1"
goto :eof
:Config_GCC_NOCYGWIN
if not defined CXX ( set "CXX=g++" )
set "B2_CXX="%CXX%" -x c++ -std=c++11 -s -O3 -mno-cygwin -o b2.exe"
set "_known_=1"
goto :eof
:Config_INTEL_WIN32
if not defined CXX ( set "CXX=icl" )
set "B2_CXX="%CXX%" /nologo /MT /O2 /Ob2 /Gy /GF /GA /GB /Feb2"
set "_known_=1"
goto :eof
:Config_MINGW
if not defined CXX ( set "CXX=g++" )
if not "_%B2_TOOLSET_ROOT%_" == "__" (
set "PATH=%B2_TOOLSET_ROOT%bin;%PATH%"
)
for /F "delims=" %%I in ("%CXX%") do set "PATH=%PATH%;%%~dpI"
set "B2_CXX="%CXX%" -x c++ -std=c++11 -s -O3 -o b2.exe"
set "_known_=1"
goto :eof

View File

@@ -0,0 +1,199 @@
/*
* Copyright 2011 Steven Watanabe
* Copyright 2020 René Ferdinand Rivera Morell
*
* This file is part of Jam - see jam.c for Copyright information.
*/
/*
* constants.c - constant objects
*
* External functions:
*
* constants_init() - initialize constants
* constants_done() - free constants
*
*/
#include "constants.h"
void constants_init( void )
{
constant_empty = object_new( "" );
constant_dot = object_new( "." );
constant_plus = object_new( "+" );
constant_star = object_new( "*" );
constant_question_mark = object_new( "?" );
constant_ok = object_new( "ok" );
constant_true = object_new( "true" );
constant_name = object_new( "__name__" );
constant_bases = object_new( "__bases__" );
constant_class = object_new( "__class__" );
constant_typecheck = object_new( ".typecheck" );
constant_builtin = object_new( "(builtin)" );
constant_HCACHEFILE = object_new( "HCACHEFILE" );
constant_HCACHEMAXAGE = object_new( "HCACHEMAXAGE" );
constant_HDRSCAN = object_new( "HDRSCAN" );
constant_HDRRULE = object_new( "HDRRULE" );
constant_BINDRULE = object_new( "BINDRULE" );
constant_LOCATE = object_new( "LOCATE" );
constant_SEARCH = object_new( "SEARCH" );
constant_JAM_SEMAPHORE = object_new( "JAM_SEMAPHORE" );
constant_TIMING_RULE = object_new( "__TIMING_RULE__" );
constant_ACTION_RULE = object_new( "__ACTION_RULE__" );
constant_JAMSHELL = object_new( "JAMSHELL" );
constant_TMPDIR = object_new( "TMPDIR" );
constant_TMPNAME = object_new( "TMPNAME" );
constant_TMPFILE = object_new( "TMPFILE" );
constant_STDOUT = object_new( "STDOUT" );
constant_STDERR = object_new( "STDERR" );
constant_JAMDATE = object_new( "JAMDATE" );
constant_JAM_TIMESTAMP_RESOLUTION = object_new( "JAM_TIMESTAMP_RESOLUTION" );
constant_JAM_VERSION = object_new( "JAM_VERSION" );
constant_JAMUNAME = object_new( "JAMUNAME" );
constant_ENVIRON = object_new( ".ENVIRON" );
constant_ARGV = object_new( "ARGV" );
constant_all = object_new( "all" );
constant_PARALLELISM = object_new( "PARALLELISM" );
constant_KEEP_GOING = object_new( "KEEP_GOING" );
constant_other = object_new( "[OTHER]" );
constant_total = object_new( "[TOTAL]" );
constant_FILE_DIRSCAN = object_new( "FILE_DIRSCAN" );
constant_MAIN = object_new( "MAIN" );
constant_MAIN_MAKE = object_new( "MAIN_MAKE" );
constant_MAKE_MAKE0 = object_new( "MAKE_MAKE0" );
constant_MAKE_MAKE1 = object_new( "MAKE_MAKE1" );
constant_MAKE_MAKE0SORT = object_new( "MAKE_MAKE0SORT" );
constant_BINDMODULE = object_new( "BINDMODULE" );
constant_IMPORT_MODULE = object_new( "IMPORT_MODULE" );
constant_BUILTIN_GLOB_BACK = object_new( "BUILTIN_GLOB_BACK" );
constant_timestamp = object_new( "timestamp" );
constant_python = object_new("__python__");
constant_python_interface = object_new( "python_interface" );
constant_extra_pythonpath = object_new( "EXTRA_PYTHONPATH" );
constant_MAIN_PYTHON = object_new( "MAIN_PYTHON" );
constant_BUILTIN_GLOB_ARCHIVE_BACK= object_new( "BUILTIN_GLOB_ARCHIVE_BACK" );
constant_FILE_ARCHIVESCAN = object_new( "FILE_ARCHIVESCAN" );
constant_RESPONSE_FILE_SUB = object_new( "RESPONSE_FILE_SUB" );
}
void constants_done( void )
{
object_free( constant_empty );
object_free( constant_dot );
object_free( constant_plus );
object_free( constant_star );
object_free( constant_question_mark );
object_free( constant_ok );
object_free( constant_true );
object_free( constant_name );
object_free( constant_bases );
object_free( constant_class );
object_free( constant_typecheck );
object_free( constant_builtin );
object_free( constant_HCACHEFILE );
object_free( constant_HCACHEMAXAGE );
object_free( constant_HDRSCAN );
object_free( constant_HDRRULE );
object_free( constant_BINDRULE );
object_free( constant_LOCATE );
object_free( constant_SEARCH );
object_free( constant_JAM_SEMAPHORE );
object_free( constant_TIMING_RULE );
object_free( constant_ACTION_RULE );
object_free( constant_JAMSHELL );
object_free( constant_TMPDIR );
object_free( constant_TMPNAME );
object_free( constant_TMPFILE );
object_free( constant_STDOUT );
object_free( constant_STDERR );
object_free( constant_JAMDATE );
object_free( constant_JAM_TIMESTAMP_RESOLUTION );
object_free( constant_JAM_VERSION );
object_free( constant_JAMUNAME );
object_free( constant_ENVIRON );
object_free( constant_ARGV );
object_free( constant_all );
object_free( constant_PARALLELISM );
object_free( constant_KEEP_GOING );
object_free( constant_other );
object_free( constant_total );
object_free( constant_FILE_DIRSCAN );
object_free( constant_MAIN );
object_free( constant_MAIN_MAKE );
object_free( constant_MAKE_MAKE0 );
object_free( constant_MAKE_MAKE1 );
object_free( constant_MAKE_MAKE0SORT );
object_free( constant_BINDMODULE );
object_free( constant_IMPORT_MODULE );
object_free( constant_BUILTIN_GLOB_BACK );
object_free( constant_timestamp );
object_free( constant_python );
object_free( constant_python_interface );
object_free( constant_extra_pythonpath );
object_free( constant_MAIN_PYTHON );
object_free( constant_FILE_ARCHIVESCAN );
object_free( constant_BUILTIN_GLOB_ARCHIVE_BACK );
object_free( constant_RESPONSE_FILE_SUB );
}
OBJECT * constant_empty;
OBJECT * constant_dot;
OBJECT * constant_plus;
OBJECT * constant_star;
OBJECT * constant_question_mark;
OBJECT * constant_ok;
OBJECT * constant_true;
OBJECT * constant_name;
OBJECT * constant_bases;
OBJECT * constant_class;
OBJECT * constant_typecheck;
OBJECT * constant_builtin;
OBJECT * constant_HCACHEFILE;
OBJECT * constant_HCACHEMAXAGE;
OBJECT * constant_HDRSCAN;
OBJECT * constant_HDRRULE;
OBJECT * constant_BINDRULE;
OBJECT * constant_LOCATE;
OBJECT * constant_SEARCH;
OBJECT * constant_JAM_SEMAPHORE;
OBJECT * constant_TIMING_RULE;
OBJECT * constant_ACTION_RULE;
OBJECT * constant_JAMSHELL;
OBJECT * constant_TMPDIR;
OBJECT * constant_TMPNAME;
OBJECT * constant_TMPFILE;
OBJECT * constant_STDOUT;
OBJECT * constant_STDERR;
OBJECT * constant_JAMDATE;
OBJECT * constant_JAM_VERSION;
OBJECT * constant_JAMUNAME;
OBJECT * constant_ENVIRON;
OBJECT * constant_ARGV;
OBJECT * constant_all;
OBJECT * constant_PARALLELISM;
OBJECT * constant_KEEP_GOING;
OBJECT * constant_other;
OBJECT * constant_total;
OBJECT * constant_FILE_DIRSCAN;
OBJECT * constant_MAIN;
OBJECT * constant_MAIN_MAKE;
OBJECT * constant_MAKE_MAKE0;
OBJECT * constant_MAKE_MAKE1;
OBJECT * constant_MAKE_MAKE0SORT;
OBJECT * constant_BINDMODULE;
OBJECT * constant_IMPORT_MODULE;
OBJECT * constant_BUILTIN_GLOB_BACK;
OBJECT * constant_timestamp;
OBJECT * constant_JAM_TIMESTAMP_RESOLUTION;
OBJECT * constant_python;
OBJECT * constant_python_interface;
OBJECT * constant_extra_pythonpath;
OBJECT * constant_MAIN_PYTHON;
OBJECT * constant_FILE_ARCHIVESCAN;
OBJECT * constant_BUILTIN_GLOB_ARCHIVE_BACK;
OBJECT * constant_RESPONSE_FILE_SUB;

View File

@@ -0,0 +1,78 @@
/*
* Copyright 2011 Steven Watanabe
*
* This file is part of Jam - see jam.c for Copyright information.
*/
/*
* constants.h - constant objects
*/
#ifndef BOOST_JAM_CONSTANTS_H
#define BOOST_JAM_CONSTANTS_H
#include "config.h"
#include "object.h"
void constants_init( void );
void constants_done( void );
extern OBJECT * constant_empty; /* "" */
extern OBJECT * constant_dot; /* "." */
extern OBJECT * constant_plus; /* "+" */
extern OBJECT * constant_star; /* "*" */
extern OBJECT * constant_question_mark; /* "?" */
extern OBJECT * constant_ok; /* "ok" */
extern OBJECT * constant_true; /* "true" */
extern OBJECT * constant_name; /* "__name__" */
extern OBJECT * constant_bases; /* "__bases__" */
extern OBJECT * constant_class; /* "__class__" */
extern OBJECT * constant_typecheck; /* ".typecheck" */
extern OBJECT * constant_builtin; /* "(builtin)" */
extern OBJECT * constant_HCACHEFILE; /* "HCACHEFILE" */
extern OBJECT * constant_HCACHEMAXAGE; /* "HCACHEMAXAGE" */
extern OBJECT * constant_HDRSCAN; /* "HDRSCAN" */
extern OBJECT * constant_HDRRULE; /* "HDRRULE" */
extern OBJECT * constant_BINDRULE; /* "BINDRULE" */
extern OBJECT * constant_LOCATE; /* "LOCATE" */
extern OBJECT * constant_SEARCH; /* "SEARCH" */
extern OBJECT * constant_JAM_SEMAPHORE; /* "JAM_SEMAPHORE" */
extern OBJECT * constant_TIMING_RULE; /* "__TIMING_RULE__" */
extern OBJECT * constant_ACTION_RULE; /* "__ACTION_RULE__" */
extern OBJECT * constant_JAMSHELL; /* "JAMSHELL" */
extern OBJECT * constant_TMPDIR; /* "TMPDIR" */
extern OBJECT * constant_TMPNAME; /* "TMPNAME" */
extern OBJECT * constant_TMPFILE; /* "TMPFILE" */
extern OBJECT * constant_STDOUT; /* "STDOUT" */
extern OBJECT * constant_STDERR; /* "STDERR" */
extern OBJECT * constant_JAMDATE; /* "JAMDATE" */
extern OBJECT * constant_JAM_TIMESTAMP_RESOLUTION; /* "JAM_TIMESTAMP_RESOLUTION" */
extern OBJECT * constant_JAM_VERSION; /* "JAM_VERSION" */
extern OBJECT * constant_JAMUNAME; /* "JAMUNAME" */
extern OBJECT * constant_ENVIRON; /* ".ENVIRON" */
extern OBJECT * constant_ARGV; /* "ARGV" */
extern OBJECT * constant_all; /* "all" */
extern OBJECT * constant_PARALLELISM; /* "PARALLELISM" */
extern OBJECT * constant_KEEP_GOING; /* "KEEP_GOING" */
extern OBJECT * constant_other; /* "[OTHER]" */
extern OBJECT * constant_total; /* "[TOTAL]" */
extern OBJECT * constant_FILE_DIRSCAN; /* "FILE_DIRSCAN" */
extern OBJECT * constant_MAIN; /* "MAIN" */
extern OBJECT * constant_MAIN_MAKE; /* "MAIN_MAKE" */
extern OBJECT * constant_MAKE_MAKE0; /* "MAKE_MAKE0" */
extern OBJECT * constant_MAKE_MAKE1; /* "MAKE_MAKE1" */
extern OBJECT * constant_MAKE_MAKE0SORT; /* "MAKE_MAKE0SORT" */
extern OBJECT * constant_BINDMODULE; /* "BINDMODULE" */
extern OBJECT * constant_IMPORT_MODULE; /* "IMPORT_MODULE" */
extern OBJECT * constant_BUILTIN_GLOB_BACK; /* "BUILTIN_GLOB_BACK" */
extern OBJECT * constant_timestamp; /* "timestamp" */
extern OBJECT * constant_python; /* "__python__" */
extern OBJECT * constant_python_interface; /* "python_interface" */
extern OBJECT * constant_extra_pythonpath; /* "EXTRA_PYTHONPATH" */
extern OBJECT * constant_MAIN_PYTHON; /* "MAIN_PYTHON" */
extern OBJECT * constant_FILE_ARCHIVESCAN; /* "FILE_ARCHIVESCAN" */
extern OBJECT * constant_BUILTIN_GLOB_ARCHIVE_BACK; /* "BUILTIN_GLOB_ARCHIVE_BACK" */
extern OBJECT * constant_RESPONSE_FILE_SUB; // "RESPONSE_FILE_SUB"
#endif

View File

@@ -0,0 +1,100 @@
/*
* Copyright 2002. Vladimir Prus
* Copyright 2005. Rene Rivera
* Distributed under the Boost Software License, Version 1.0.
* (See accompanying file LICENSE.txt or copy at
* https://www.bfgroup.xyz/b2/LICENSE.txt)
*/
#include "cwd.h"
#include "jam.h"
#include "mem.h"
#include "output.h"
#include "pathsys.h"
#include <assert.h>
#include <errno.h>
#include <limits.h>
/* MinGW on Windows declares PATH_MAX in limits.h */
#if defined( NT ) && !defined( __GNUC__ )
# include <direct.h>
# define PATH_MAX _MAX_PATH
#else
# include <unistd.h>
# if defined( __COMO__ )
# include <linux/limits.h>
# endif
#endif
#ifndef PATH_MAX
# define PATH_MAX 1024
#endif
static OBJECT * cwd_;
namespace
{
std::string cwd_s;
}
void cwd_init( void )
{
int buffer_size = PATH_MAX;
char * cwd_buffer = 0;
int error;
assert( !cwd_ );
do
{
char * const buffer = (char *)BJAM_MALLOC_RAW( buffer_size );
#ifdef OS_VMS
/* cwd in POSIX-format */
cwd_buffer = getcwd( buffer, buffer_size, 0 );
#else
cwd_buffer = getcwd( buffer, buffer_size );
#endif
error = errno;
if ( cwd_buffer )
{
/* We store the path using its canonical/long/key format. */
OBJECT * const cwd = object_new( cwd_buffer );
cwd_ = path_as_key( cwd );
object_free( cwd );
cwd_s = cwd_buffer;
}
buffer_size *= 2;
BJAM_FREE_RAW( buffer );
}
while ( !cwd_ && error == ERANGE );
if ( !cwd_ )
{
errno_puts( "can not get current working directory" );
exit( EXITBAD );
}
}
OBJECT * cwd( void )
{
assert( cwd_ );
return cwd_;
}
void cwd_done( void )
{
assert( cwd_ );
object_free( cwd_ );
cwd_ = NULL;
}
const std::string & b2::cwd_str()
{
return cwd_s;
}

View File

@@ -0,0 +1,42 @@
/*
* Copyright 2002. Vladimir Prus
* Distributed under the Boost Software License, Version 1.0.
* (See accompanying file LICENSE.txt or copy at
* https://www.bfgroup.xyz/b2/LICENSE.txt)
*/
/*
* cwd.h - manages the current working folder information
*/
#ifndef CWD_H
#define CWD_H
#include "config.h"
#include "object.h"
#include <string>
/* cwd() - returns the current working folder */
OBJECT * cwd( void );
namespace b2
{
const std::string & cwd_str();
}
/* cwd_init() - initialize the cwd module functionality
*
* The current working folder can not change in Boost Jam so this function
* gets the current working folder information from the OS and stores it
* internally.
*
* Expected to be called at program startup before the program's current
* working folder has been changed
*/
void cwd_init( void );
/* cwd_done() - cleans up the cwd module functionality */
void cwd_done( void );
#endif

View File

@@ -0,0 +1,72 @@
bjam (3.1.12-1) unstable; urgency=low
* New upstream release.
-- Rene Rivera <grafik@redshift-software.com> Sat, 01 Oct 2005 00:00:00 +0000
bjam (3.1.11-1) unstable; urgency=low
* New upstream release.
-- Rene Rivera <grafik@redshift-software.com> Sat, 30 Apr 2005 00:00:00 +0000
bjam (3.1.10-1) unstable; urgency=low
* New upstream release.
-- Rene Rivera <grafik@redshift-software.com> Tue, 1 Jun 2004 05:42:35 +0000
bjam (3.1.9-2) unstable; urgency=low
* Use a default value of BOOST_BUILD_PATH if it is not set in the environment.
-- Vladimir Prus <ghost@zigzag.lvk.cs.msu.su> Wed, 17 Dec 2003 16:44:35 +0300
bjam (3.1.9-1) unstable; urgency=low
* Implement NATIVE_FILE builtin and several native rules.
-- Vladimir Prus <ghost@zigzag.lvk.cs.msu.su> Thu, 11 Dec 2003 13:15:26 +0300
bjam (3.1.8-1) unstable; urgency=low
* New upstream release.
-- Vladimir Prus <ghost@zigzag.lvk.cs.msu.su> Tue, 4 Nov 2003 20:50:43 +0300
bjam (3.1.7-1) unstable; urgency=low
* New upstream release.
-- Vladimir Prus <ghost@zigzag.lvk.cs.msu.su> Thu, 11 Sep 2003 10:45:44 +0400
bjam (3.1.6-1) unstable; urgency=low
* New upstream release.
-- Vladimir Prus <ghost@zigzag.lvk.cs.msu.su> Tue, 1 Jul 2003 09:12:18 +0400
bjam (3.1.5-1) unstable; urgency=low
* New upstream release.
-- Vladimir Prus <ghost@zigzag.lvk.cs.msu.su> Mon, 19 May 2003 14:05:13 +0400
bjam (3.1.3-2) unstable; urgency=low
* Changed Debian package to be similar to Jam's package.
-- Vladimir Prus <ghost@cs.msu.su> Thu, 10 Oct 2002 18:43:26 +0400
bjam (3.1.3-1) unstable; urgency=low
* New upstream release.
-- Vladimir Prus <ghost@zigzag.lvk.cs.msu.su> Fri, 4 Oct 2002 18:16:54 +0400
bjam (3.1.2-1) unstable; urgency=low
* Initial Release.
-- Vladimir Prus <ghost@cs.msu.su> Wed, 14 Aug 2002 14:08:00 +0400

View File

@@ -0,0 +1,16 @@
Source: bjam
Section: devel
Priority: optional
Maintainer: Vladimir Prus <ghost@cs.msu.su>
Build-Depends: debhelper (>> 3.0.0), docbook-to-man, bison
Standards-Version: 3.5.2
Package: bjam
Architecture: any
Depends: ${shlibs:Depends}
Description: Build tool
Boost.Jam is a portable build tool with its own interpreted language, which
allows rather complex logic to be implemented in a readable way without
resorting to external programs. It is a descendant of the Jam/MR tool, modified
to suit the needs of B2. In particular, modules and rule parameters were added,
as well as several new builtins.

View File

@@ -0,0 +1,25 @@
This package was debianized by Vladimir Prus <ghost@cs.msu.su> on
Wed, 17 July 2002, 19:27:00 +0400.
Copyright:
/+\
+\ Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.
\+/
This is Release 2.4 of Jam/MR, a make-like program.
License is hereby granted to use this software and distribute it
freely, as long as this copyright notice is retained and modifications
are clearly marked.
ALL WARRANTIES ARE HEREBY DISCLAIMED.
Some portions are also:
Copyright 2001-2006 David Abrahams.
Copyright 2002-2006 Rene Rivera.
Copyright 2003-2006 Vladimir Prus.
Distributed under the Boost Software License, Version 1.0.
(See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)

View File

@@ -0,0 +1,236 @@
<!doctype refentry PUBLIC "-//OASIS//DTD DocBook V4.1//EN" [
<!-- Process this file with docbook-to-man to generate an nroff manual
page: `docbook-to-man manpage.sgml > manpage.1'. You may view
the manual page with: `docbook-to-man manpage.sgml | nroff -man |
less'. A typical entry in a Makefile or Makefile.am is:
manpage.1: manpage.sgml
docbook-to-man $< > $@
-->
<!ENTITY dhfirstname "<firstname>Yann</firstname>">
<!ENTITY dhsurname "<surname>Dirson</surname>">
<!-- Please adjust the date whenever revising the manpage. -->
<!ENTITY dhdate "<date>May 23, 2001</date>">
<!ENTITY dhemail "<email>dirson@debian.org</email>">
<!ENTITY dhusername "Yann Dirson">
<!ENTITY dhpackage "jam">
<!ENTITY debian "<productname>Debian GNU/Linux</productname>">
<!ENTITY gnu "<acronym>GNU</acronym>">
]>
<refentry>
<refentryinfo>
<address>
&dhemail;
</address>
<author>
&dhfirstname;
&dhsurname;
</author>
<copyright>
<year>2001</year>
<holder>&dhusername;</holder>
</copyright>
&dhdate;
</refentryinfo>
<refmeta>
<refentrytitle>JAM</refentrytitle>
<manvolnum>1</manvolnum>
</refmeta>
<refnamediv>
<refname>Jam/MR</refname>
<refpurpose>Make(1) Redux</refpurpose>
</refnamediv>
<refsynopsisdiv>
<cmdsynopsis>
<command>jam</command>
<arg><option>-a</option></arg>
<arg><option>-n</option></arg>
<arg><option>-v</option></arg>
<arg><option>-d <replaceable/debug/</option></arg>
<arg><option>-f <replaceable/jambase/</option></arg>
<arg><option>-j <replaceable/jobs/</option></arg>
<arg><option>-o <replaceable/actionsfile/</option></arg>
<arg><option>-s <replaceable/var/=<replaceable/value/</option></arg>
<arg><option>-t <replaceable/target/</option></arg>
<arg repeat><option><replaceable/target/</option></arg>
</cmdsynopsis>
</refsynopsisdiv>
<refsect1>
<title>DESCRIPTION</title>
<para>Jam is a program construction tool, like make(1).</para>
<para>Jam recursively builds target files from source files, using
dependency information and updating actions expressed in the
Jambase file, which is written in jam's own interpreted language.
The default Jambase is compiled into jam and provides a
boilerplate for common use, relying on a user-provided file
"Jamfile" to enumerate actual targets and sources.</para>
</refsect1>
<refsect1>
<title>OPTIONS</title>
<variablelist>
<varlistentry>
<term><option/-a/</term>
<listitem>
<para>Build all targets anyway, even if they are up-to-date.</para>
</listitem>
</varlistentry>
<varlistentry>
<term><option>-d <replaceable/n/</option></term>
<listitem>
<para>Enable cumulative debugging levels from 1 to
<replaceable/n/. Interesting values are:
<glosslist>
<glossentry><glossterm/1/ <glossdef><simpara/Show
actions (the default)/</glossdef></glossentry>
<glossentry><glossterm/2/ <glossdef><simpara/Show
"quiet" actions and display all action
text/</glossdef></glossentry>
<glossentry><glossterm/3/ <glossdef><simpara>Show
dependency analysis, and target/source
timestamps/paths</simpara></glossdef></glossentry>
<glossentry><glossterm/4/ <glossdef><simpara/Show shell
arguments/</glossdef></glossentry>
<glossentry><glossterm/5/ <glossdef><simpara/Show rule
invocations and variable
expansions/</glossdef></glossentry>
<glossentry><glossterm/6/ <glossdef><simpara>Show
directory/header file/archive
scans</simpara></glossdef></glossentry>
<glossentry><glossterm/7/ <glossdef><simpara/Show
variable settings/</glossdef></glossentry>
<glossentry><glossterm/8/ <glossdef><simpara/Show
variable fetches/</glossdef></glossentry>
<glossentry><glossterm/9/ <glossdef><simpara/Show
variable manipulation, scanner
tokens/</glossdef></glossentry>
</glosslist>
</para>
</listitem>
</varlistentry>
<varlistentry>
<term><option>-d +<replaceable/n/</option></term>
<listitem>
<para>Enable debugging level <replaceable/n/.</para>
</listitem>
</varlistentry>
<varlistentry>
<term><option/-d 0/</term>
<listitem>
<para>Turn off all debugging levels; only errors are still
reported.</para>
</listitem>
</varlistentry>
<varlistentry>
<term><option>-f <replaceable/jambase/</option></term>
<listitem>
<para>Read <replaceable/jambase/ instead of using the
built-in Jambase. Only one <option/-f/ flag is permitted,
but the <replaceable/jambase/ may explicitly include other
files.</para>
</listitem>
</varlistentry>
<varlistentry>
<term><option>-j <replaceable/n/</option></term>
<listitem>
<para>Run up to <replaceable/n/ shell commands concurrently
(UNIX and NT only). The default is 1.</para>
</listitem>
</varlistentry>
<varlistentry>
<term><option/-n/</term>
<listitem>
<para>Don't actually execute the updating actions, but do
everything else. This changes the debug level default to
<option/-d2/.</para>
</listitem>
</varlistentry>
<varlistentry>
<term><option>-o <replaceable/file/</option></term>
<listitem>
<para>Write the updating actions to the specified file
instead of running them (or outputting them, as on the
Mac).</para>
</listitem>
</varlistentry>
<varlistentry>
<term><option>-s <replaceable/var/=<replaceable/value/</option></term>
<listitem>
<para>Set the variable <replaceable/var/ to
<replaceable/value/, overriding both internal variables and
variables imported from the environment. </para>
</listitem>
</varlistentry>
<varlistentry>
<term><option>-t <replaceable/target/</option></term>
<listitem>
<para>Rebuild <replaceable/target/ and everything that
depends on it, even if it is up-to-date.</para>
</listitem>
</varlistentry>
<varlistentry>
<term><option/-v/</term>
<listitem>
<para>Print the version of jam and exit.</para>
</listitem>
</varlistentry>
</variablelist>
</refsect1>
<refsect1>
<title>SEE ALSO</title>
<para>Jam is documented fully in HTML pages available on Debian
systems from
<filename>/usr/share/doc/jam/Jam.html</filename>.</para>
</refsect1>
<refsect1>
<title>AUTHOR</title>
<para>This manual page was created by &dhusername; &dhemail; from
the <filename/Jam.html/ documentation, for the &debian; system
(but may be used by others).</para>
</refsect1>
</refentry>
<!-- Keep this comment at the end of the file
Local variables:
sgml-omittag:t
sgml-shorttag:t
End:
-->

View File

@@ -0,0 +1,73 @@
#!/usr/bin/make -f
# Sample debian/rules that uses debhelper.
# GNU copyright 1997 to 1999 by Joey Hess.
# GNU copyright 2001 by Yann Dirson.
# This is the debian/rules file for packages jam and ftjam
# It should be usable with both packages without any change
# Uncomment this to turn on verbose mode.
#export DH_VERBOSE=1
# This is the debhelper compatibility version to use.
export DH_COMPAT=3
topdir=$(shell pwd)
jam=bjam
binname=bjam
build: build-stamp
build-stamp: debian/jam.1
dh_testdir
./build.sh
touch build-stamp
%.1: %.man.sgml
/usr/bin/docbook-to-man $< > $@
clean:
dh_testdir
dh_testroot
rm -f build-stamp
rm -rf bin.*
rm -f jam0 debian/jam.1
dh_clean
install: build
dh_testdir
dh_testroot
dh_clean -k
dh_installdirs
install -d ${topdir}/debian/${jam}/usr/bin
install -m755 bin.linuxx86/bjam ${topdir}/debian/${jam}/usr/bin/
install -d ${topdir}/debian/${jam}/usr/share/man/man1/
install -m644 debian/jam.1 ${topdir}/debian/${jam}/usr/share/man/man1/${binname}.1
# Build architecture-independent files here.
binary-indep: build install
# We have nothing to do by default.
# Build architecture-dependent files here.
binary-arch: build install
dh_testdir
dh_testroot
dh_installdocs README RELNOTES Jambase *.html
# dh_installemacsen
# dh_undocumented
dh_installchangelogs
dh_strip
dh_compress
dh_fixperms
dh_installdeb
dh_shlibdeps
dh_gencontrol
dh_md5sums
dh_builddeb
binary: binary-indep binary-arch
.PHONY: build clean binary-indep binary-arch binary install configure

View File

@@ -0,0 +1,158 @@
/*
* Copyright 2005, 2016. Rene Rivera
* Distributed under the Boost Software License, Version 1.0.
* (See accompanying file LICENSE.txt or copy at
* https://www.bfgroup.xyz/b2/LICENSE.txt)
*/
#include "jam.h"
#include "debug.h"
#include "output.h"
#include "hash.h"
#include <time.h>
static profile_frame * profile_stack = 0;
static struct hash * profile_hash = 0;
static profile_info profile_other = { 0 };
static profile_info profile_total = { 0 };
profile_frame * profile_init( OBJECT * rulename, profile_frame * frame )
{
if ( DEBUG_PROFILE ) profile_enter( rulename, frame );
return frame;
}
void profile_enter( OBJECT * rulename, profile_frame * frame )
{
if ( DEBUG_PROFILE )
{
double start = profile_clock();
profile_info * p;
if ( !profile_hash && rulename )
profile_hash = hashinit( sizeof( profile_info ), "profile" );
if ( rulename )
{
int found;
p = (profile_info *)hash_insert( profile_hash, rulename, &found );
if ( !found )
{
p->name = rulename;
p->cumulative = 0;
p->net = 0;
p->num_entries = 0;
p->stack_count = 0;
p->memory = 0;
}
}
else
{
p = &profile_other;
}
p->num_entries += 1;
p->stack_count += 1;
frame->info = p;
frame->caller = profile_stack;
profile_stack = frame;
frame->entry_time = profile_clock();
frame->overhead = 0;
frame->subrules = 0;
/* caller pays for the time it takes to play with the hash table */
if ( frame->caller )
frame->caller->overhead += frame->entry_time - start;
}
}
void profile_memory( size_t mem )
{
if ( DEBUG_PROFILE )
if ( profile_stack && profile_stack->info )
profile_stack->info->memory += ((double)mem) / 1024;
}
void profile_exit( profile_frame * frame )
{
if ( DEBUG_PROFILE )
{
/* Cumulative time for this call. */
double t = profile_clock() - frame->entry_time - frame->overhead;
/* If this rule is already present on the stack, do not add the time for
* this instance.
*/
if ( frame->info->stack_count == 1 )
frame->info->cumulative += t;
/* Net time does not depend on presence of the same rule in call stack.
*/
frame->info->net += t - frame->subrules;
if ( frame->caller )
{
/* Caller's cumulative time must account for this overhead. */
frame->caller->overhead += frame->overhead;
frame->caller->subrules += t;
}
/* Pop this stack frame. */
--frame->info->stack_count;
profile_stack = frame->caller;
}
}
static void dump_profile_entry( void * p_, void * ignored )
{
profile_info * p = (profile_info *)p_;
double mem_each = ( p->memory / ( p->num_entries ? p->num_entries : 1
) );
double q = p->net;
if (p->num_entries) q /= p->num_entries;
if ( !ignored )
{
profile_total.cumulative += p->net;
profile_total.memory += p->memory;
}
out_printf( "%10ld %12.6f %12.6f %12.8f %10.2f %10.2f %s\n", p->num_entries,
p->cumulative, p->net, q, p->memory, mem_each, object_str( p->name ) );
}
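/* Dumps the collected profile: one line per profiled rule showing the call
* count, gross (cumulative) seconds, net seconds excluding subrules, average
* net seconds per call, memory allocated in KiB and memory per call, followed
* by the [OTHER] and [TOTAL] summary rows.
*/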
void profile_dump()
{
if ( profile_hash )
{
out_printf( "%10s %12s %12s %12s %10s %10s %s\n", "--count--", "--gross--",
"--net--", "--each--", "--mem--", "--each--", "--name--" );
hashenumerate( profile_hash, dump_profile_entry, 0 );
profile_other.name = constant_other;
dump_profile_entry( &profile_other, 0 );
profile_total.name = constant_total;
dump_profile_entry( &profile_total, (void *)1 );
}
}
double profile_clock()
{
return ((double) clock()) / CLOCKS_PER_SEC;
}
OBJECT * profile_make_local( char const * scope )
{
if ( DEBUG_PROFILE )
{
return object_new( scope );
}
else
{
return 0;
}
}

View File

@@ -0,0 +1,63 @@
/*
* Copyright 2005, 2016. Rene Rivera
* Distributed under the Boost Software License, Version 1.0.
* (See accompanying file LICENSE.txt or copy at
* https://www.bfgroup.xyz/b2/LICENSE.txt)
*/
#ifndef BJAM_DEBUG_H
#define BJAM_DEBUG_H
#include "config.h"
#include "constants.h"
#include "object.h"
typedef struct profile_info
{
/* name of rule being called */
OBJECT * name;
/* cumulative time spent in rule, in seconds */
double cumulative;
/* time spent in rule proper, in seconds */
double net;
/* number of times the rule was entered */
unsigned long num_entries;
/* number of times this rule is currently present on the call stack */
unsigned long stack_count;
/* memory allocated by the call, in KiB */
double memory;
} profile_info;
typedef struct profile_frame
{
/* permanent storage where data accumulates */
profile_info * info;
/* overhead for profiling in this call */
double overhead;
/* time of last entry to rule */
double entry_time;
/* stack frame of caller */
struct profile_frame * caller;
/* time spent in subrules */
double subrules;
} profile_frame;
profile_frame * profile_init( OBJECT * rulename, profile_frame * );
void profile_enter( OBJECT * rulename, profile_frame * );
void profile_memory( size_t mem );
void profile_exit( profile_frame * );
void profile_dump();
double profile_clock();
#define PROFILE_ENTER( scope ) profile_frame PROF_ ## scope, *PROF_ ## scope ## _p = profile_init( constant_ ## scope, &PROF_ ## scope )
#define PROFILE_EXIT( scope ) profile_exit( PROF_ ## scope ## _p )
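/* Illustrative usage (assumed, not part of the original sources): bracket a
* measured region with the macro pair, e.g.
*   PROFILE_ENTER( MAIN_MAKE );
*   ... work ...
*   PROFILE_EXIT( MAIN_MAKE );
* which accumulates the elapsed time under the constant_MAIN_MAKE entry
* whenever profiling (DEBUG_PROFILE) is enabled.
*/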
OBJECT * profile_make_local( char const * );
#define PROFILE_ENTER_LOCAL( scope ) \
static OBJECT * constant_LOCAL_##scope = 0; \
if (DEBUG_PROFILE && !constant_LOCAL_##scope) constant_LOCAL_##scope = profile_make_local( #scope ); \
PROFILE_ENTER( LOCAL_##scope )
#define PROFILE_EXIT_LOCAL( scope ) PROFILE_EXIT( LOCAL_##scope )
#endif

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,64 @@
/*
* Copyright 2015 Steven Watanabe
* Distributed under the Boost Software License, Version 1.0.
* (See accompanying file LICENSE.txt or copy at
* https://www.bfgroup.xyz/b2/LICENSE.txt)
*/
#ifndef DEBUGGER_SW20150314_H
#define DEBUGGER_SW20150314_H
#include "config.h"
#include <setjmp.h>
#include "object.h"
#include "frames.h"
#ifdef JAM_DEBUGGER
void debug_on_instruction( FRAME * frame, OBJECT * file, int line );
void debug_on_enter_function( FRAME * frame, OBJECT * name, OBJECT * file, int line );
void debug_on_exit_function( OBJECT * name );
int debugger( void );
struct debug_child_data_t
{
int argc;
const char * * argv;
jmp_buf jmp;
};
extern struct debug_child_data_t debug_child_data;
extern LIST * debug_print_result;
extern const char debugger_opt[];
extern int debug_interface;
#define DEBUG_INTERFACE_CONSOLE 1
#define DEBUG_INTERFACE_MI 2
#define DEBUG_INTERFACE_CHILD 3
#define debug_is_debugging() ( debug_interface != 0 )
#define debug_on_enter_function( frame, name, file, line ) \
( debug_is_debugging()? \
debug_on_enter_function( frame, name, file, line ) : \
(void)0 )
#define debug_on_exit_function( name ) \
( debug_is_debugging()? \
debug_on_exit_function( name ) : \
(void)0 )
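/* The two macros above shadow the real hook functions so that each call site
* first checks debug_is_debugging() and skips the hook entirely when no
* debugger interface is active.
*/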
#if NT
void debug_init_handles( const char * in, const char * out );
#endif
#else
#define debug_on_instruction( frame, file, line ) ( ( void )0 )
#define debug_on_enter_function( frame, name, file, line ) ( ( void )0 )
#define debug_on_exit_function( name ) ( ( void )0 )
#define debug_is_debugging() ( 0 )
#endif
#endif

View File

@@ -0,0 +1,122 @@
/*
* Copyright 1993, 1995 Christopher Seiwald.
* Copyright 2007 Noel Belcourt.
*
* Utility functions shared between different exec*.c platform specific
* implementation modules.
*
* This file is part of Jam - see jam.c for Copyright information.
*/
#include "jam.h"
#include "execcmd.h"
#include "output.h"
#include <assert.h>
#include <stdio.h>
#include <string.h>
/* Internal interrupt counter. */
static int intr;
/* Constructs a list of command-line elements using the format specified by the
* given shell list.
*
* Given argv array should have at least MAXARGC + 1 elements.
* Slot numbers may be between 0 and 998 (inclusive).
*
* Constructed argv list will be zero terminated. Character arrays referenced by
* the argv structure elements will be either elements from the give shell list,
* internal static buffers or the given command string and should thus not
* considered owned by or released via the argv structure and should be
* considered invalidated by the next argv_from_shell() call.
*
* Shell list elements:
* - Starting with '%' - represent the command string.
* - Starting with '!' - represent the slot number (increased by one).
* - Anything else - used as a literal.
* - If no '%' element is found, the command string is appended as an extra.
*/
void argv_from_shell( char const * * argv, LIST * shell, char const * command,
int32_t const slot )
{
static char jobno[ 12 ];
int i;
int gotpercent = 0;
LISTITER iter = list_begin( shell );
LISTITER end = list_end( shell );
assert( 0 <= slot );
assert( slot < 999 );
sprintf( jobno, "%d", slot + 1 );
for ( i = 0; iter != end && i < MAXARGC; ++i, iter = list_next( iter ) )
{
switch ( object_str( list_item( iter ) )[ 0 ] )
{
case '%': argv[ i ] = command; ++gotpercent; break;
case '!': argv[ i ] = jobno; break;
default : argv[ i ] = object_str( list_item( iter ) );
}
}
if ( !gotpercent )
argv[ i++ ] = command;
argv[ i ] = NULL;
}
/* Returns whether the given command string contains lines longer than the given
* maximum.
*/
int check_cmd_for_too_long_lines( char const * command, int32_t max,
int32_t * const error_length, int32_t * const error_max_length )
{
while ( *command )
{
int32_t const l = int32_t(strcspn( command, "\n" ));
if ( l > max )
{
*error_length = l;
*error_max_length = max;
return EXEC_CHECK_LINE_TOO_LONG;
}
command += l;
if ( *command )
++command;
}
return EXEC_CHECK_OK;
}
/* Checks whether the given shell list is actually a request to execute raw
* commands without an external shell.
*/
int is_raw_command_request( LIST * shell )
{
return !list_empty( shell ) &&
!strcmp( object_str( list_front( shell ) ), "%" ) &&
list_next( list_begin( shell ) ) == list_end( shell );
}
/* Returns whether an interrupt has been detected so far. */
int interrupted( void )
{
return intr != 0;
}
/* Internal interrupt handler. */
void onintr( int disp )
{
++intr;
out_printf( "...interrupted\n" );
}

View File

@@ -0,0 +1,119 @@
/*
* Copyright 1993, 1995 Christopher Seiwald.
*
* This file is part of Jam - see jam.c for Copyright information.
*/
/*
* execcmd.h - execute a shell script.
*
* Defines the interface to be implemented in platform specific implementation
* modules as well as different shared utility functions prepared in the
* execcmd.c module.
*/
#ifndef EXECCMD_H
#define EXECCMD_H
#include "config.h"
#include "lists.h"
#include "jam_strings.h"
#include "timestamp.h"
typedef struct timing_info
{
double system;
double user;
timestamp start;
timestamp end;
} timing_info;
typedef void (* ExecCmdCallback)
(
void * const closure,
int const status,
timing_info const * const,
char const * const cmd_stdout,
char const * const cmd_stderr,
int const cmd_exit_reason
);
/* Global initialization. Must be called after setting
* globs.jobs. May be called multiple times. */
void exec_init( void );
/* Global cleanup */
void exec_done( void );
/* Status codes passed to ExecCmdCallback routines. */
#define EXEC_CMD_OK 0
#define EXEC_CMD_FAIL 1
#define EXEC_CMD_INTR 2
int exec_check
(
string const * command,
LIST * * pShell,
int32_t * error_length,
int32_t * error_max_length
);
/* exec_check() return codes. */
#define EXEC_CHECK_OK 101
#define EXEC_CHECK_NOOP 102
#define EXEC_CHECK_LINE_TOO_LONG 103
#define EXEC_CHECK_TOO_LONG 104
/* Prevents action output from being written
* immediately to stdout/stderr.
*/
#define EXEC_CMD_QUIET 1
void exec_cmd
(
string const * command,
int flags,
ExecCmdCallback func,
void * closure,
LIST * shell
);
void exec_wait();
/******************************************************************************
* *
* Utility functions defined in the execcmd.c module. *
* *
******************************************************************************/
/* Constructs a list of command-line elements using the format specified by the
* given shell list.
*/
void argv_from_shell( char const * * argv, LIST * shell, char const * command,
int32_t const slot );
/* Interrupt routine bumping the internal interrupt counter. Needs to be
* registered by platform specific exec*.c modules.
*/
void onintr( int disp );
/* Returns whether an interrupt has been detected so far. */
int interrupted( void );
/* Checks whether the given shell list is actually a request to execute raw
* commands without an external shell.
*/
int is_raw_command_request( LIST * shell );
/* Utility worker for exec_check() checking whether all the given command lines
* are under the specified length limit.
*/
int check_cmd_for_too_long_lines( char const * command, int32_t max,
int32_t * const error_length, int32_t * const error_max_length );
/* Maximum shell command line length.
*/
int32_t shell_maxline();
#endif

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,612 @@
/*
* Copyright 1993, 1995 Christopher Seiwald.
* Copyright 2007 Noel Belcourt.
*
* This file is part of Jam - see jam.c for Copyright information.
*/
#include "jam.h"
#ifdef USE_EXECUNIX
#include "execcmd.h"
#include "lists.h"
#include "output.h"
#include "jam_strings.h"
#include <errno.h>
#include <signal.h>
#include <stdio.h>
#include <time.h>
#include <unistd.h> /* vfork(), _exit(), STDOUT_FILENO and such */
#include <sys/resource.h>
#include <sys/times.h>
#include <sys/wait.h>
#include <poll.h>
#if defined(sun) || defined(__sun)
#include <wait.h>
#endif
#include <sys/times.h>
#if defined(__APPLE__)
#define NO_VFORK
#endif
#ifdef NO_VFORK
#define vfork() fork()
#endif
/*
* execunix.c - execute a shell script on UNIX/OS2/AmigaOS
*
* If $(JAMSHELL) is defined, uses that to formulate execvp()/spawnvp(). The
* default is: /bin/sh -c
*
* In $(JAMSHELL), % expands to the command string and ! expands to the slot
* number (starting at 1) for multiprocess (-j) invocations. If $(JAMSHELL) does
* not include a %, it is tacked on as the last argument.
*
* Each word must be an individual element in a jam variable value.
*
* Do not just set JAMSHELL to /bin/sh - it will not work!
*
* External routines:
* exec_check() - preprocess and validate the command.
* exec_cmd() - launch an async command execution.
* exec_wait() - wait for any of the async command processes to terminate.
*/
/* find a free slot in the running commands table */
static int get_free_cmdtab_slot();
/* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */
static clock_t tps;
/* We hold stdout & stderr child process information in two element arrays
* indexed as follows.
*/
#define OUT 0
#define ERR 1
static struct cmdtab_t
{
int pid; /* on win32, a real process handle */
int fd[ 2 ]; /* file descriptors for stdout and stderr */
FILE * stream[ 2 ]; /* child's stdout and stderr file streams */
clock_t start_time; /* start time of child process */
int exit_reason; /* termination status */
char * buffer[ 2 ]; /* buffers to hold stdout and stderr, if any */
int buf_size[ 2 ]; /* buffer sizes in bytes */
timestamp start_dt; /* start of command timestamp */
int flags;
/* Function called when the command completes. */
ExecCmdCallback func;
/* Opaque data passed back to the 'func' callback. */
void * closure;
} * cmdtab = NULL;
static int cmdtab_size = 0;
/* Contains both stdout and stderr of all processes.
* The length is either globs.jobs or globs.jobs * 2
* depending on globs.pipe_action.
*/
struct pollfd * wait_fds = NULL;
#define WAIT_FDS_SIZE ( globs.jobs * ( globs.pipe_action ? 2 : 1 ) )
#define GET_WAIT_FD( job_idx ) ( wait_fds + ( ( job_idx * ( globs.pipe_action ? 2 : 1 ) ) ) )
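/* For example, with globs.pipe_action enabled, job 3 polls its stdout via
* wait_fds[ 6 ] and its stderr via wait_fds[ 7 ].
*/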
/*
* exec_init() - global initialization
*/
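/* Note: freshly added slots keep their poll descriptors at -1, which poll()
* ignores until exec_cmd() assigns real pipe descriptors to them.
*/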
void exec_init( void )
{
int i;
if ( globs.jobs > cmdtab_size )
{
cmdtab = (cmdtab_t*)BJAM_REALLOC( cmdtab, globs.jobs * sizeof( *cmdtab ) );
memset( cmdtab + cmdtab_size, 0, ( globs.jobs - cmdtab_size ) * sizeof( *cmdtab ) );
wait_fds = (pollfd*)BJAM_REALLOC( wait_fds, WAIT_FDS_SIZE * sizeof ( *wait_fds ) );
for ( i = cmdtab_size; i < globs.jobs; ++i )
{
GET_WAIT_FD( i )[ OUT ].fd = -1;
GET_WAIT_FD( i )[ OUT ].events = POLLIN;
if ( globs.pipe_action )
{
GET_WAIT_FD( i )[ ERR ].fd = -1;
GET_WAIT_FD( i )[ ERR ].events = POLLIN;
}
}
cmdtab_size = globs.jobs;
}
}
void exec_done( void )
{
BJAM_FREE( cmdtab );
BJAM_FREE( wait_fds );
}
/*
* exec_check() - preprocess and validate the command.
*/
int exec_check
(
string const * command,
LIST * * pShell,
int32_t * error_length,
int32_t * error_max_length
)
{
int const is_raw_cmd = is_raw_command_request( *pShell );
/* We allow empty commands for non-default shells since we do not really
* know what they are going to do with such commands.
*/
if ( !command->size && ( is_raw_cmd || list_empty( *pShell ) ) )
return EXEC_CHECK_NOOP;
return is_raw_cmd
? EXEC_CHECK_OK
: check_cmd_for_too_long_lines( command->value, shell_maxline(), error_length,
error_max_length );
}
/*
* exec_cmd() - launch an async command execution.
*/
/* We hold file descriptors for pipes used to communicate with child processes
* in two element arrays indexed as follows.
*/
#define EXECCMD_PIPE_READ 0
#define EXECCMD_PIPE_WRITE 1
void exec_cmd
(
string const * command,
int flags,
ExecCmdCallback func,
void * closure,
LIST * shell
)
{
struct sigaction ignore, saveintr, savequit;
sigset_t chldmask, savemask;
int const slot = get_free_cmdtab_slot();
int out[ 2 ];
int err[ 2 ];
char const * argv[ MAXARGC + 1 ]; /* +1 for NULL */
/* Initialize default shell. */
static LIST * default_shell;
if ( !default_shell )
default_shell = list_push_back( list_new(
object_new( "/bin/sh" ) ),
object_new( "-c" ) );
if ( list_empty( shell ) )
shell = default_shell;
/* Formulate argv. If shell was defined, be prepared for % and ! subs.
* Otherwise, use stock /bin/sh.
*/
argv_from_shell( argv, shell, command->value, slot );
if ( DEBUG_EXECCMD )
{
int i;
out_printf( "Using shell: " );
list_print( shell );
out_printf( "\n" );
for ( i = 0; argv[ i ]; ++i )
out_printf( " argv[%d] = '%s'\n", i, argv[ i ] );
}
/* Create pipes for collecting child output. */
if ( pipe( out ) < 0 || ( globs.pipe_action && pipe( err ) < 0 ) )
{
errno_puts( "pipe" );
exit( EXITBAD );
}
/* Start the command */
timestamp_current( &cmdtab[ slot ].start_dt );
if ( 0 < globs.timeout )
{
/* Handle hung processes by manually tracking elapsed time and signal
* process when time limit expires.
*/
struct tms buf;
cmdtab[ slot ].start_time = times( &buf );
/* Make a global, only do this once. */
if ( !tps ) tps = sysconf( _SC_CLK_TCK );
}
/* Child does not need the read pipe ends used by the parent. */
fcntl( out[ EXECCMD_PIPE_READ ], F_SETFD, FD_CLOEXEC );
if ( globs.pipe_action )
fcntl( err[ EXECCMD_PIPE_READ ], F_SETFD, FD_CLOEXEC );
/* ignore SIGINT and SIGQUIT */
ignore.sa_handler = SIG_IGN;
sigemptyset(&ignore.sa_mask);
ignore.sa_flags = 0;
if (sigaction(SIGINT, &ignore, &saveintr) < 0)
return;
if (sigaction(SIGQUIT, &ignore, &savequit) < 0)
return;
/* block SIGCHLD */
sigemptyset(&chldmask);
sigaddset(&chldmask, SIGCHLD);
if (sigprocmask(SIG_BLOCK, &chldmask, &savemask) < 0)
return;
if ( ( cmdtab[ slot ].pid = vfork() ) == -1 )
{
errno_puts( "vfork" );
exit( EXITBAD );
}
if ( cmdtab[ slot ].pid == 0 )
{
/*****************/
/* Child process */
/*****************/
int const pid = getpid();
/* restore previous signals */
sigaction(SIGINT, &saveintr, NULL);
sigaction(SIGQUIT, &savequit, NULL);
sigprocmask(SIG_SETMASK, &savemask, NULL);
/* Redirect stdout and stderr to pipes inherited from the parent. */
dup2( out[ EXECCMD_PIPE_WRITE ], STDOUT_FILENO );
dup2( globs.pipe_action ? err[ EXECCMD_PIPE_WRITE ] :
out[ EXECCMD_PIPE_WRITE ], STDERR_FILENO );
close( out[ EXECCMD_PIPE_WRITE ] );
if ( globs.pipe_action )
close( err[ EXECCMD_PIPE_WRITE ] );
/* Make this process a process group leader so that when we kill it, all
* child processes of this process are terminated as well. We use
* killpg( pid, SIGKILL ) to kill the process group leader and all its
* children.
*/
if ( 0 < globs.timeout )
{
struct rlimit r_limit;
r_limit.rlim_cur = globs.timeout;
r_limit.rlim_max = globs.timeout;
setrlimit( RLIMIT_CPU, &r_limit );
}
if (0 != setpgid( pid, pid )) {
errno_puts("setpgid(child)");
/* exit( EXITBAD ); */
}
execvp( argv[ 0 ], (char * *)argv );
errno_puts( "execvp" );
_exit( 127 );
}
/******************/
/* Parent process */
/******************/
/* redundant call, ignore return value */
setpgid(cmdtab[ slot ].pid, cmdtab[ slot ].pid);
/* The parent does not need the write pipe ends used by the child. */
close( out[ EXECCMD_PIPE_WRITE ] );
if ( globs.pipe_action )
close( err[ EXECCMD_PIPE_WRITE ] );
/* Set both pipe read file descriptors to non-blocking. */
fcntl( out[ EXECCMD_PIPE_READ ], F_SETFL, O_NONBLOCK );
if ( globs.pipe_action )
fcntl( err[ EXECCMD_PIPE_READ ], F_SETFL, O_NONBLOCK );
/* Parent reads from out[ EXECCMD_PIPE_READ ]. */
cmdtab[ slot ].fd[ OUT ] = out[ EXECCMD_PIPE_READ ];
cmdtab[ slot ].stream[ OUT ] = fdopen( cmdtab[ slot ].fd[ OUT ], "rb" );
if ( !cmdtab[ slot ].stream[ OUT ] )
{
errno_puts( "fdopen" );
exit( EXITBAD );
}
/* Parent reads from err[ EXECCMD_PIPE_READ ]. */
if ( globs.pipe_action )
{
cmdtab[ slot ].fd[ ERR ] = err[ EXECCMD_PIPE_READ ];
cmdtab[ slot ].stream[ ERR ] = fdopen( cmdtab[ slot ].fd[ ERR ], "rb" );
if ( !cmdtab[ slot ].stream[ ERR ] )
{
errno_puts( "fdopen" );
exit( EXITBAD );
}
}
GET_WAIT_FD( slot )[ OUT ].fd = out[ EXECCMD_PIPE_READ ];
if ( globs.pipe_action )
GET_WAIT_FD( slot )[ ERR ].fd = err[ EXECCMD_PIPE_READ ];
cmdtab[ slot ].flags = flags;
/* Save input data into the selected running commands table slot. */
cmdtab[ slot ].func = func;
cmdtab[ slot ].closure = closure;
/* restore previous signals */
sigaction(SIGINT, &saveintr, NULL);
sigaction(SIGQUIT, &savequit, NULL);
sigprocmask(SIG_SETMASK, &savemask, NULL);
}
#undef EXECCMD_PIPE_READ
#undef EXECCMD_PIPE_WRITE
/* Returns 1 if file descriptor is closed, or 0 if it is still alive.
*
* i is index into cmdtab
*
* s (stream) indexes:
* - cmdtab[ i ].stream[ s ]
* - cmdtab[ i ].buffer[ s ]
* - cmdtab[ i ].fd [ s ]
*/
static int read_descriptor( int i, int s )
{
int ret;
char buffer[ BUFSIZ ];
while ( 0 < ( ret = fread( buffer, sizeof( char ), BUFSIZ - 1,
cmdtab[ i ].stream[ s ] ) ) )
{
buffer[ ret ] = 0;
/* Copy it to our output if appropriate */
if ( ! ( cmdtab[ i ].flags & EXEC_CMD_QUIET ) )
{
if ( s == OUT && ( globs.pipe_action != 2 ) )
out_data( buffer );
else if ( s == ERR && ( globs.pipe_action & 2 ) )
err_data( buffer );
}
if ( !cmdtab[ i ].buffer[ s ] )
{
/* Never been allocated. */
if ( globs.max_buf && ret > globs.max_buf )
{
ret = globs.max_buf;
buffer[ ret ] = 0;
}
cmdtab[ i ].buf_size[ s ] = ret + 1;
cmdtab[ i ].buffer[ s ] = (char*)BJAM_MALLOC_ATOMIC( ret + 1 );
memcpy( cmdtab[ i ].buffer[ s ], buffer, ret + 1 );
}
else
{
/* Previously allocated. */
if ( cmdtab[ i ].buf_size[ s ] < globs.max_buf || !globs.max_buf )
{
char * tmp = cmdtab[ i ].buffer[ s ];
int const old_len = cmdtab[ i ].buf_size[ s ] - 1;
int const new_len = old_len + ret + 1;
cmdtab[ i ].buf_size[ s ] = new_len;
cmdtab[ i ].buffer[ s ] = (char*)BJAM_MALLOC_ATOMIC( new_len );
memcpy( cmdtab[ i ].buffer[ s ], tmp, old_len );
memcpy( cmdtab[ i ].buffer[ s ] + old_len, buffer, ret + 1 );
BJAM_FREE( tmp );
}
}
}
/* If buffer full, ensure last buffer char is newline so that jam log
* contains the command status at the beginning of its own line instead of
* being appended to the end of the previous output.
*/
if ( globs.max_buf && globs.max_buf <= cmdtab[ i ].buf_size[ s ] )
cmdtab[ i ].buffer[ s ][ cmdtab[ i ].buf_size[ s ] - 2 ] = '\n';
return feof( cmdtab[ i ].stream[ s ] );
}
/*
* close_streams() - Close the stream and pipe descriptor.
*/
static void close_streams( int const i, int const s )
{
fclose( cmdtab[ i ].stream[ s ] );
cmdtab[ i ].stream[ s ] = 0;
close( cmdtab[ i ].fd[ s ] );
cmdtab[ i ].fd[ s ] = 0;
GET_WAIT_FD( i )[ s ].fd = -1;
}
/*
* exec_wait() - wait for any of the async command processes to terminate.
*
* May register more than one terminated child process but will exit as soon as
* at least one has been registered.
*/
void exec_wait()
{
int finished = 0;
/* Process children that signaled. */
while ( !finished )
{
int i;
int select_timeout = globs.timeout;
/* Check for timeouts:
* - kill children that already timed out
* - decide how long until the next one times out
*/
if ( globs.timeout > 0 )
{
struct tms buf;
clock_t const current = times( &buf );
for ( i = 0; i < globs.jobs; ++i )
if ( cmdtab[ i ].pid )
{
clock_t const consumed =
( current - cmdtab[ i ].start_time ) / tps;
if ( consumed >= globs.timeout )
{
killpg( cmdtab[ i ].pid, SIGKILL );
cmdtab[ i ].exit_reason = EXIT_TIMEOUT;
}
else if ( globs.timeout - consumed < select_timeout )
select_timeout = globs.timeout - consumed;
}
}
/* poll() will wait for I/O on a descriptor, a signal, or a timeout. */
{
/* disable child termination signals while in poll */
int ret;
int timeout;
sigset_t sigmask;
sigemptyset(&sigmask);
sigaddset(&sigmask, SIGCHLD);
sigprocmask(SIG_BLOCK, &sigmask, NULL);
/* If no timeout is specified, pass -1 (which means no timeout,
* wait indefinitely) to poll, to prevent busy-looping.
*/
timeout = select_timeout? select_timeout * 1000 : -1;
while ( ( ret = poll( wait_fds, WAIT_FDS_SIZE, timeout ) ) == -1 )
if ( errno != EINTR )
break;
/* restore original signal mask by unblocking sigchld */
sigprocmask(SIG_UNBLOCK, &sigmask, NULL);
if ( ret <= 0 )
continue;
}
for ( i = 0; i < globs.jobs; ++i )
{
int out_done = 0;
int err_done = 0;
if ( GET_WAIT_FD( i )[ OUT ].revents )
out_done = read_descriptor( i, OUT );
if ( globs.pipe_action && ( GET_WAIT_FD( i )[ ERR ].revents ) )
err_done = read_descriptor( i, ERR );
/* If feof on either descriptor, we are done. */
if ( out_done || err_done )
{
int pid;
int status;
int rstat;
timing_info time_info;
struct rusage cmd_usage;
/* We found a terminated child process - our search is done. */
finished = 1;
/* Close the stream and pipe descriptors. */
close_streams( i, OUT );
if ( globs.pipe_action )
close_streams( i, ERR );
/* Reap the child and release resources. */
while ( ( pid = wait4( cmdtab[ i ].pid, &status, 0, &cmd_usage ) ) == -1 )
if ( errno != EINTR )
break;
if ( pid != cmdtab[ i ].pid )
{
err_printf( "unknown pid %d with errno = %d\n", pid, errno );
exit( EXITBAD );
}
/* Set reason for exit if not timed out. */
if ( WIFEXITED( status ) )
cmdtab[ i ].exit_reason = WEXITSTATUS( status )
? EXIT_FAIL
: EXIT_OK;
{
time_info.system = ((double)(cmd_usage.ru_stime.tv_sec)*1000000.0+(double)(cmd_usage.ru_stime.tv_usec))/1000000.0;
time_info.user = ((double)(cmd_usage.ru_utime.tv_sec)*1000000.0+(double)(cmd_usage.ru_utime.tv_usec))/1000000.0;
timestamp_copy( &time_info.start, &cmdtab[ i ].start_dt );
timestamp_current( &time_info.end );
}
/* Drive the completion. */
if ( interrupted() )
rstat = EXEC_CMD_INTR;
else if ( status )
rstat = EXEC_CMD_FAIL;
else
rstat = EXEC_CMD_OK;
/* Call the callback, may call back to jam rule land. */
(*cmdtab[ i ].func)( cmdtab[ i ].closure, rstat, &time_info,
cmdtab[ i ].buffer[ OUT ], cmdtab[ i ].buffer[ ERR ],
cmdtab[ i ].exit_reason );
/* Clean up the command's running commands table slot. */
BJAM_FREE( cmdtab[ i ].buffer[ OUT ] );
cmdtab[ i ].buffer[ OUT ] = 0;
cmdtab[ i ].buf_size[ OUT ] = 0;
BJAM_FREE( cmdtab[ i ].buffer[ ERR ] );
cmdtab[ i ].buffer[ ERR ] = 0;
cmdtab[ i ].buf_size[ ERR ] = 0;
cmdtab[ i ].pid = 0;
cmdtab[ i ].func = 0;
cmdtab[ i ].closure = 0;
cmdtab[ i ].start_time = 0;
}
}
}
}
/*
* Find a free slot in the running commands table.
*/
static int get_free_cmdtab_slot()
{
int slot;
for ( slot = 0; slot < globs.jobs; ++slot )
if ( !cmdtab[ slot ].pid )
return slot;
err_printf( "no slots for child!\n" );
exit( EXITBAD );
}
int32_t shell_maxline()
{
return MAXLINE;
}
# endif /* USE_EXECUNIX */

View File

@@ -0,0 +1,425 @@
/*
* Copyright 1993, 1995 Christopher Seiwald.
*
* This file is part of Jam - see jam.c for Copyright information.
*/
/* This file is ALSO:
* Copyright 2001-2004 David Abrahams.
* Copyright 2015 Artur Shepilko.
* Distributed under the Boost Software License, Version 1.0.
* (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt)
*/
/*
* execvms.c - execute a shell script, ala VMS.
*
* The approach is this:
*
* If the command is a single line, and shorter than WRTLEN (what we believe to
* be the maximum line length), we just system() it.
*
* If the command is multi-line, or longer than WRTLEN, we write the command
* block to a temp file, splitting long lines (using "-" at the end of the line
* to indicate continuation), and then source that temp file. We use special
* logic to make sure we do not continue in the middle of a quoted string.
*
* 05/04/94 (seiwald) - async multiprocess interface; noop on VMS
* 12/20/96 (seiwald) - rewritten to handle multi-line commands well
* 01/14/96 (seiwald) - do not put -'s between "'s
* 01/19/15 (shepilko)- adapt for jam-3.1.19
*/
#include "jam.h"
#include "lists.h"
#include "execcmd.h"
#include "output.h"
#ifdef OS_VMS
#include <stdio.h>
#include <string.h>
#include <stdlib.h>
#include <ctype.h>
#include <times.h>
#include <unistd.h>
#include <errno.h>
#define WRTLEN 240
#define MIN( a, b ) ((a) < (b) ? (a) : (b))
#define CHAR_DQUOTE '"'
#define VMS_PATH_MAX 1024
#define VMS_COMMAND_MAX 1024
#define VMS_WARNING 0
#define VMS_SUCCESS 1
#define VMS_ERROR 2
#define VMS_FATAL 4
char commandbuf[ VMS_COMMAND_MAX ] = { 0 };
static int get_status(int vms_status);
static clock_t get_cpu_time();
/*
* exec_check() - preprocess and validate the command.
*/
int exec_check
(
string const * command,
LIST * * pShell,
int32_t * error_length,
int32_t * error_max_length
)
{
int const is_raw_cmd = 1;
/* We allow empty commands for non-default shells since we do not really
* know what they are going to do with such commands.
*/
if ( !command->size && ( is_raw_cmd || list_empty( *pShell ) ) )
return EXEC_CHECK_NOOP;
return is_raw_cmd
? EXEC_CHECK_OK
: check_cmd_for_too_long_lines( command->value, shell_maxline(), error_length,
error_max_length );
}
/*
* exec_cmd() - execute system command.
*/
void exec_cmd
(
string const * command,
int flags,
ExecCmdCallback func,
void * closure,
LIST * shell
)
{
char * s;
char * e;
char * p;
int vms_status;
int status;
int rstat = EXEC_CMD_OK;
int exit_reason = EXIT_OK;
timing_info time_info;
timestamp start_dt;
struct tms start_time;
struct tms end_time;
char * cmd_string = command->value;
/* Start the command */
timestamp_current( &time_info.start );
times( &start_time );
/* See if command is more than one line discounting leading/trailing white
* space.
*/
for ( s = cmd_string; *s && isspace( *s ); ++s );
e = p = strchr( s, '\n' );
while ( p && isspace( *p ) )
++p;
/* If multi line or long, write to com file. Otherwise, exec directly. */
if ( ( p && *p ) || ( e - s > WRTLEN ) )
{
FILE * f;
/* Create temp file invocation. */
if ( !*commandbuf )
{
OBJECT * tmp_filename = 0;
tmp_filename = path_tmpfile();
/* Get tmp file name in VMS format. */
{
string os_filename[ 1 ];
string_new( os_filename );
path_translate_to_os( object_str( tmp_filename ), os_filename );
object_free( tmp_filename );
tmp_filename = object_new( os_filename->value );
string_free( os_filename );
}
commandbuf[0] = '@';
strncat( commandbuf + 1, object_str( tmp_filename ),
VMS_COMMAND_MAX - 2);
}
/* Open tempfile. */
if ( !( f = fopen( commandbuf + 1, "w" ) ) )
{
err_printf( "[errno %d] failed to wite cmd_string file '%s': %s",
errno, commandbuf + 1, strerror(errno) );
rstat = EXEC_CMD_FAIL;
exit_reason = EXIT_FAIL;
times( &end_time );
timestamp_current( &time_info.end );
time_info.system = (double)( end_time.tms_cstime -
start_time.tms_cstime ) / 100.;
time_info.user = (double)( end_time.tms_cutime -
start_time.tms_cutime ) / 100.;
(*func)( closure, rstat, &time_info, "" , "", exit_reason );
return;
}
/* Running from TMP, so explicitly set default to CWD. */
{
char * cwd = NULL;
int cwd_buf_size = VMS_PATH_MAX;
while ( !(cwd = getcwd( NULL, cwd_buf_size ) ) /* alloc internally */
&& errno == ERANGE )
{
cwd_buf_size += VMS_PATH_MAX;
}
if ( !cwd )
{
errno_puts( "can not get current working directory" );
exit( EXITBAD );
}
fprintf( f, "$ SET DEFAULT %s\n", cwd);
free( cwd );
}
/* For each line of the command. */
while ( *cmd_string )
{
char * s = strchr( cmd_string,'\n' );
int len = s ? s + 1 - cmd_string : strlen( cmd_string );
fputc( '$', f );
/* For each chunk of a line that needs to be split. */
while ( len > 0 )
{
char * q = cmd_string;
char * qe = cmd_string + MIN( len, WRTLEN );
char * qq = q;
int quote = 0;
/* Look for matching "s -- expected in the same line. */
for ( ; q < qe; ++q )
if ( ( *q == CHAR_DQUOTE ) && ( quote = !quote ) )
qq = q;
/* When the chunk needs splitting and ends inside an open quote,
* back up to the opening quote and split off at it.
* When the quoted string spans the whole chunk,
* pass the string as a whole.
* If no matching quote is found, dump the rest of the command.
*/
if ( len > WRTLEN && quote )
{
q = qq;
if ( q == cmd_string )
{
for ( q = qe; q < ( cmd_string + len )
&& *q != CHAR_DQUOTE ; ++q) {}
q = ( *q == CHAR_DQUOTE) ? ( q + 1 ) : ( cmd_string + len );
}
}
fwrite( cmd_string, ( q - cmd_string ), 1, f );
len -= ( q - cmd_string );
cmd_string = q;
if ( len )
{
fputc( '-', f );
fputc( '\n', f );
}
}
}
fclose( f );
if ( DEBUG_EXECCMD )
{
FILE * f;
char buf[ WRTLEN + 1 ] = { 0 };
if ( (f = fopen( commandbuf + 1, "r" ) ) )
{
int nbytes;
printf( "Command file: %s\n", commandbuf + 1 );
do
{
nbytes = fread( buf, sizeof( buf[0] ), sizeof( buf ) - 1, f );
if ( nbytes ) fwrite(buf, sizeof( buf[0] ), nbytes, stdout);
}
while ( !feof(f) );
fclose(f);
}
}
/* Execute command file */
vms_status = system( commandbuf );
status = get_status( vms_status );
unlink( commandbuf + 1 );
}
else
{
/* Execute single line command. Strip trailing newline before execing.
* TODO: Calling via popen() and capturing the output may be better here.
*/
if ( e ) *e = 0;
status = VMS_SUCCESS; /* success on empty command */
if ( *s )
{
vms_status = system( s );
status = get_status( vms_status );
}
}
times( &end_time );
timestamp_current( &time_info.end );
time_info.system = (double)( end_time.tms_cstime -
start_time.tms_cstime ) / 100.;
time_info.user = (double)( end_time.tms_cutime -
start_time.tms_cutime ) / 100.;
/* Fail for error or fatal error. OK on OK, warning or info exit. */
if ( ( status == VMS_ERROR ) || ( status == VMS_FATAL ) )
{
rstat = EXEC_CMD_FAIL;
exit_reason = EXIT_FAIL;
}
(*func)( closure, rstat, &time_info, "" , "", exit_reason );
}
void exec_wait()
{
return;
}
/* get_status() - returns status of the VMS command execution.
- Map VMS status to its severity (lower 3-bits)
- W-DCL-IVVERB is returned on unrecognized command -- map to general ERROR
*/
int get_status( int vms_status )
{
#define VMS_STATUS_DCL_IVVERB 0x00038090
int status;
switch (vms_status)
{
case VMS_STATUS_DCL_IVVERB:
status = VMS_ERROR;
break;
default:
status = vms_status & 0x07; /* $SEVERITY bits */
}
return status;
}
#define __NEW_STARLET 1
#include <stdio.h>
#include <stdlib.h>
#include <time.h>
#include <ssdef.h>
#include <stsdef.h>
#include <jpidef.h>
#include <efndef.h>
#include <iosbdef.h>
#include <iledef.h>
#include <lib$routines.h>
#include <starlet.h>
/*
* get_cpu_time() - returns CPU time in CLOCKS_PER_SEC since process start.
* on error returns (clock_t)-1.
*
* Intended to emulate (system + user) result of *NIX times(), if CRTL times()
* is not available.
* However, this accounts only for the current process. To account for child
* processes, these need to be directly spawned/forked via exec().
* Moreover, child processes should be running a C main program or a program
* that calls VAXC$CRTL_INIT or DECC$CRTL_INIT.
*/
clock_t get_cpu_time()
{
clock_t result = (clock_t) 0;
IOSB iosb;
int status;
long cputime = 0;
ILE3 jpi_items[] = {
{ sizeof( cputime ), JPI$_CPUTIM, &cputime, NULL }, /* longword int, 10ms */
{ 0 },
};
status = sys$getjpiw (EFN$C_ENF, 0, 0, jpi_items, &iosb, 0, 0);
if ( !$VMS_STATUS_SUCCESS( status ) )
{
lib$signal( status );
result = (clock_t) -1;
return result;
}
result = ( cputime / 100 ) * CLOCKS_PER_SEC;
return result;
}
int32_t shell_maxline()
{
return MAXLINE;
}
# endif /* OS_VMS */

View File

@@ -0,0 +1,517 @@
/*
* Copyright 1993, 1995 Christopher Seiwald.
*
* This file is part of Jam - see jam.c for Copyright information.
*/
/* This file is ALSO:
* Copyright 2001-2004 David Abrahams.
* Copyright 2005 Rene Rivera.
* Distributed under the Boost Software License, Version 1.0.
* (See accompanying file LICENSE.txt or copy at
* https://www.bfgroup.xyz/b2/LICENSE.txt)
*/
/*
* filent.c - scan directories and archives on NT
*
* External routines:
* file_archscan() - scan an archive for files
* file_mkdir() - create a directory
* file_supported_fmt_resolution() - file modification timestamp resolution
*
* External routines called only via routines in filesys.c:
* file_collect_dir_content_() - collects directory content information
* file_dirscan_() - OS specific file_dirscan() implementation
* file_query_() - query information about a path from the OS
* file_collect_archive_content_() - collects information about archive members
* file_archivescan_() - OS specific file_archivescan() implementation
*/
#include "jam.h"
#ifdef OS_NT
#include "filesys.h"
#include "object.h"
#include "pathsys.h"
#include "jam_strings.h"
#include "output.h"
#ifdef __BORLANDC__
# undef FILENAME /* cpp namespace collision */
#endif
#define WIN32_LEAN_AND_MEAN
#include <windows.h>
#include <assert.h>
#include <ctype.h>
#include <direct.h>
#include <io.h>
int file_collect_archive_content_( file_archive_info_t * const archive );
/*
* file_collect_dir_content_() - collects directory content information
*/
int file_collect_dir_content_( file_info_t * const d )
{
PATHNAME f;
string pathspec[ 1 ];
string pathname[ 1 ];
LIST * files = L0;
int32_t d_length;
assert( d );
assert( d->is_dir );
assert( list_empty( d->files ) );
d_length = int32_t(strlen( object_str( d->name ) ));
memset( (char *)&f, '\0', sizeof( f ) );
f.f_dir.ptr = object_str( d->name );
f.f_dir.len = d_length;
/* Prepare file search specification for the FindXXX() Windows API. */
if ( !d_length )
string_copy( pathspec, ".\\*" );
else
{
/* We can not assume the given folder name never includes a trailing
* path separator; otherwise we would not support the Windows root
* folder specified without its drive letter, i.e. '\'.
*/
char const trailingChar = object_str( d->name )[ d_length - 1 ] ;
string_copy( pathspec, object_str( d->name ) );
if ( ( trailingChar != '\\' ) && ( trailingChar != '/' ) )
string_append( pathspec, "\\" );
string_append( pathspec, "*" );
}
/* The following code for collecting information about all files in a folder
* needs to be kept synchronized with how the file_query() operation is
* implemented (collects information about a single file).
*/
{
/* FIXME: Avoid duplicate FindXXX Windows API calls here and in the code
* determining a normalized path.
*/
WIN32_FIND_DATAA finfo;
HANDLE const findHandle = FindFirstFileA( pathspec->value, &finfo );
if ( findHandle == INVALID_HANDLE_VALUE )
{
string_free( pathspec );
return -1;
}
string_new( pathname );
do
{
OBJECT * pathname_obj;
f.f_base.ptr = finfo.cFileName;
f.f_base.len = int32_t(strlen( finfo.cFileName ));
string_truncate( pathname, 0 );
path_build( &f, pathname );
pathname_obj = object_new( pathname->value );
path_register_key( pathname_obj );
files = list_push_back( files, pathname_obj );
{
int found;
file_info_t * const ff = file_info( pathname_obj, &found );
ff->is_dir = finfo.dwFileAttributes & FILE_ATTRIBUTE_DIRECTORY;
ff->is_file = !ff->is_dir;
ff->exists = 1;
timestamp_from_filetime( &ff->time, &finfo.ftLastWriteTime );
// Use the timestamp of the link target, not the link itself
// (i.e. stat instead of lstat)
if ( finfo.dwFileAttributes & FILE_ATTRIBUTE_REPARSE_POINT )
{
HANDLE hLink = CreateFileA( pathname->value, 0, FILE_SHARE_READ, NULL, OPEN_EXISTING, FILE_FLAG_BACKUP_SEMANTICS, NULL );
BY_HANDLE_FILE_INFORMATION target_finfo[ 1 ];
if ( hLink != INVALID_HANDLE_VALUE && GetFileInformationByHandle( hLink, target_finfo ) )
{
ff->is_file = target_finfo->dwFileAttributes & FILE_ATTRIBUTE_DIRECTORY ? 0 : 1;
ff->is_dir = target_finfo->dwFileAttributes & FILE_ATTRIBUTE_DIRECTORY ? 1 : 0;
timestamp_from_filetime( &ff->time, &target_finfo->ftLastWriteTime );
}
}
}
}
while ( FindNextFileA( findHandle, &finfo ) );
FindClose( findHandle );
}
string_free( pathname );
string_free( pathspec );
d->files = files;
return 0;
}
/*
* file_dirscan_() - OS specific file_dirscan() implementation
*/
void file_dirscan_( file_info_t * const d, scanback func, void * closure )
{
assert( d );
assert( d->is_dir );
/* Special case \ or d:\ : enter it */
{
char const * const name = object_str( d->name );
if ( name[ 0 ] == '\\' && !name[ 1 ] )
{
(*func)( closure, d->name, 1 /* stat()'ed */, &d->time );
}
else if ( name[ 0 ] && name[ 1 ] == ':' && name[ 2 ] && !name[ 3 ] )
{
/* We have just entered a 3-letter drive name spelling (with a
* trailing slash) into the hash table. Now enter its two-letter
* variant, without the trailing slash, so that if we try to check
* whether "c:" exists, we hit it.
*
* Jam core has workarounds for that. Given:
* x = c:\whatever\foo ;
* p = $(x:D) ;
* p2 = $(p:D) ;
* There will be no trailing slash in $(p), but there will be one in
* $(p2). But, that seems rather fragile.
*/
OBJECT * const dir_no_slash = object_new_range( name, 2 );
(*func)( closure, d->name, 1 /* stat()'ed */, &d->time );
(*func)( closure, dir_no_slash, 1 /* stat()'ed */, &d->time );
object_free( dir_no_slash );
}
}
}
/*
* file_mkdir() - create a directory
*/
int file_mkdir( char const * const path )
{
return _mkdir( path );
}
/*
* file_query_() - query information about a path from the OS
*
* The following code for collecting information about a single file needs to be
* kept synchronized with how the file_collect_dir_content_() operation is
* implemented (collects information about all files in a folder).
*/
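/* try_file_query_root() - fills in the info directly for root-like paths
* (the empty path, "\", "X:" and "X:\"), which have no parent directory entry
* to consult; returns 1 if the path was handled, 0 otherwise.
*/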
int try_file_query_root( file_info_t * const info )
{
WIN32_FILE_ATTRIBUTE_DATA fileData;
char buf[ 4 ];
char const * const pathstr = object_str( info->name );
if ( !pathstr[ 0 ] )
{
buf[ 0 ] = '.';
buf[ 1 ] = 0;
}
else if ( pathstr[ 0 ] == '\\' && ! pathstr[ 1 ] )
{
buf[ 0 ] = '\\';
buf[ 1 ] = '\0';
}
else if ( pathstr[ 1 ] == ':' )
{
if ( !pathstr[ 2 ] || ( pathstr[ 2 ] == '\\' && !pathstr[ 3 ] ) )
{
buf[ 0 ] = pathstr[ 0 ];
buf[ 1 ] = ':';
buf[ 2 ] = '\\';
buf[ 3 ] = '\0';
}
else
{
return 0;
}
}
else
{
return 0;
}
/* We have a root path */
if ( !GetFileAttributesExA( buf, GetFileExInfoStandard, &fileData ) )
{
info->is_dir = 0;
info->is_file = 0;
info->exists = 0;
timestamp_clear( &info->time );
}
else
{
info->is_dir = fileData.dwFileAttributes & FILE_ATTRIBUTE_DIRECTORY;
info->is_file = !info->is_dir;
info->exists = 1;
timestamp_from_filetime( &info->time, &fileData.ftLastWriteTime );
}
return 1;
}
void file_query_( file_info_t * const info )
{
char const * const pathstr = object_str( info->name );
const char * dir;
OBJECT * parent;
file_info_t * parent_info;
if ( try_file_query_root( info ) )
return;
if ( ( dir = strrchr( pathstr, '\\' ) ) )
{
parent = object_new_range( pathstr, int32_t(dir - pathstr) );
}
else
{
parent = object_copy( constant_empty );
}
parent_info = file_query( parent );
object_free( parent );
if ( !parent_info || !parent_info->is_dir )
{
info->is_dir = 0;
info->is_file = 0;
info->exists = 0;
timestamp_clear( &info->time );
}
else
{
info->is_dir = 0;
info->is_file = 0;
info->exists = 0;
timestamp_clear( &info->time );
if ( list_empty( parent_info->files ) )
file_collect_dir_content_( parent_info );
}
}
/*
* file_supported_fmt_resolution() - file modification timestamp resolution
*
* Returns the minimum file modification timestamp resolution supported by this
* Boost Jam implementation. File modification timestamp changes of less than
* the returned value might not be recognized.
*
* Does not take into consideration any OS or file system related restrictions.
*
* Return value 0 indicates that any value supported by the OS is also supported
* here.
*/
void file_supported_fmt_resolution( timestamp * const t )
{
/* On Windows we support nano-second file modification timestamp resolution,
* just the same as the Windows OS itself.
*/
timestamp_init( t, 0, 0 );
}
/*
* file_archscan() - scan an archive for files
*/
/* Straight from SunOS */
#define ARMAG "!<arch>\n"
#define SARMAG 8
#define ARFMAG "`\n"
struct ar_hdr
{
char ar_name[ 16 ];
char ar_date[ 12 ];
char ar_uid[ 6 ];
char ar_gid[ 6 ];
char ar_mode[ 8 ];
char ar_size[ 10 ];
char ar_fmag[ 2 ];
};
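/* All ar_hdr fields are fixed-width, space-padded ASCII text rather than
* NUL-terminated strings, which is why the scanning code below uses sscanf()
* and explicit lengths instead of ordinary C string functions.
*/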
#define SARFMAG 2
#define SARHDR sizeof( struct ar_hdr )
void file_archscan( char const * arch, scanback func, void * closure )
{
OBJECT * path = object_new( arch );
file_archive_info_t * archive = file_archive_query( path );
object_free( path );
if ( filelist_empty( archive->members ) )
{
if ( file_collect_archive_content_( archive ) < 0 )
return;
}
/* Report the collected archive content. */
{
FILELISTITER iter = filelist_begin( archive->members );
FILELISTITER const end = filelist_end( archive->members );
char buf[ MAXJPATH ];
for ( ; iter != end ; iter = filelist_next( iter ) )
{
file_info_t * member_file = filelist_item( iter );
/* Construct member path: 'archive-path(member-name)'
*/
sprintf( buf, "%s(%s)",
object_str( archive->file->name ),
object_str( member_file->name ) );
{
OBJECT * const member = object_new( buf );
(*func)( closure, member, 1 /* time valid */, &member_file->time );
object_free( member );
}
}
}
}
/*
* file_archivescan_() - OS specific file_archivescan() implementation
*/
void file_archivescan_( file_archive_info_t * const archive, archive_scanback func,
void * closure )
{
}
/*
* file_collect_archive_content_() - collects information about archive members
*/
int file_collect_archive_content_( file_archive_info_t * const archive )
{
struct ar_hdr ar_hdr;
char * string_table = 0;
char buf[ MAXJPATH ];
long offset;
const char * path = object_str( archive->file->name );
int const fd = open( path , O_RDONLY | O_BINARY, 0 );
if ( ! filelist_empty( archive->members ) ) filelist_free( archive->members );
if ( fd < 0 )
return -1;
if ( read( fd, buf, SARMAG ) != SARMAG || strncmp( ARMAG, buf, SARMAG ) )
{
close( fd );
return -1;
}
offset = SARMAG;
if ( DEBUG_BINDSCAN )
out_printf( "scan archive %s\n", path );
while ( ( read( fd, &ar_hdr, SARHDR ) == SARHDR ) &&
!memcmp( ar_hdr.ar_fmag, ARFMAG, SARFMAG ) )
{
long lar_date;
long lar_size;
char * name = 0;
char * endname;
sscanf( ar_hdr.ar_date, "%ld", &lar_date );
sscanf( ar_hdr.ar_size, "%ld", &lar_size );
lar_size = ( lar_size + 1 ) & ~1;
if ( ar_hdr.ar_name[ 0 ] == '/' && ar_hdr.ar_name[ 1 ] == '/' )
{
/* This is the "string table" entry of the symbol table, holding
* filename strings longer than 15 characters, i.e. those that do
* not fit into ar_name.
*/
string_table = (char*)BJAM_MALLOC_ATOMIC( lar_size + 1 );
if ( read( fd, string_table, lar_size ) != lar_size )
out_printf( "error reading string table\n" );
string_table[ lar_size ] = '\0';
offset += SARHDR + lar_size;
continue;
}
else if ( ar_hdr.ar_name[ 0 ] == '/' && ar_hdr.ar_name[ 1 ] != ' ' )
{
/* Long filenames are recognized by "/nnnn" where nnnn is the
* string's offset in the string table represented in ASCII
* decimals.
*/
name = string_table + atoi( ar_hdr.ar_name + 1 );
for ( endname = name; *endname && *endname != '\n'; ++endname );
}
else
{
/* normal name */
name = ar_hdr.ar_name;
endname = name + sizeof( ar_hdr.ar_name );
}
/* strip trailing white-space, slashes, and backslashes */
while ( endname-- > name )
if ( !isspace( *endname ) && ( *endname != '\\' ) && ( *endname !=
'/' ) )
break;
*++endname = 0;
/* strip leading directory names, an NT specialty */
{
char * c;
if ( (c = strrchr( name, '/' )) != nullptr )
name = c + 1;
if ( (c = strrchr( name, '\\' )) != nullptr )
name = c + 1;
}
sprintf( buf, "%.*s", int(endname - name), name );
if ( strcmp( buf, "") != 0 )
{
file_info_t * member = 0;
/* NT static libraries appear to store the objects in a sequence
* reverse to the order in which they were inserted.
* Here we reverse the stored sequence by pushing members to front of
* member file list to get the intended members order.
*/
archive->members = filelist_push_front( archive->members, object_new( buf ) );
member = filelist_front( archive->members );
member->is_file = 1;
member->is_dir = 0;
member->exists = 0;
timestamp_init( &member->time, (time_t)lar_date, 0 );
}
offset += SARHDR + lar_size;
lseek( fd, offset, 0 );
}
close( fd );
return 0;
}
#endif /* OS_NT */

View File

@@ -0,0 +1,711 @@
/*
* Copyright 2001-2004 David Abrahams.
* Copyright 2005 Rene Rivera.
* Distributed under the Boost Software License, Version 1.0.
* (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt)
*/
/*
* filesys.c - OS independent file system manipulation support
*
* External routines:
* file_build1() - construct a path string based on PATHNAME information
* file_dirscan() - scan a directory for files
* file_done() - module cleanup called on shutdown
* file_info() - return cached information about a path
* file_is_file() - return whether a path identifies an existing file
* file_query() - get cached information about a path, query the OS if
* needed
* file_remove_atexit() - schedule a path to be removed on program exit
* file_time() - get a file timestamp
*
* External routines - utilities for OS specific module implementations:
* file_query_posix_() - query information about a path using POSIX stat()
*
* Internal routines:
* file_dirscan_impl() - no-profiling worker for file_dirscan()
*/
#include "jam.h"
#include "filesys.h"
#include "lists.h"
#include "object.h"
#include "pathsys.h"
#include "jam_strings.h"
#include "output.h"
#include <assert.h>
#include <sys/stat.h>
/* Internal OS specific implementation details - have names ending with an
* underscore and are expected to be implemented in an OS specific fileXXX.c
* module.
*/
void file_dirscan_( file_info_t * const dir, scanback func, void * closure );
int file_collect_dir_content_( file_info_t * const dir );
void file_query_( file_info_t * const );
void file_archivescan_( file_archive_info_t * const archive, archive_scanback func,
void * closure );
int file_collect_archive_content_( file_archive_info_t * const archive );
void file_archive_query_( file_archive_info_t * const );
static void file_archivescan_impl( OBJECT * path, archive_scanback func,
void * closure );
static void file_dirscan_impl( OBJECT * dir, scanback func, void * closure );
static void free_file_archive_info( void * xarchive, void * data );
static void free_file_info( void * xfile, void * data );
static void remove_files_atexit( void );
static struct hash * filecache_hash;
static struct hash * archivecache_hash;
/*
* file_archive_info() - return cached information about an archive
*
* Returns a default initialized structure containing only the queried archive's name
* in case this is the first time this file system entity has been
* referenced.
*/
file_archive_info_t * file_archive_info( OBJECT * const path, int * found )
{
OBJECT * const path_key = path_as_key( path );
file_archive_info_t * archive;
if ( !archivecache_hash )
archivecache_hash = hashinit( sizeof( file_archive_info_t ),
"file_archive_info" );
archive = (file_archive_info_t *)hash_insert( archivecache_hash, path_key,
found );
if ( !*found )
{
archive->name = path_key;
archive->file = 0;
archive->members = FL0;
}
else
object_free( path_key );
return archive;
}
/*
* file_archive_query() - get cached information about an archive file path
*
* Returns 0 in case querying the OS about the given path fails, e.g. because
* the path does not reference an existing file system object.
*/
file_archive_info_t * file_archive_query( OBJECT * const path )
{
int found;
file_archive_info_t * const archive = file_archive_info( path, &found );
file_info_t * file = file_query( path );
if ( !( file && file->is_file ) )
{
return 0;
}
archive->file = file;
return archive;
}
/*
* file_archivescan() - scan an archive for members
*/
void file_archivescan( OBJECT * path, archive_scanback func, void * closure )
{
PROFILE_ENTER( FILE_ARCHIVESCAN );
file_archivescan_impl( path, func, closure );
PROFILE_EXIT( FILE_ARCHIVESCAN );
}
/*
* file_build1() - construct a path string based on PATHNAME information
*/
void file_build1( PATHNAME * const f, string * file )
{
if ( DEBUG_SEARCH )
{
out_printf( "build file: " );
if ( f->f_root.len )
out_printf( "root = '%.*s' ", f->f_root.len, f->f_root.ptr );
if ( f->f_dir.len )
out_printf( "dir = '%.*s' ", f->f_dir.len, f->f_dir.ptr );
if ( f->f_base.len )
out_printf( "base = '%.*s' ", f->f_base.len, f->f_base.ptr );
out_printf( "\n" );
}
/* Start with the grist. If the current grist is not surrounded by <>'s, add
* them.
*/
if ( f->f_grist.len )
{
if ( f->f_grist.ptr[ 0 ] != '<' )
string_push_back( file, '<' );
string_append_range(
file, f->f_grist.ptr, f->f_grist.ptr + f->f_grist.len );
if ( file->value[ file->size - 1 ] != '>' )
string_push_back( file, '>' );
}
}
/*
* file_dirscan() - scan a directory for files
*/
void file_dirscan( OBJECT * dir, scanback func, void * closure )
{
PROFILE_ENTER( FILE_DIRSCAN );
file_dirscan_impl( dir, func, closure );
PROFILE_EXIT( FILE_DIRSCAN );
}
/*
* file_done() - module cleanup called on shutdown
*/
void file_done()
{
remove_files_atexit();
if ( filecache_hash )
{
hashenumerate( filecache_hash, free_file_info, (void *)0 );
hashdone( filecache_hash );
}
if ( archivecache_hash )
{
hashenumerate( archivecache_hash, free_file_archive_info, (void *)0 );
hashdone( archivecache_hash );
}
}
/*
* file_info() - return cached information about a path
*
* Returns a default initialized structure containing only the path's normalized
* name in case this is the first time this file system entity has been
* referenced.
*/
file_info_t * file_info( OBJECT * const path, int * found )
{
OBJECT * const path_key = path_as_key( path );
file_info_t * finfo;
if ( !filecache_hash )
filecache_hash = hashinit( sizeof( file_info_t ), "file_info" );
finfo = (file_info_t *)hash_insert( filecache_hash, path_key, found );
if ( !*found )
{
finfo->name = path_key;
finfo->files = L0;
}
else
object_free( path_key );
return finfo;
}
/*
* file_is_file() - return whether a path identifies an existing file
*/
int file_is_file( OBJECT * const path )
{
file_info_t const * const ff = file_query( path );
return ff ? ff->is_file : -1;
}
/*
* file_time() - get a file timestamp
*/
int file_time( OBJECT * const path, timestamp * const time )
{
file_info_t const * const ff = file_query( path );
if ( !ff ) return -1;
timestamp_copy( time, &ff->time );
return 0;
}
/*
* file_query() - get cached information about a path, query the OS if needed
*
* Returns 0 in case querying the OS about the given path fails, e.g. because
* the path does not reference an existing file system object.
*/
file_info_t * file_query( OBJECT * const path )
{
/* FIXME: Add tracking for disappearing files (i.e. those that can not be
* detected by stat() even though they had been detected successfully
* before) and see how they should be handled in the rest of Boost Jam code.
* Possibly allow Jamfiles to specify some files as 'volatile' which would
* make Boost Jam avoid caching information about those files and instead
* ask the OS about them every time.
*/
int found;
file_info_t * const ff = file_info( path, &found );
if ( !found )
{
file_query_( ff );
if ( ff->exists )
{
/* Set the path's timestamp to 1 in case it is 0 or undetected to avoid
* confusion with non-existing paths.
*/
if ( timestamp_empty( &ff->time ) )
timestamp_init( &ff->time, 1, 0 );
}
}
if ( !ff->exists )
{
return 0;
}
return ff;
}
#ifndef OS_NT
/*
* file_query_posix_() - query information about a path using POSIX stat()
*
* Fallback file_query_() implementation for OS specific modules.
*
* Note that the Windows POSIX stat() function implementation suffers from
* several issues:
* * Does not support file timestamps with resolution finer than 1 second,
* meaning it can not be used to detect file timestamp changes of less than
* 1 second. One possible consequence is that some fast-paced touch commands
* (such as those done by Boost Build's internal testing system if it does
* not do some extra waiting) will not be detected correctly by the build
* system.
* * Returns file modification times automatically adjusted for daylight
* savings time even though daylight savings time should have nothing to do
* with internal time representation.
*/
void file_query_posix_( file_info_t * const info )
{
struct stat statbuf;
char const * const pathstr = object_str( info->name );
char const * const pathspec = *pathstr ? pathstr : ".";
if ( stat( pathspec, &statbuf ) < 0 )
{
info->is_file = 0;
info->is_dir = 0;
info->exists = 0;
timestamp_clear( &info->time );
}
else
{
info->is_file = statbuf.st_mode & S_IFREG ? 1 : 0;
info->is_dir = statbuf.st_mode & S_IFDIR ? 1 : 0;
info->exists = 1;
#if defined(_POSIX_VERSION) && _POSIX_VERSION >= 200809
#if defined(OS_MACOSX)
timestamp_init( &info->time, statbuf.st_mtimespec.tv_sec, statbuf.st_mtimespec.tv_nsec );
#else
timestamp_init( &info->time, statbuf.st_mtim.tv_sec, statbuf.st_mtim.tv_nsec );
#endif
#else
timestamp_init( &info->time, statbuf.st_mtime, 0 );
#endif
}
}
/*
* file_supported_fmt_resolution() - file modification timestamp resolution
*
* Returns the minimum file modification timestamp resolution supported by this
* Boost Jam implementation. File modification timestamp changes of less than
* the returned value might not be recognized.
*
* Does not take into consideration any OS or file system related restrictions.
*
* Return value 0 indicates that any value supported by the OS is also supported
* here.
*/
void file_supported_fmt_resolution( timestamp * const t )
{
#if defined(_POSIX_VERSION) && _POSIX_VERSION >= 200809
timestamp_init( t, 0, 1 );
#else
/* The current implementation does not support file modification timestamp
* resolution of less than one second.
*/
timestamp_init( t, 1, 0 );
#endif
}
#endif
/*
* file_remove_atexit() - schedule a path to be removed on program exit
*/
static LIST * files_to_remove = L0;
void file_remove_atexit( OBJECT * const path )
{
files_to_remove = list_push_back( files_to_remove, object_copy( path ) );
}
/*
* file_archivescan_impl() - no-profiling worker for file_archivescan()
*/
static void file_archivescan_impl( OBJECT * path, archive_scanback func, void * closure )
{
file_archive_info_t * const archive = file_archive_query( path );
if ( !archive || !archive->file->is_file )
return;
/* Lazily collect the archive content information. */
if ( filelist_empty( archive->members ) )
{
if ( DEBUG_BINDSCAN )
printf( "scan archive %s\n", object_str( archive->file->name ) );
if ( file_collect_archive_content_( archive ) < 0 )
return;
}
/* OS specific part of the file_archivescan operation. */
file_archivescan_( archive, func, closure );
/* Report the collected archive content. */
{
FILELISTITER iter = filelist_begin( archive->members );
FILELISTITER const end = filelist_end( archive->members );
char buf[ MAXJPATH ];
for ( ; iter != end ; iter = filelist_next( iter ) )
{
file_info_t * member_file = filelist_item( iter );
LIST * symbols = member_file->files;
/* Construct member path: 'archive-path(member-name)'
*/
sprintf( buf, "%s(%s)",
object_str( archive->file->name ),
object_str( member_file->name ) );
{
OBJECT * const member = object_new( buf );
(*func)( closure, member, symbols, 1, &member_file->time );
object_free( member );
}
}
}
}
/*
* file_dirscan_impl() - no-profiling worker for file_dirscan()
*/
static void file_dirscan_impl( OBJECT * dir, scanback func, void * closure )
{
file_info_t * const d = file_query( dir );
if ( !d || !d->is_dir )
return;
/* Lazily collect the directory content information. */
if ( list_empty( d->files ) )
{
if ( DEBUG_BINDSCAN )
out_printf( "scan directory %s\n", object_str( d->name ) );
if ( file_collect_dir_content_( d ) < 0 )
return;
}
/* OS specific part of the file_dirscan operation. */
file_dirscan_( d, func, closure );
/* Report the collected directory content. */
{
LISTITER iter = list_begin( d->files );
LISTITER const end = list_end( d->files );
for ( ; iter != end; iter = list_next( iter ) )
{
OBJECT * const path = list_item( iter );
file_info_t const * const ffq = file_query( path );
/* Using a file name read from a file_info_t structure allows OS
* specific implementations to store some kind of a normalized file
* name there. Using such a normalized file name then allows us to
* correctly recognize different file paths actually identifying the
* same file. For instance, an implementation may:
* - convert all file names internally to lower case on a case
* insensitive file system
* - convert NTFS paths to their long path variants, as on that
* file system each entity may have both a long and a
* short path variant thus allowing for many different path
* strings identifying the same file.
*/
(*func)( closure, ffq->name, 1 /* stat()'ed */, &ffq->time );
}
}
}
static void free_file_archive_info( void * xarchive, void * data )
{
file_archive_info_t * const archive = (file_archive_info_t *)xarchive;
if ( archive ) filelist_free( archive->members );
}
static void free_file_info( void * xfile, void * data )
{
file_info_t * const file = (file_info_t *)xfile;
object_free( file->name );
list_free( file->files );
}
static void remove_files_atexit( void )
{
LISTITER iter = list_begin( files_to_remove );
LISTITER const end = list_end( files_to_remove );
for ( ; iter != end; iter = list_next( iter ) )
remove( object_str( list_item( iter ) ) );
list_free( files_to_remove );
files_to_remove = L0;
}
/*
* FILELIST linked-list implementation
*/
FILELIST * filelist_new( OBJECT * path )
{
FILELIST * list = (FILELIST *)BJAM_MALLOC( sizeof( FILELIST ) );
memset( list, 0, sizeof( *list ) );
list->size = 0;
list->head = 0;
list->tail = 0;
return filelist_push_back( list, path );
}
FILELIST * filelist_push_back( FILELIST * list, OBJECT * path )
{
FILEITEM * item;
file_info_t * file;
/* Lazy initialization
*/
if ( filelist_empty( list ) )
{
list = filelist_new( path );
return list;
}
item = (FILEITEM *)BJAM_MALLOC( sizeof( FILEITEM ) );
memset( item, 0, sizeof( *item ) );
item->value = (file_info_t *)BJAM_MALLOC( sizeof( file_info_t ) );
file = item->value;
memset( file, 0, sizeof( *file ) );
file->name = path;
file->files = L0;
if ( list->tail )
{
list->tail->next = item;
}
else
{
list->head = item;
}
list->tail = item;
list->size++;
return list;
}
FILELIST * filelist_push_front( FILELIST * list, OBJECT * path )
{
FILEITEM * item;
file_info_t * file;
/* Lazy initialization
*/
if ( filelist_empty( list ) )
{
list = filelist_new( path );
return list;
}
item = (FILEITEM *)BJAM_MALLOC( sizeof( FILEITEM ) );
memset( item, 0, sizeof( *item ) );
item->value = (file_info_t *)BJAM_MALLOC( sizeof( file_info_t ) );
file = item->value;
memset( file, 0, sizeof( *file ) );
file->name = path;
file->files = L0;
if ( list->head )
{
item->next = list->head;
}
else
{
list->tail = item;
}
list->head = item;
list->size++;
return list;
}
FILELIST * filelist_pop_front( FILELIST * list )
{
FILEITEM * item;
if ( filelist_empty( list ) ) return list;
item = list->head;
if ( item )
{
if ( item->value ) free_file_info( item->value, 0 );
list->head = item->next;
list->size--;
if ( !list->size ) list->tail = list->head;
#ifdef BJAM_NO_MEM_CACHE
BJAM_FREE( item );
#endif
}
return list;
}
int filelist_length( FILELIST * list )
{
int result = 0;
if ( !filelist_empty( list ) ) result = list->size;
return result;
}
void filelist_free( FILELIST * list )
{
if ( filelist_empty( list ) ) return;
while ( filelist_length( list ) ) filelist_pop_front( list );
#ifdef BJAM_NO_MEM_CACHE
BJAM_FREE( list );
#endif
}
int filelist_empty( FILELIST * list )
{
return ( list == FL0 );
}
FILELISTITER filelist_begin( FILELIST * list )
{
if ( filelist_empty( list )
|| list->head == 0 ) return (FILELISTITER)0;
return &list->head->value;
}
FILELISTITER filelist_end( FILELIST * list )
{
return (FILELISTITER)0;
}
FILELISTITER filelist_next( FILELISTITER iter )
{
if ( iter )
{
/* Given FILEITEM.value is defined as first member of FILEITEM structure
* and FILELISTITER = &FILEITEM.value,
* FILEITEM = *(FILEITEM **)FILELISTITER
*/
FILEITEM * item = (FILEITEM *)iter;
iter = ( item->next ? &item->next->value : (FILELISTITER)0 );
}
return iter;
}
file_info_t * filelist_item( FILELISTITER it )
{
file_info_t * result = (file_info_t *)0;
if ( it )
{
result = (file_info_t *)*it;
}
return result;
}
file_info_t * filelist_front( FILELIST * list )
{
if ( filelist_empty( list )
|| list->head == 0 ) return (file_info_t *)0;
return list->head->value;
}
file_info_t * filelist_back( FILELIST * list )
{
if ( filelist_empty( list )
|| list->tail == 0 ) return (file_info_t *)0;
return list->tail->value;
}
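/* Usage sketch: walking a FILELIST with the iterator API above. filelist_end()
* is simply the null iterator, so the loop mirrors the reporting code in
* file_archivescan_impl():
*
*   FILELISTITER it = filelist_begin( archive->members );
*   FILELISTITER const end = filelist_end( archive->members );
*   for ( ; it != end; it = filelist_next( it ) )
*       out_printf( "%s\n", object_str( filelist_item( it )->name ) );
*/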

View File

@@ -0,0 +1,125 @@
/*
* Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.
*
* This file is part of Jam - see jam.c for Copyright information.
*/
/* This file is ALSO:
* Copyright 2001-2004 David Abrahams.
* Distributed under the Boost Software License, Version 1.0.
* (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt)
*/
/*
* filesys.h - OS specific file routines
*/
#ifndef FILESYS_DWA20011025_H
#define FILESYS_DWA20011025_H
#include "config.h"
#include "hash.h"
#include "lists.h"
#include "object.h"
#include "pathsys.h"
#include "timestamp.h"
#include <string>
typedef struct file_info_t
{
OBJECT * name;
char is_file;
char is_dir;
char exists;
timestamp time;
LIST * files;
} file_info_t;
typedef struct file_item FILEITEM;
struct file_item
{
file_info_t * value; /* expected to be equivalent with &FILEITEM */
FILEITEM * next;
};
typedef struct file_list
{
FILEITEM * head;
FILEITEM * tail;
int size;
} FILELIST;
typedef file_info_t * * FILELISTITER; /* also &FILEITEM equivalent */
typedef struct file_archive_info_t
{
OBJECT * name;
file_info_t * file;
FILELIST * members;
} file_archive_info_t;
typedef void (*archive_scanback)( void * closure, OBJECT * path, LIST * symbols,
int found, timestamp const * const );
typedef void (*scanback)( void * closure, OBJECT * path, int found,
timestamp const * const );
void file_archscan( char const * arch, scanback func, void * closure );
void file_archivescan( OBJECT * path, archive_scanback func, void * closure );
void file_build1( PATHNAME * const f, string * file ) ;
void file_dirscan( OBJECT * dir, scanback func, void * closure );
file_info_t * file_info( OBJECT * const path, int * found );
int file_is_file( OBJECT * const path );
int file_mkdir( char const * const path );
file_info_t * file_query( OBJECT * const path );
void file_remove_atexit( OBJECT * const path );
void file_supported_fmt_resolution( timestamp * const );
int file_time( OBJECT * const path, timestamp * const );
namespace b2 { namespace filesys {
inline bool is_file(const std::string &path)
{
OBJECT * path_o = object_new(path.c_str());
bool result = file_is_file(path_o) == 1;
object_free(path_o);
return result;
}
}}
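/* Usage sketch (hypothetical path and caller): the inline helper above lets
* C++ code use the OBJECT-based C API without managing OBJECT lifetimes
* itself, e.g.
*
*   if ( b2::filesys::is_file( "Jamroot.jam" ) )
*       load_jamroot();
*/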
/* Archive/library file support */
file_archive_info_t * file_archive_info( OBJECT * const path, int * found );
file_archive_info_t * file_archive_query( OBJECT * const path );
/* FILELIST linked-list */
FILELIST * filelist_new( OBJECT * path );
FILELIST * filelist_push_back( FILELIST * list, OBJECT * path );
FILELIST * filelist_push_front( FILELIST * list, OBJECT * path );
FILELIST * filelist_pop_front( FILELIST * list );
int filelist_length( FILELIST * list );
void filelist_free( FILELIST * list );
FILELISTITER filelist_begin( FILELIST * list );
FILELISTITER filelist_end( FILELIST * list );
FILELISTITER filelist_next( FILELISTITER it );
file_info_t * filelist_item( FILELISTITER it );
file_info_t * filelist_front( FILELIST * list );
file_info_t * filelist_back( FILELIST * list );
int filelist_empty( FILELIST * list );
#define FL0 ((FILELIST *)0)
/* Internal utility worker functions. */
void file_query_posix_( file_info_t * const );
void file_done();
#endif

View File

@@ -0,0 +1,533 @@
/*
* Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.
*
* This file is part of Jam - see jam.c for Copyright information.
*/
/* This file is ALSO:
* Copyright 2001-2004 David Abrahams.
* Copyright 2005 Rene Rivera.
* Distributed under the Boost Software License, Version 1.0.
* (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt)
*/
/*
* fileunix.c - manipulate file names and scan directories on UNIX/AmigaOS
*
* External routines:
* file_archscan() - scan an archive for files
* file_mkdir() - create a directory
* file_supported_fmt_resolution() - file modification timestamp resolution
*
* External routines called only via routines in filesys.c:
* file_collect_dir_content_() - collects directory content information
* file_dirscan_() - OS specific file_dirscan() implementation
* file_query_() - query information about a path from the OS
* file_collect_archive_content_() - collects information about archive members
* file_archivescan_() - OS specific file_archivescan() implementation
*/
#include "jam.h"
#ifdef USE_FILEUNIX
#include "filesys.h"
#include "object.h"
#include "pathsys.h"
#include "jam_strings.h"
#include "output.h"
#include <assert.h>
#include <errno.h>
#include <stdio.h>
#include <sys/stat.h> /* needed for mkdir() */
#if defined( sun ) || defined( __sun ) || defined( linux )
# include <unistd.h> /* needed for read and close prototype */
#endif
#if defined( OS_SEQUENT ) || \
defined( OS_DGUX ) || \
defined( OS_SCO ) || \
defined( OS_ISC )
# define PORTAR 1
#endif
#if defined( OS_RHAPSODY ) || defined( OS_MACOSX ) || defined( OS_NEXT )
# include <sys/dir.h>
# include <unistd.h> /* need unistd for rhapsody's proper lseek */
# define STRUCT_DIRENT struct direct
#else
# include <dirent.h>
# define STRUCT_DIRENT struct dirent
#endif
#ifdef OS_COHERENT
# include <arcoff.h>
# define HAVE_AR
#endif
#if defined( OS_MVS ) || defined( OS_INTERIX )
#define ARMAG "!<arch>\n"
#define SARMAG 8
#define ARFMAG "`\n"
#define HAVE_AR
struct ar_hdr /* archive file member header - printable ascii */
{
char ar_name[ 16 ]; /* file member name - `/' terminated */
char ar_date[ 12 ]; /* file member date - decimal */
char ar_uid[ 6 ]; /* file member user id - decimal */
char ar_gid[ 6 ]; /* file member group id - decimal */
char ar_mode[ 8 ]; /* file member mode - octal */
char ar_size[ 10 ]; /* file member size - decimal */
char ar_fmag[ 2 ]; /* ARFMAG - string to end header */
};
#endif
#if defined( OS_QNX ) || \
defined( OS_BEOS ) || \
defined( OS_HAIKU ) || \
defined( OS_MPEIX )
# define NO_AR
# define HAVE_AR
#endif
#ifndef HAVE_AR
# ifdef OS_AIX
/* Define these for AIX to get the definitions for both small and big archive
* file format variants.
*/
# define __AR_SMALL__
# define __AR_BIG__
# endif
# include <ar.h>
#endif
/*
* file_collect_dir_content_() - collects directory content information
*/
int file_collect_dir_content_( file_info_t * const d )
{
LIST * files = L0;
PATHNAME f;
int n;
STRUCT_DIRENT ** namelist;
STRUCT_DIRENT * dirent;
string path[ 1 ];
char const * dirstr;
assert( d );
assert( d->is_dir );
assert( list_empty( d->files ) );
dirstr = object_str( d->name );
memset( (char *)&f, '\0', sizeof( f ) );
f.f_dir.ptr = dirstr;
f.f_dir.len = strlen( dirstr );
if ( !*dirstr ) dirstr = ".";
if ( -1 == ( n = scandir( dirstr, &namelist, NULL, alphasort ) ) )
{
if ( errno != ENOENT && errno != ENOTDIR )
err_printf( "[errno %d] scandir '%s' failed: %s\n",
errno, dirstr, strerror(errno) );
return -1;
}
string_new( path );
while ( n-- )
{
OBJECT * name;
dirent = namelist[ n ];
f.f_base.ptr = dirent->d_name
#ifdef old_sinix
- 2 /* Broken structure definition on sinix. */
#endif
;
f.f_base.len = strlen( f.f_base.ptr );
string_truncate( path, 0 );
path_build( &f, path );
name = object_new( path->value );
/* Immediately stat the file to preserve invariants. */
if ( file_query( name ) )
files = list_push_back( files, name );
else
object_free( name );
free( dirent );
}
string_free( path );
free( namelist );
d->files = files;
return 0;
}
/*
* file_dirscan_() - OS specific file_dirscan() implementation
*/
void file_dirscan_( file_info_t * const d, scanback func, void * closure )
{
assert( d );
assert( d->is_dir );
/* Special case / : enter it */
if ( !strcmp( object_str( d->name ), "/" ) )
(*func)( closure, d->name, 1 /* stat()'ed */, &d->time );
}
/*
* file_mkdir() - create a directory
*/
int file_mkdir( char const * const path )
{
/* Explicit cast to remove const modifiers and avoid related compiler
* warnings displayed when using the intel compiler.
*/
return mkdir( (char *)path, 0777 );
}
/*
* file_query_() - query information about a path from the OS
*/
void file_query_( file_info_t * const info )
{
file_query_posix_( info );
}
int file_collect_archive_content_( file_archive_info_t * const archive );
/*
* file_archscan() - scan an archive for files
*/
void file_archscan( char const * arch, scanback func, void * closure )
{
OBJECT * path = object_new( arch );
file_archive_info_t * archive = file_archive_query( path );
object_free( path );
if ( filelist_empty( archive->members ) )
{
if ( file_collect_archive_content_( archive ) < 0 )
return;
}
/* Report the collected archive content. */
{
FILELISTITER iter = filelist_begin( archive->members );
FILELISTITER const end = filelist_end( archive->members );
char buf[ MAXJPATH ];
for ( ; iter != end ; iter = filelist_next( iter ) )
{
file_info_t * member_file = filelist_item( iter );
/* Construct member path: 'archive-path(member-name)'
*/
sprintf( buf, "%s(%s)",
object_str( archive->file->name ),
object_str( member_file->name ) );
{
OBJECT * const member = object_new( buf );
(*func)( closure, member, 1 /* time valid */, &member_file->time );
object_free( member );
}
}
}
}
/*
* file_archivescan_() - OS specific file_archivescan() implementation
*/
void file_archivescan_( file_archive_info_t * const archive, archive_scanback func,
void * closure )
{
}
/*
* file_collect_archive_content_() - collects information about archive members
*/
#ifndef AIAMAG /* God-fearing UNIX */
#define SARFMAG 2
#define SARHDR sizeof( struct ar_hdr )
int file_collect_archive_content_( file_archive_info_t * const archive )
{
#ifndef NO_AR
struct ar_hdr ar_hdr;
char * string_table = 0;
char buf[ MAXJPATH ];
long offset;
int fd;
const char * path = object_str( archive->file->name );
if ( ! filelist_empty( archive->members ) ) filelist_free( archive->members );
if ( ( fd = open( path, O_RDONLY, 0 ) ) < 0 )
return -1;
if ( read( fd, buf, SARMAG ) != SARMAG ||
strncmp( ARMAG, buf, SARMAG ) )
{
close( fd );
return -1;
}
offset = SARMAG;
if ( DEBUG_BINDSCAN )
out_printf( "scan archive %s\n", path );
while ( ( read( fd, &ar_hdr, SARHDR ) == SARHDR ) &&
!( memcmp( ar_hdr.ar_fmag, ARFMAG, SARFMAG )
#ifdef ARFZMAG
/* OSF also has a compressed format */
&& memcmp( ar_hdr.ar_fmag, ARFZMAG, SARFMAG )
#endif
) )
{
char lar_name_[ 257 ];
char * lar_name = lar_name_ + 1;
long lar_date;
long lar_size;
long lar_offset;
char * c;
char * src;
char * dest;
int32_t ar_hdr_name_size = sizeof( ar_hdr.ar_name ); // Workaround for sizeof strncpy warning.
strncpy( lar_name, ar_hdr.ar_name, ar_hdr_name_size );
sscanf( ar_hdr.ar_date, "%ld", &lar_date );
sscanf( ar_hdr.ar_size, "%ld", &lar_size );
if ( ar_hdr.ar_name[ 0 ] == '/' )
{
if ( ar_hdr.ar_name[ 1 ] == '/' )
{
/* This is the "string table" entry of the symbol table, holding
* filename strings longer than 15 characters, i.e. those that
* do not fit into ar_name.
*/
string_table = (char *)BJAM_MALLOC_ATOMIC( lar_size );
lseek( fd, offset + SARHDR, 0 );
if ( read( fd, string_table, lar_size ) != lar_size )
out_printf("error reading string table\n");
}
else if ( string_table && ar_hdr.ar_name[ 1 ] != ' ' )
{
/* Long filenames are recognized by "/nnnn" where nnnn is the
* offset of the string in the string table represented in ASCII
* decimals.
*/
dest = lar_name;
lar_offset = atoi( lar_name + 1 );
src = &string_table[ lar_offset ];
while ( *src != '/' )
*dest++ = *src++;
*dest = '/';
}
}
c = lar_name - 1;
while ( ( *++c != ' ' ) && ( *c != '/' ) );
*c = '\0';
if ( DEBUG_BINDSCAN )
out_printf( "archive name %s found\n", lar_name );
sprintf( buf, "%s", lar_name );
if ( strcmp( buf, "") != 0 )
{
file_info_t * member = 0;
archive->members = filelist_push_back( archive->members, object_new( buf ) );
member = filelist_back( archive->members );
member->is_file = 1;
member->is_dir = 0;
member->exists = 0;
timestamp_init( &member->time, (time_t)lar_date, 0 );
}
offset += SARHDR + ( ( lar_size + 1 ) & ~1 );
lseek( fd, offset, 0 );
}
if ( string_table )
BJAM_FREE( string_table );
close( fd );
#endif /* NO_AR */
return 0;
}
#else /* AIAMAG - RS6000 AIX */
static void collect_archive_content_small( int fd, file_archive_info_t * const archive )
{
struct fl_hdr fl_hdr;
struct {
struct ar_hdr hdr;
char pad[ 256 ];
} ar_hdr ;
char buf[ MAXJPATH ];
long offset;
const char * path = object_str( archive->file->name );
if ( read( fd, (char *)&fl_hdr, FL_HSZ ) != FL_HSZ )
return;
sscanf( fl_hdr.fl_fstmoff, "%ld", &offset );
if ( DEBUG_BINDSCAN )
out_printf( "scan archive %s\n", path );
while ( offset > 0 && lseek( fd, offset, 0 ) >= 0 &&
read( fd, &ar_hdr, sizeof( ar_hdr ) ) >= (int)sizeof( ar_hdr.hdr ) )
{
long lar_date;
int lar_namlen;
sscanf( ar_hdr.hdr.ar_namlen, "%d" , &lar_namlen );
sscanf( ar_hdr.hdr.ar_date , "%ld", &lar_date );
sscanf( ar_hdr.hdr.ar_nxtmem, "%ld", &offset );
if ( !lar_namlen )
continue;
ar_hdr.hdr._ar_name.ar_name[ lar_namlen ] = '\0';
sprintf( buf, "%s", ar_hdr.hdr._ar_name.ar_name );
if ( strcmp( buf, "") != 0 )
{
file_info_t * member = 0;
archive->members = filelist_push_back( archive->members, object_new( buf ) );
member = filelist_back( archive->members );
member->is_file = 1;
member->is_dir = 0;
member->exists = 0;
timestamp_init( &member->time, (time_t)lar_date, 0 );
}
}
}
/* Check for OS versions supporting the big variant. */
#ifdef AR_HSZ_BIG
static void collect_archive_content_big( int fd, file_archive_info_t * const archive )
{
struct fl_hdr_big fl_hdr;
struct {
struct ar_hdr_big hdr;
char pad[ 256 ];
} ar_hdr ;
char buf[ MAXJPATH ];
long long offset;
const char * path = object_str( archive->file->name );
if ( read( fd, (char *)&fl_hdr, FL_HSZ_BIG ) != FL_HSZ_BIG )
return;
sscanf( fl_hdr.fl_fstmoff, "%lld", &offset );
if ( DEBUG_BINDSCAN )
out_printf( "scan archive %s\n", path );
while ( offset > 0 && lseek( fd, offset, 0 ) >= 0 &&
read( fd, &ar_hdr, sizeof( ar_hdr ) ) >= sizeof( ar_hdr.hdr ) )
{
long lar_date;
int lar_namlen;
sscanf( ar_hdr.hdr.ar_namlen, "%d" , &lar_namlen );
sscanf( ar_hdr.hdr.ar_date , "%ld" , &lar_date );
sscanf( ar_hdr.hdr.ar_nxtmem, "%lld", &offset );
if ( !lar_namlen )
continue;
ar_hdr.hdr._ar_name.ar_name[ lar_namlen ] = '\0';
sprintf( buf, "%s", ar_hdr.hdr._ar_name.ar_name );
if ( strcmp( buf, "") != 0 )
{
file_info_t * member = 0;
archive->members = filelist_push_back( archive->members, object_new( buf ) );
member = filelist_back( archive->members );
member->is_file = 1;
member->is_dir = 0;
member->exists = 0;
timestamp_init( &member->time, (time_t)lar_date, 0 );
}
}
}
#endif /* AR_HSZ_BIG */
int file_collect_archive_content_( file_archive_info_t * const archive )
{
int fd;
char fl_magic[ SAIAMAG ];
const char * path = object_str( archive->file->name );
if ( ! filelist_empty( archive->members ) ) filelist_free( archive->members );
if ( ( fd = open( path, O_RDONLY, 0 ) ) < 0 )
return -1;
if ( read( fd, fl_magic, SAIAMAG ) != SAIAMAG ||
lseek( fd, 0, SEEK_SET ) == -1 )
{
close( fd );
return -1;
}
if ( !strncmp( AIAMAG, fl_magic, SAIAMAG ) )
{
/* read small variant */
collect_archive_content_small( fd, archive );
}
#ifdef AR_HSZ_BIG
else if ( !strncmp( AIAMAGBIG, fl_magic, SAIAMAG ) )
{
/* read big variant */
collect_archive_content_big( fd, archive );
}
#endif
close( fd );
return 0;
}
#endif /* AIAMAG - RS6000 AIX */
#endif /* USE_FILEUNIX */

View File

@@ -0,0 +1,440 @@
/*
* Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.
*
* This file is part of Jam - see jam.c for Copyright information.
*/
/* This file is ALSO:
* Copyright 2001-2004 David Abrahams.
* Copyright 2005 Rene Rivera.
* Copyright 2015 Artur Shepilko.
* Distributed under the Boost Software License, Version 1.0.
* (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt)
*/
#include "jam.h"
#include "filesys.h"
#include "object.h"
#include "pathsys.h"
#include "jam_strings.h"
#ifdef OS_VMS
/*
* filevms.c - manipulate file names and scan directories on VMS.
*
* This implementation is based on POSIX-style path manipulation.
*
* VMS CRTL directly supports both POSIX- and native VMS-style path expressions,
* with the POSIX-to-VMS path translation performed internally by the same
* set of functions. For the most part such processing is transparent, with
* few differences mainly related to file versions (in POSIX mode only the most
* recent version is visible).
*
* This should allow us to re-use fileunix.c implementation,
* excluding archive/library member processing.
*
* Thus in jam-files the path references can also remain POSIX/UNIX-style on all
* levels EXCEPT in actions scope, where these must be translated to the native
* VMS-style. This approach is somewhat similar to jam CYGWIN handling.
*
*
* External routines:
* file_archscan() - scan an archive for files
* file_mkdir() - create a directory
* file_supported_fmt_resolution() - file modification timestamp resolution
*
* External routines called only via routines in filesys.c:
* file_collect_dir_content_() - collects directory content information
* file_dirscan_() - OS specific file_dirscan() implementation
* file_query_() - query information about a path from the OS
* file_collect_archive_content_() - collects information about archive members
* file_archivescan_() - OS specific file_archivescan() implementation
*/
#include <assert.h>
#include <stdio.h>
#include <sys/stat.h> /* needed for mkdir() */
#include <unistd.h> /* needed for read and close prototype */
#include <dirent.h>
#define STRUCT_DIRENT struct dirent
void path_translate_to_os_( char const * f, string * file );
/*
* file_collect_dir_content_() - collects directory content information
*/
int file_collect_dir_content_( file_info_t * const d )
{
LIST * files = L0;
PATHNAME f;
DIR * dd;
STRUCT_DIRENT * dirent;
string path[ 1 ];
char const * dirstr;
assert( d );
assert( d->is_dir );
assert( list_empty( d->files ) );
dirstr = object_str( d->name );
memset( (char *)&f, '\0', sizeof( f ) );
f.f_dir.ptr = dirstr;
f.f_dir.len = strlen( dirstr );
if ( !*dirstr ) dirstr = ".";
if ( !( dd = opendir( dirstr ) ) )
return -1;
string_new( path );
while ( ( dirent = readdir( dd ) ) )
{
OBJECT * name;
f.f_base.ptr = dirent->d_name
#ifdef old_sinix
- 2 /* Broken structure definition on sinix. */
#endif
;
f.f_base.len = strlen( f.f_base.ptr );
string_truncate( path, 0 );
path_build( &f, path );
name = object_new( path->value );
/* Immediately stat the file to preserve invariants. */
if ( file_query( name ) )
files = list_push_back( files, name );
else
object_free( name );
}
string_free( path );
closedir( dd );
d->files = files;
return 0;
}
/*
* file_dirscan_() - OS specific file_dirscan() implementation
*/
void file_dirscan_( file_info_t * const d, scanback func, void * closure )
{
assert( d );
assert( d->is_dir );
/* Special case / : enter it */
if ( !strcmp( object_str( d->name ), "/" ) )
(*func)( closure, d->name, 1 /* stat()'ed */, &d->time );
}
/*
* file_mkdir() - create a directory
*/
int file_mkdir( char const * const path )
{
/* Explicit cast to remove const modifiers and avoid related compiler
* warnings displayed when using the intel compiler.
*/
return mkdir( (char *)path, 0777 );
}
/*
* file_query_() - query information about a path from the OS
*/
void file_query_( file_info_t * const info )
{
file_query_posix_( info );
}
/*------------------------------------------------------------------------------
* VMS-specific processing:
*
*/
#include <descrip.h>
#include <lbrdef.h>
#include <credef.h>
#include <mhddef.h>
#include <lhidef.h>
#include <lib$routines.h>
#include <starlet.h>
/* Supply missing prototypes for lbr$-routines*/
#ifdef __cplusplus
extern "C" {
#endif /* __cplusplus */
int lbr$set_module(
void **,
unsigned long *,
struct dsc$descriptor_s *,
unsigned short *,
void * );
int lbr$open( void **,
struct dsc$descriptor_s *,
void *,
void *,
void *,
void *,
void * );
int lbr$ini_control(
void **,
unsigned long *,
unsigned long *,
void * );
int lbr$get_index(
void **,
unsigned long * const,
int (*func)( struct dsc$descriptor_s *, unsigned long *),
void * );
int lbr$search(
void **,
unsigned long * const,
unsigned short *,
int (*func)( struct dsc$descriptor_s *, unsigned long *),
unsigned long *);
int lbr$close(
void ** );
#ifdef __cplusplus
}
#endif /* __cplusplus */
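/*
* file_cvttime() - convert a 64-bit VMS system time to a POSIX time_t
*
* VMS system time counts 100 ns ticks since 17-Nov-1858, so the conversion
* below subtracts the quadword value for 1-Jan-1970 (bastim) and divides by
* 10,000,000 (the 'divisor' constant) to obtain whole seconds.
*/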
static void
file_cvttime(
unsigned int *curtime,
time_t *unixtime )
{
static const int32_t divisor = 10000000;
static unsigned int bastim[2] = { 0x4BEB4000, 0x007C9567 }; /* 1/1/1970 */
int delta[2], remainder;
lib$subx( curtime, bastim, delta );
lib$ediv( &divisor, delta, unixtime, &remainder );
}
static void downcase_inplace( char * p )
{
for ( ; *p; ++p )
*p = tolower( *p );
}
static file_archive_info_t * m_archive = NULL;
static file_info_t * m_member_found = NULL;
static void * m_lbr_context = NULL;
static unsigned short * m_rfa_found = NULL;
static const unsigned long LBR_MODINDEX_NUM = 1,
LBR_SYMINDEX_NUM = 2; /* GST:global symbol table */
static unsigned int set_archive_symbol( struct dsc$descriptor_s *symbol,
unsigned long *rfa )
{
file_info_t * member = m_member_found;
char buf[ MAXJPATH ] = { 0 };
strncpy(buf, symbol->dsc$a_pointer, symbol->dsc$w_length);
buf[ symbol->dsc$w_length ] = 0;
member->files = list_push_back( member->files, object_new( buf ) );
return ( 1 ); /* continue */
}
static unsigned int set_archive_member( struct dsc$descriptor_s *module,
unsigned long *rfa )
{
file_archive_info_t * archive = m_archive;
static struct dsc$descriptor_s bufdsc =
{0, DSC$K_DTYPE_T, DSC$K_CLASS_S, NULL};
struct mhddef *mhd;
char filename[128] = { 0 };
char buf[ MAXJPATH ] = { 0 };
int status;
time_t library_date;
register int i;
register char *p;
bufdsc.dsc$a_pointer = filename;
bufdsc.dsc$w_length = sizeof( filename );
status = lbr$set_module( &m_lbr_context, rfa, &bufdsc,
&bufdsc.dsc$w_length, NULL );
if ( !(status & 1) )
return ( 1 ); /* continue */
mhd = (struct mhddef *)filename;
file_cvttime( &mhd->mhd$l_datim, &library_date );
/* strncpy( filename, module->dsc$a_pointer, module->dsc$w_length );
*/
for ( i = 0, p = module->dsc$a_pointer; i < module->dsc$w_length; ++i, ++p )
filename[ i ] = *p;
filename[ i ] = '\0';
if ( strcmp( filename, "" ) != 0 )
{
file_info_t * member = 0;
/* Construct member's filename as lowercase "module.obj" */
sprintf( buf, "%s.obj", filename );
downcase_inplace( buf );
archive->members = filelist_push_back( archive->members, object_new( buf ) );
member = filelist_back( archive->members );
member->is_file = 1;
member->is_dir = 0;
member->exists = 0;
timestamp_init( &member->time, (time_t)library_date, 0 );
m_member_found = member;
m_rfa_found = rfa;
status = lbr$search(&m_lbr_context, &LBR_SYMINDEX_NUM, m_rfa_found, set_archive_symbol, NULL);
}
return ( 1 ); /* continue */
}
void file_archscan( char const * arch, scanback func, void * closure )
{
OBJECT * path = object_new( arch );
file_archive_info_t * archive = file_archive_query( path );
object_free( path );
if ( filelist_empty( archive->members ) )
{
if ( DEBUG_BINDSCAN )
out_printf( "scan archive %s\n", object_str( archive->file->name ) );
if ( file_collect_archive_content_( archive ) < 0 )
return;
}
/* Report the collected archive content. */
{
FILELISTITER iter = filelist_begin( archive->members );
FILELISTITER const end = filelist_end( archive->members );
char buf[ MAXJPATH ];
for ( ; iter != end ; iter = filelist_next( iter ) )
{
file_info_t * member_file = filelist_item( iter );
LIST * symbols = member_file->files;
/* Construct member path: 'archive-path(member-name)'
*/
sprintf( buf, "%s(%s)",
object_str( archive->file->name ),
object_str( member_file->name ) );
{
OBJECT * const member = object_new( buf );
(*func)( closure, member, 1 /* time valid */, &member_file->time );
object_free( member );
}
}
}
}
/*
* file_archivescan_() - OS specific file_archivescan() implementation
*/
void file_archivescan_( file_archive_info_t * const archive, archive_scanback func,
void * closure )
{
}
/*
* file_collect_archive_content_() - collects information about archive members
*/
int file_collect_archive_content_( file_archive_info_t * const archive )
{
unsigned short rfa[3];
static struct dsc$descriptor_s library =
{0, DSC$K_DTYPE_T, DSC$K_CLASS_S, NULL};
unsigned long lfunc = LBR$C_READ;
unsigned long typ = LBR$C_TYP_UNK;
register int status;
string buf[ 1 ];
char vmspath[ MAXJPATH ] = { 0 };
m_archive = archive;
if ( ! filelist_empty( archive->members ) ) filelist_free( archive->members );
/* Translate path to VMS
*/
string_new( buf );
path_translate_to_os_( object_str( archive->file->name ), buf );
strcpy( vmspath, buf->value );
string_free( buf );
status = lbr$ini_control( &m_lbr_context, &lfunc, &typ, NULL );
if ( !( status & 1 ) )
return -1;
library.dsc$a_pointer = vmspath;
library.dsc$w_length = strlen( vmspath );
status = lbr$open( &m_lbr_context, &library, NULL, NULL, NULL, NULL, NULL );
if ( !( status & 1 ) )
return -1;
/* Scan main index for modules.
* For each module search symbol-index to collect module's symbols.
*/
status = lbr$get_index( &m_lbr_context, &LBR_MODINDEX_NUM, set_archive_member, NULL );
if ( !( status & 1 ) )
return -1;
(void) lbr$close( &m_lbr_context );
return 0;
}
#endif /* OS_VMS */

View File

@@ -0,0 +1,29 @@
/*
* Copyright 2001-2004 David Abrahams.
* Distributed under the Boost Software License, Version 1.0.
* (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt)
*/
#include "jam.h"
#include "frames.h"
FRAME * frame_before_python_call;
void frame_init( FRAME * frame )
{
frame->prev = 0;
frame->prev_user = 0;
lol_init( frame->args );
frame->module = root_module();
frame->rulename = "module scope";
frame->file = 0;
frame->line = -1;
}
void frame_free( FRAME * frame )
{
lol_free( frame->args );
}

View File

@@ -0,0 +1,46 @@
/*
* Copyright 2001-2004 David Abrahams.
* Distributed under the Boost Software License, Version 1.0.
* (See accompanying file LICENSE.txt or copy at
* https://www.bfgroup.xyz/b2/LICENSE.txt)
*/
#ifndef FRAMES_DWA20011021_H
#define FRAMES_DWA20011021_H
#include "config.h"
#include "lists.h"
#include "modules.h"
#include "object.h"
typedef struct frame FRAME;
struct frame
{
FRAME * prev;
FRAME * prev_user; /* The nearest enclosing frame for which
module->user_module is true. */
LOL args[ 1 ];
module_t * module;
OBJECT * file;
int line;
char const * rulename;
#ifdef JAM_DEBUGGER
void * function;
#endif
};
/* When a call into Python is in progress, this variable points to the bjam
* frame that was current at the moment of the call. When the call completes,
* the variable is not defined. Furthermore, if Jam calls Python which calls Jam
* and so on, this variable only keeps the most recent Jam frame.
*/
extern FRAME * frame_before_python_call;
void frame_init( FRAME * );
void frame_free( FRAME * );
#endif

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,49 @@
/*
* Copyright 2011 Steven Watanabe
* Distributed under the Boost Software License, Version 1.0.
* (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt)
*/
#ifndef FUNCTION_SW20111123_H
#define FUNCTION_SW20111123_H
#include "config.h"
#include "object.h"
#include "frames.h"
#include "lists.h"
#include "parse.h"
#include "jam_strings.h"
typedef struct _function FUNCTION;
typedef struct _stack STACK;
STACK * stack_global( void );
void stack_push( STACK * s, LIST * l );
LIST * stack_pop( STACK * s );
FUNCTION * function_compile( PARSE * parse );
FUNCTION * function_builtin( LIST * ( * func )( FRAME * frame, int32_t flags ), int32_t flags, const char * * args );
void function_refer( FUNCTION * );
void function_free( FUNCTION * );
OBJECT * function_rulename( FUNCTION * );
void function_set_rulename( FUNCTION *, OBJECT * );
void function_location( FUNCTION *, OBJECT * *, int32_t * );
LIST * function_run( FUNCTION * function, FRAME * frame, STACK * s );
FUNCTION * function_compile_actions( const char * actions, OBJECT * file, int32_t line );
void function_run_actions( FUNCTION * function, FRAME * frame, STACK * s, string * out );
FUNCTION * function_bind_variables( FUNCTION * f, module_t * module, int32_t * counter );
FUNCTION * function_unbind_variables( FUNCTION * f );
LIST * function_get_variables( FUNCTION * f );
void function_done( void );
#ifdef HAVE_PYTHON
FUNCTION * function_python( PyObject * function, PyObject * bjam_signature );
#endif
#endif

View File

@@ -0,0 +1,152 @@
/*
* Copyright 1994 Christopher Seiwald. All rights reserved.
*
* This file is part of Jam - see jam.c for Copyright information.
*/
/*
* glob.c - match a string against a simple pattern
*
* Understands the following patterns:
*
* * any number of characters
* ? any single character
* [a-z] any single character in the range a-z
* [^a-z] any single character not in the range a-z
* \x match x
*
* External functions:
*
* glob() - match a string against a simple pattern
*
* Internal functions:
*
* globchars() - build a bitlist to check for character group match
*/
# include "jam.h"
# define CHECK_BIT( tab, bit ) ( tab[ (bit)/8 ] & (1<<( (bit)%8 )) )
# define BITLISTSIZE 16 /* bytes used for [chars] in compiled expr */
static void globchars( const char * s, const char * e, char * b );
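/* A few illustrative calls, assuming the return convention used below
* (0 for a match, non-zero otherwise):
*
*   glob( "*.c",    "hash.c" )  == 0   matches
*   glob( "h?sh.c", "hash.c" )  == 0   matches
*   glob( "[a-g]*", "hash.c" )  != 0   'h' is outside the range
*   glob( "\\*",    "*"      )  == 0   escaped literal '*'
*/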
/*
* glob() - match a string against a simple pattern.
*/
int glob( const char * c, const char * s )
{
char bitlist[ BITLISTSIZE ];
const char * here;
for ( ; ; )
switch ( *c++ )
{
case '\0':
return *s ? -1 : 0;
case '?':
if ( !*s++ )
return 1;
break;
case '[':
/* Scan for matching ]. */
here = c;
do if ( !*c++ ) return 1;
while ( ( here == c ) || ( *c != ']' ) );
++c;
/* Build character class bitlist. */
globchars( here, c, bitlist );
if ( !CHECK_BIT( bitlist, *(const unsigned char *)s ) )
return 1;
++s;
break;
case '*':
here = s;
while ( *s )
++s;
/* Try to match the rest of the pattern in a recursive */
/* call. If the match fails we'll back up chars, retrying. */
while ( s != here )
{
int r;
/* A fast path for the last token in a pattern. */
r = *c ? glob( c, s ) : *s ? -1 : 0;
if ( !r )
return 0;
if ( r < 0 )
return 1;
--s;
}
break;
case '\\':
/* Force literal match of next char. */
if ( !*c || ( *s++ != *c++ ) )
return 1;
break;
default:
if ( *s++ != c[ -1 ] )
return 1;
break;
}
}
/*
* globchars() - build a bitlist to check for character group match.
*/
static void globchars( const char * s, const char * e, char * b )
{
int neg = 0;
memset( b, '\0', BITLISTSIZE );
if ( *s == '^' )
{
++neg;
++s;
}
while ( s < e )
{
int c;
if ( ( s + 2 < e ) && ( s[1] == '-' ) )
{
for ( c = s[0]; c <= s[2]; ++c )
b[ c/8 ] |= ( 1 << ( c % 8 ) );
s += 3;
}
else
{
c = *s++;
b[ c/8 ] |= ( 1 << ( c % 8 ) );
}
}
if ( neg )
{
int i;
for ( i = 0; i < BITLISTSIZE; ++i )
b[ i ] ^= 0377;
}
/* Do not include \0 in either $[chars] or $[^chars]. */
b[0] &= 0376;
}

View File

@@ -0,0 +1,107 @@
@ECHO OFF
REM ~ Copyright 2002-2018 Rene Rivera.
REM ~ Distributed under the Boost Software License, Version 1.0.
REM ~ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
if "_%1_" == "_yacc_" goto Guess_Yacc
if "_%1_" == "_test_path_" (
shift
goto Test_Path)
goto Guess
:Clear_Error
ver >nul
goto :eof
:Test_Path
REM Tests for the presence of the given executable file in the directories
REM listed in the PATH environment variable. Additionally sets FOUND_PATH to
REM the directory of the found file.
call :Clear_Error
setlocal
set test=%~$PATH:1
endlocal
if not errorlevel 1 set FOUND_PATH=%~dp$PATH:1
goto :eof
:Guess
REM Let vswhere tell us where msvc is at, if available.
call :Clear_Error
call vswhere_usability_wrapper.cmd
call :Clear_Error
if NOT "_%VS170COMNTOOLS%_" == "__" (
set "B2_TOOLSET=vc143"
set "B2_TOOLSET_ROOT=%VS170COMNTOOLS%..\..\VC\"
goto :eof)
if NOT "_%VS160COMNTOOLS%_" == "__" (
set "B2_TOOLSET=vc142"
set "B2_TOOLSET_ROOT=%VS160COMNTOOLS%..\..\VC\"
goto :eof)
if NOT "_%VS150COMNTOOLS%_" == "__" (
set "B2_TOOLSET=vc141"
set "B2_TOOLSET_ROOT=%VS150COMNTOOLS%..\..\VC\"
goto :eof)
REM VSUNKCOMNTOOLS represents an unknown but detected version from vswhere
if NOT "_%VSUNKCOMNTOOLS%_" == "__" (
set "B2_TOOLSET=vcunk"
set "B2_TOOLSET_ROOT=%VSUNKCOMNTOOLS%..\..\VC\"
goto :eof)
if EXIST "%VS_ProgramFiles%\Microsoft Visual Studio\2017\Enterprise\VC\Auxiliary\Build\vcvarsall.bat" (
set "B2_TOOLSET=vc141"
set "B2_TOOLSET_ROOT=%VS_ProgramFiles%\Microsoft Visual Studio\2017\Enterprise\VC\"
exit /b 0)
if EXIST "%VS_ProgramFiles%\Microsoft Visual Studio\2017\Professional\VC\Auxiliary\Build\vcvarsall.bat" (
set "B2_TOOLSET=vc141"
set "B2_TOOLSET_ROOT=%VS_ProgramFiles%\Microsoft Visual Studio\2017\Professional\VC\"
exit /b 0)
if EXIST "%VS_ProgramFiles%\Microsoft Visual Studio\2017\Community\VC\Auxiliary\Build\vcvarsall.bat" (
set "B2_TOOLSET=vc141"
set "B2_TOOLSET_ROOT=%VS_ProgramFiles%\Microsoft Visual Studio\2017\Community\VC\"
exit /b 0)
if NOT "_%VS140COMNTOOLS%_" == "__" (
set "B2_TOOLSET=vc14"
set "B2_TOOLSET_ROOT=%VS140COMNTOOLS%..\..\VC\"
exit /b 0)
if EXIST "%VS_ProgramFiles%\Microsoft Visual Studio 14.0\VC\VCVARSALL.BAT" (
set "B2_TOOLSET=vc14"
set "B2_TOOLSET_ROOT=%VS_ProgramFiles%\Microsoft Visual Studio 14.0\VC\"
exit /b 0)
if NOT "_%VS120COMNTOOLS%_" == "__" (
set "B2_TOOLSET=vc12"
set "B2_TOOLSET_ROOT=%VS120COMNTOOLS%..\..\VC\"
exit /b 0)
if EXIST "%VS_ProgramFiles%\Microsoft Visual Studio 12.0\VC\VCVARSALL.BAT" (
set "B2_TOOLSET=vc12"
set "B2_TOOLSET_ROOT=%VS_ProgramFiles%\Microsoft Visual Studio 12.0\VC\"
exit /b 0)
call :Test_Path cl.exe
if not errorlevel 1 (
set "B2_TOOLSET=msvc"
set "B2_TOOLSET_ROOT=%FOUND_PATH%..\"
exit /b 0)
call :Test_Path vcvars32.bat
if not errorlevel 1 (
set "B2_TOOLSET=msvc"
call "%FOUND_PATH%VCVARS32.BAT"
set "B2_TOOLSET_ROOT=%MSVCDir%\"
exit /b 0)
call :Test_Path bcc32c.exe
if not errorlevel 1 (
set "B2_TOOLSET=borland"
set "B2_TOOLSET_ROOT=%FOUND_PATH%..\"
exit /b 0)
call :Test_Path icl.exe
if not errorlevel 1 (
set "B2_TOOLSET=intel-win32"
set "B2_TOOLSET_ROOT=%FOUND_PATH%..\"
exit /b 0)
if EXIST "C:\MinGW\bin\gcc.exe" (
set "B2_TOOLSET=mingw"
set "B2_TOOLSET_ROOT=C:\MinGW\"
exit /b 0)
REM Could not find a suitable toolset
exit /b 1

View File

@@ -0,0 +1,388 @@
/*
* Copyright 1993, 1995 Christopher Seiwald.
*
* This file is part of Jam - see jam.c for Copyright information.
*/
/*
* hash.c - simple in-memory hashing routines
*
* External routines:
* hashinit() - initialize a hash table, returning a handle
* hashitem() - find a record in the table, and optionally enter a new one
* hashdone() - free a hash table, given its handle
*
* Internal routines:
* hashrehash() - resize and rebuild hp->tab, the hash table
*/
#include "jam.h"
#include "hash.h"
#include "compile.h"
#include "output.h"
#include <assert.h>
/*
#define HASH_DEBUG_PROFILE 1
*/
/* Header attached to all hash table data items. */
typedef struct item ITEM;
struct item
{
ITEM * next;
};
#define MAX_LISTS 32
struct hash
{
/*
* the hash table, just an array of item pointers
*/
struct
{
int32_t nel;
ITEM * * base;
} tab;
int32_t bloat; /* tab.nel / items.nel */
int32_t inel; /* initial number of elements */
/*
* the array of records, maintained by these routines - essentially a
* microallocator
*/
struct
{
int32_t more; /* how many more ITEMs fit in lists[ list ] */
ITEM * free; /* free list of items */
char * next; /* where to put more ITEMs in lists[ list ] */
int32_t size; /* sizeof( ITEM ) + aligned datalen */
int32_t nel; /* total ITEMs held by all lists[] */
int32_t list; /* index into lists[] */
struct
{
int32_t nel; /* total ITEMs held by this list */
char * base; /* base of ITEMs array */
} lists[ MAX_LISTS ];
} items;
char const * name; /* just for hashstats() */
};
static void hashrehash( struct hash * );
static void hashstat( struct hash * );
static uint32_t hash_keyval( OBJECT * key )
{
return object_hash( key );
}
#define hash_bucket(hp, keyval) ((hp)->tab.base + ((keyval) % (hp)->tab.nel))
#define hash_data_key(data) (*(OBJECT * *)(data))
#define hash_item_data(item) ((HASHDATA *)((char *)item + sizeof(ITEM)))
#define hash_item_key(item) (hash_data_key(hash_item_data(item)))
#define ALIGNED(x) ((x + sizeof(ITEM) - 1) & ~(sizeof(ITEM) - 1))
/*
* hashinit() - initialize a hash table, returning a handle
*/
struct hash * hashinit( int32_t datalen, char const * name )
{
struct hash * hp = (struct hash *)BJAM_MALLOC( sizeof( *hp ) );
hp->bloat = 3;
hp->tab.nel = 0;
hp->tab.base = 0;
hp->items.more = 0;
hp->items.free = 0;
hp->items.size = sizeof( ITEM ) + ALIGNED( datalen );
hp->items.list = -1;
hp->items.nel = 0;
hp->inel = 11; /* 47 */
hp->name = name;
return hp;
}
/*
* hash_search() - Find the hash item for the given data.
*
* Returns a pointer to a hashed item with the given key. If given a 'previous'
* pointer, makes it point to the item prior to the found item in the same
* bucket or to 0 if our item is the first item in its bucket.
*/
static ITEM * hash_search( struct hash * hp, uint32_t keyval,
OBJECT * keydata, ITEM * * previous )
{
ITEM * i = *hash_bucket( hp, keyval );
ITEM * p = 0;
for ( ; i; i = i->next )
{
if ( object_equal( hash_item_key( i ), keydata ) )
{
if ( previous )
*previous = p;
return i;
}
p = i;
}
return 0;
}
/*
* hash_insert() - insert a record in the table or return the existing one
*/
HASHDATA * hash_insert( struct hash * hp, OBJECT * key, int32_t * found )
{
ITEM * i;
uint32_t keyval = hash_keyval( key );
#ifdef HASH_DEBUG_PROFILE
profile_frame prof[ 1 ];
if ( DEBUG_PROFILE )
profile_enter( 0, prof );
#endif
if ( !hp->items.more )
hashrehash( hp );
i = hash_search( hp, keyval, key, 0 );
if ( i )
*found = 1;
else
{
ITEM * * base = hash_bucket( hp, keyval );
/* Try to grab one from the free list. */
if ( hp->items.free )
{
i = hp->items.free;
hp->items.free = i->next;
assert( !hash_item_key( i ) );
}
else
{
i = (ITEM *)hp->items.next;
hp->items.next += hp->items.size;
}
--hp->items.more;
i->next = *base;
*base = i;
*found = 0;
}
#ifdef HASH_DEBUG_PROFILE
if ( DEBUG_PROFILE )
profile_exit( prof );
#endif
return hash_item_data( i );
}
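/* Usage sketch (hypothetical record type): the stored record must begin with
* its OBJECT * key so that hash_data_key() can read it back, and the 'found'
* flag tells the caller whether the returned slot still needs initializing:
*
*   typedef struct { OBJECT * name; int32_t hits; } my_record;
*   struct hash * h = hashinit( sizeof( my_record ), "my_records" );
*   int32_t found;
*   my_record * r = (my_record *)hash_insert( h, key, &found );
*   if ( !found ) { r->name = object_copy( key ); r->hits = 0; }
*   ++r->hits;
*   hashdone( h );
*/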
/*
* hash_find() - find a record in the table or NULL if none exists
*/
HASHDATA * hash_find( struct hash * hp, OBJECT * key )
{
ITEM * i;
uint32_t keyval = hash_keyval( key );
#ifdef HASH_DEBUG_PROFILE
profile_frame prof[ 1 ];
if ( DEBUG_PROFILE )
profile_enter( 0, prof );
#endif
if ( !hp->items.nel )
{
#ifdef HASH_DEBUG_PROFILE
if ( DEBUG_PROFILE )
profile_exit( prof );
#endif
return 0;
}
i = hash_search( hp, keyval, key, 0 );
#ifdef HASH_DEBUG_PROFILE
if ( DEBUG_PROFILE )
profile_exit( prof );
#endif
return i ? hash_item_data( i ) : 0;
}
/*
* hashrehash() - resize and rebuild hp->tab, the hash table
*/
static void hashrehash( struct hash * hp )
{
int32_t i = ++hp->items.list;
hp->items.more = i ? 2 * hp->items.nel : hp->inel;
hp->items.next = (char *)BJAM_MALLOC( hp->items.more * hp->items.size );
hp->items.free = 0;
hp->items.lists[ i ].nel = hp->items.more;
hp->items.lists[ i ].base = hp->items.next;
hp->items.nel += hp->items.more;
if ( hp->tab.base )
BJAM_FREE( (char *)hp->tab.base );
hp->tab.nel = hp->items.nel * hp->bloat;
hp->tab.base = (ITEM * *)BJAM_MALLOC( hp->tab.nel * sizeof( ITEM * ) );
memset( (char *)hp->tab.base, '\0', hp->tab.nel * sizeof( ITEM * ) );
for ( i = 0; i < hp->items.list; ++i )
{
int32_t nel = hp->items.lists[ i ].nel;
char * next = hp->items.lists[ i ].base;
for ( ; nel--; next += hp->items.size )
{
ITEM * i = (ITEM *)next;
ITEM * * ip = hp->tab.base + object_hash( hash_item_key( i ) ) %
hp->tab.nel;
            /* The code currently assumes that rehashing happens only when
             * there are no free items.
             */
assert( hash_item_key( i ) );
i->next = *ip;
*ip = i;
}
}
}
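/* Worked example for the function above (editorial note, using the defaults
 * set in hashinit(): inel == 11, bloat == 3): the first call fills lists[ 0 ]
 * with 11 items and builds an 11 * 3 == 33 bucket table; when those items run
 * out, the next call adds lists[ 1 ] with 2 * 11 == 22 more items (33 total)
 * and rebuilds the table with 33 * 3 == 99 buckets; the call after that adds
 * 66 items (99 total) and 297 buckets, and so on.
 */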
void hashenumerate( struct hash * hp, void (* f)( void *, void * ), void * data
)
{
int32_t i;
for ( i = 0; i <= hp->items.list; ++i )
{
char * next = hp->items.lists[ i ].base;
int32_t nel = hp->items.lists[ i ].nel;
if ( i == hp->items.list )
nel -= hp->items.more;
for ( ; nel--; next += hp->items.size )
{
ITEM * const i = (ITEM *)next;
if ( hash_item_key( i ) != 0 ) /* Do not enumerate freed items. */
f( hash_item_data( i ), data );
}
}
}
/*
* hash_free() - free a hash table, given its handle
*/
void hash_free( struct hash * hp )
{
int32_t i;
if ( !hp )
return;
if ( hp->tab.base )
BJAM_FREE( (char *)hp->tab.base );
for ( i = 0; i <= hp->items.list; ++i )
BJAM_FREE( hp->items.lists[ i ].base );
BJAM_FREE( (char *)hp );
}
static void hashstat( struct hash * hp )
{
struct hashstats stats[ 1 ];
hashstats_init( stats );
hashstats_add( stats, hp );
hashstats_print( stats, hp->name );
}
void hashstats_init( struct hashstats * stats )
{
stats->count = 0;
stats->num_items = 0;
stats->tab_size = 0;
stats->item_size = 0;
stats->sets = 0;
stats->num_hashes = 0;
}
void hashstats_add( struct hashstats * stats, struct hash * hp )
{
if ( hp )
{
ITEM * * tab = hp->tab.base;
int nel = hp->tab.nel;
int count = 0;
int sets = 0;
int i;
for ( i = 0; i < nel; ++i )
{
ITEM * item;
int here = 0;
for ( item = tab[ i ]; item; item = item->next )
++here;
count += here;
if ( here > 0 )
++sets;
}
stats->count += count;
stats->sets += sets;
stats->num_items += hp->items.nel;
stats->tab_size += hp->tab.nel;
stats->item_size = hp->items.size;
++stats->num_hashes;
}
}
void hashstats_print( struct hashstats * stats, char const * name )
{
out_printf( "%s table: %d+%d+%d (%dK+%luK+%luK) items+table+hash, %f density\n",
name,
stats->count,
stats->num_items,
stats->tab_size,
stats->num_items * stats->item_size / 1024,
(long unsigned)stats->tab_size * sizeof( ITEM * * ) / 1024,
(long unsigned)stats->num_hashes * sizeof( struct hash ) / 1024,
(float)stats->count / (float)stats->sets );
}
void hashdone( struct hash * hp )
{
if ( !hp )
return;
if ( DEBUG_MEM || DEBUG_PROFILE )
hashstat( hp );
hash_free( hp );
}

View File

@@ -0,0 +1,80 @@
/*
* Copyright 1993, 1995 Christopher Seiwald.
*
* This file is part of Jam - see jam.c for Copyright information.
*/
/*
* hash.h - simple in-memory hashing routines
*/
#ifndef BOOST_JAM_HASH_H
#define BOOST_JAM_HASH_H
#include "config.h"
#include "object.h"
/*
* An opaque struct representing an item in the hash table. The first element of
* every struct stored in the table must be an OBJECT * which is treated as the
* key.
*/
typedef struct hashdata HASHDATA;
/*
* hashinit() - initialize a hash table, returning a handle.
*
* Parameters:
* datalen - item size
* name - used for debugging
*/
struct hash * hashinit( int32_t datalen, char const * name );
/*
* hash_free() - free a hash table, given its handle
*/
void hash_free( struct hash * );
void hashdone( struct hash * );
/*
 * hashenumerate() - call f( i, data ) on each item i in the hash table. The
* enumeration order is unspecified.
*/
void hashenumerate( struct hash *, void (* f)( void *, void * ), void * data );
/*
* hash_insert() - insert a new item in a hash table, or return an existing one.
*
* Preconditions:
* - hp must be a hash table created by hashinit()
* - key must be an object created by object_new()
*
* Postconditions:
* - if the key does not already exist in the hash table, *found == 0 and the
* result will be a pointer to an uninitialized item. The key of the new
* item must be set to a value equal to key before any further operations on
* the hash table except hashdone().
* - if the key is present then *found == 1 and the result is a pointer to the
* existing record.
*/
HASHDATA * hash_insert( struct hash *, OBJECT * key, int32_t * found );
/*
* hash_find() - find a record in the table or NULL if none exists
*/
HASHDATA * hash_find( struct hash *, OBJECT * key );
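/*
 * Editorial sketch (not part of the original header): a minimal, hypothetical
 * caller exercising the contract documented above. EXAMPLE_RECORD,
 * example_sum(), hash_usage_example() and the literal key "alpha" are
 * illustrative names only; the code is kept in an #if 0 block so it is never
 * compiled.
 */
#if 0
typedef struct example_record
{
    OBJECT * key;   /* must be the first member: it is treated as the key */
    int value;
} EXAMPLE_RECORD;

static void example_sum( void * item_, void * data_ )
{
    EXAMPLE_RECORD * item = (EXAMPLE_RECORD *)item_;
    *(int *)data_ += item->value;
}

static void hash_usage_example( void )
{
    int32_t found;
    int total = 0;
    struct hash * h = hashinit( sizeof( EXAMPLE_RECORD ), "example" );
    OBJECT * key = object_new( "alpha" );
    EXAMPLE_RECORD * r = (EXAMPLE_RECORD *)hash_insert( h, key, &found );
    if ( !found )
    {
        r->key = key;       /* the stored item takes ownership of the key */
        r->value = 1;
    }
    else
        object_free( key ); /* already present: drop the duplicate key */

    if ( hash_find( h, r->key ) )          /* lookup by an equal OBJECT * */
        hashenumerate( h, example_sum, &total );

    /* Keys stored in items are not freed by hash_free()/hashdone(). */
    object_free( r->key );
    hash_free( h );
}
#endif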
struct hashstats {
int count;
int num_items;
int tab_size;
int item_size;
int sets;
int num_hashes;
};
void hashstats_init( struct hashstats * stats );
void hashstats_add( struct hashstats * stats, struct hash * );
void hashstats_print( struct hashstats * stats, char const * name );
#endif

View File

@@ -0,0 +1,534 @@
/*
* This file has been donated to Jam.
*/
/*
* Craig W. McPheeters, Alias|Wavefront.
*
* hcache.c hcache.h - handle caching of #includes in source files.
*
* Create a cache of files scanned for headers. When starting jam, look for the
* cache file and load it if present. When finished the binding phase, create a
* new header cache. The cache contains files, their timestamps and the header
* files found in their scan. During the binding phase of jam, look in the
* header cache first for the headers contained in a file. If the cache is
* present and valid, use its contents. This results in dramatic speedups with
 * large projects (e.g. 3 min -> 1 min startup for one project).
*
* External routines:
 * hcache_init() - read and parse the header cache file (named by the
 *                 HCACHEFILE variable), if one exists.
 * hcache_done() - write an updated header cache file.
 * hcache() - return list of headers on target. Use cache or do a scan.
 *
 * The cache file is a sequence of netstrings (see read_netstring() below): a
 * version record, then one record per target holding its bound name,
 * timestamp, age, the list of included files found and the HDRSCAN patterns
 * used for the scan, and finally an "end" record.
*/
#include "config.h"
#ifdef OPT_HEADER_CACHE_EXT
#include "jam.h"
#include "hcache.h"
#include "hash.h"
#include "headers.h"
#include "lists.h"
#include "modules.h"
#include "object.h"
#include "parse.h"
#include "regexp.h"
#include "rules.h"
#include "search.h"
#include "timestamp.h"
#include "variable.h"
#include "output.h"
#include <errno.h>
#include <string.h>
typedef struct hcachedata HCACHEDATA ;
struct hcachedata
{
OBJECT * boundname;
timestamp time;
LIST * includes;
LIST * hdrscan; /* the HDRSCAN value for this target */
int age; /* if too old, we will remove it from cache */
HCACHEDATA * next;
};
static struct hash * hcachehash = 0;
static HCACHEDATA * hcachelist = 0;
static int queries = 0;
static int hits = 0;
#define CACHE_FILE_VERSION "version 5"
#define CACHE_RECORD_HEADER "header"
#define CACHE_RECORD_END "end"
/*
* Return the name of the header cache file. May return NULL.
*
* The user sets this by setting the HCACHEFILE variable in a Jamfile. We cache
 * the result so the user cannot change the cache file during header scanning.
*/
static const char * cache_name( void )
{
static OBJECT * name = 0;
if ( !name )
{
LIST * const hcachevar = var_get( root_module(), constant_HCACHEFILE );
if ( !list_empty( hcachevar ) )
{
TARGET * const t = bindtarget( list_front( hcachevar ) );
pushsettings( root_module(), t->settings );
/* Do not expect the cache file to be generated, so pass 0 as the
* third argument to search. Expect the location to be specified via
* LOCATE, so pass 0 as the fourth argument.
*/
object_free( t->boundname );
t->boundname = search( t->name, &t->time, 0, 0 );
popsettings( root_module(), t->settings );
name = object_copy( t->boundname );
}
}
return name ? object_str( name ) : 0;
}
/*
* Return the maximum age a cache entry can have before it is purged from the
* cache.
*/
static int cache_maxage( void )
{
int age = 100;
LIST * const var = var_get( root_module(), constant_HCACHEMAXAGE );
if ( !list_empty( var ) )
{
age = atoi( object_str( list_front( var ) ) );
if ( age < 0 )
age = 0;
}
return age;
}
/*
 * Read a netstring. The caveat is that the string cannot contain ASCII 0 (NUL). The
* returned value is as returned by object_new().
*/
OBJECT * read_netstring( FILE * f )
{
unsigned long len;
static char * buf = NULL;
static unsigned long buf_len = 0;
if ( fscanf( f, " %9lu", &len ) != 1 )
return NULL;
if ( fgetc( f ) != (int)'\t' )
return NULL;
if ( len > 1024 * 64 )
return NULL; /* sanity check */
if ( len > buf_len )
{
unsigned long new_len = buf_len * 2;
if ( new_len < len )
new_len = len;
buf = (char *)BJAM_REALLOC( buf, new_len + 1 );
if ( buf )
buf_len = new_len;
}
if ( !buf )
return NULL;
if ( fread( buf, 1, len, f ) != len )
return NULL;
if ( fgetc( f ) != (int)'\n' )
return NULL;
buf[ len ] = 0;
return object_new( buf );
}
/*
* Write a netstring.
*/
void write_netstring( FILE * f, char const * s )
{
if ( !s )
s = "";
fprintf( f, "%lu\t%s\n", (long unsigned)strlen( s ), s );
}
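/*
 * Editorial sketch (hypothetical helper, not part of the original file): a
 * round trip through the two routines above. write_netstring( f, "version 5" )
 * emits the bytes "9\tversion 5\n"; read_netstring() then parses the length,
 * the tab, the payload and the trailing newline back into an OBJECT. Assumes
 * <stdio.h> is available via jam.h; kept in an #if 0 block so it is never
 * compiled.
 */
#if 0
static void netstring_roundtrip_example( void )
{
    FILE * f = tmpfile();
    OBJECT * s;
    if ( !f )
        return;
    write_netstring( f, "version 5" );  /* file now holds "9\tversion 5\n" */
    rewind( f );
    s = read_netstring( f );            /* equal to object_new( "version 5" ) */
    if ( s )
        object_free( s );
    fclose( f );
}
#endif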
void hcache_init()
{
FILE * f;
OBJECT * version = 0;
int header_count = 0;
const char * hcachename;
if ( hcachehash )
return;
hcachehash = hashinit( sizeof( HCACHEDATA ), "hcache" );
if ( !( hcachename = cache_name() ) )
return;
if ( !( f = fopen( hcachename, "rb" ) ) )
{
if ( errno != ENOENT )
err_printf( "[errno %d] failed to read hcache file '%s': %s",
errno, hcachename, strerror(errno) );
return;
}
version = read_netstring( f );
if ( !version || strcmp( object_str( version ), CACHE_FILE_VERSION ) )
goto bail;
while ( 1 )
{
HCACHEDATA cachedata;
HCACHEDATA * c;
OBJECT * record_type = 0;
OBJECT * time_secs_str = 0;
OBJECT * time_nsecs_str = 0;
OBJECT * age_str = 0;
OBJECT * includes_count_str = 0;
OBJECT * hdrscan_count_str = 0;
int i;
int count;
LIST * l;
int found;
cachedata.boundname = 0;
cachedata.includes = 0;
cachedata.hdrscan = 0;
record_type = read_netstring( f );
if ( !record_type )
{
err_printf( "invalid %s\n", hcachename );
goto cleanup;
}
if ( !strcmp( object_str( record_type ), CACHE_RECORD_END ) )
{
object_free( record_type );
break;
}
if ( strcmp( object_str( record_type ), CACHE_RECORD_HEADER ) )
{
err_printf( "invalid %s with record separator <%s>\n",
hcachename, record_type ? object_str( record_type ) : "<null>" );
goto cleanup;
}
cachedata.boundname = read_netstring( f );
time_secs_str = read_netstring( f );
time_nsecs_str = read_netstring( f );
age_str = read_netstring( f );
includes_count_str = read_netstring( f );
if ( !cachedata.boundname || !time_secs_str || !time_nsecs_str ||
!age_str || !includes_count_str )
{
err_printf( "invalid %s\n", hcachename );
goto cleanup;
}
timestamp_init( &cachedata.time, atoi( object_str( time_secs_str ) ),
atoi( object_str( time_nsecs_str ) ) );
cachedata.age = atoi( object_str( age_str ) ) + 1;
count = atoi( object_str( includes_count_str ) );
for ( l = L0, i = 0; i < count; ++i )
{
OBJECT * const s = read_netstring( f );
if ( !s )
{
err_printf( "invalid %s\n", hcachename );
list_free( l );
goto cleanup;
}
l = list_push_back( l, s );
}
cachedata.includes = l;
hdrscan_count_str = read_netstring( f );
if ( !hdrscan_count_str )
{
err_printf( "invalid %s\n", hcachename );
goto cleanup;
}
count = atoi( object_str( hdrscan_count_str ) );
for ( l = L0, i = 0; i < count; ++i )
{
OBJECT * const s = read_netstring( f );
if ( !s )
{
err_printf( "invalid %s\n", hcachename );
list_free( l );
goto cleanup;
}
l = list_push_back( l, s );
}
cachedata.hdrscan = l;
c = (HCACHEDATA *)hash_insert( hcachehash, cachedata.boundname, &found )
;
if ( !found )
{
c->boundname = cachedata.boundname;
c->includes = cachedata.includes;
c->hdrscan = cachedata.hdrscan;
c->age = cachedata.age;
timestamp_copy( &c->time, &cachedata.time );
}
else
{
err_printf( "can not insert header cache item, bailing on %s"
"\n", hcachename );
goto cleanup;
}
c->next = hcachelist;
hcachelist = c;
++header_count;
object_free( record_type );
object_free( time_secs_str );
object_free( time_nsecs_str );
object_free( age_str );
object_free( includes_count_str );
object_free( hdrscan_count_str );
continue;
cleanup:
if ( record_type ) object_free( record_type );
if ( time_secs_str ) object_free( time_secs_str );
if ( time_nsecs_str ) object_free( time_nsecs_str );
if ( age_str ) object_free( age_str );
if ( includes_count_str ) object_free( includes_count_str );
if ( hdrscan_count_str ) object_free( hdrscan_count_str );
if ( cachedata.boundname ) object_free( cachedata.boundname );
if ( cachedata.includes ) list_free( cachedata.includes );
if ( cachedata.hdrscan ) list_free( cachedata.hdrscan );
goto bail;
}
if ( DEBUG_HEADER )
out_printf( "hcache read from file %s\n", hcachename );
bail:
if ( version )
object_free( version );
fclose( f );
}
void hcache_done()
{
FILE * f;
HCACHEDATA * c;
int header_count = 0;
const char * hcachename;
int maxage;
if ( !hcachehash )
return;
if ( !( hcachename = cache_name() ) )
goto cleanup;
if ( !( f = fopen( hcachename, "wb" ) ) )
{
err_printf( "[errno %d] failed to write hcache file '%s': %s",
errno, hcachename, strerror(errno) );
goto cleanup;
}
maxage = cache_maxage();
/* Print out the version. */
write_netstring( f, CACHE_FILE_VERSION );
c = hcachelist;
for ( c = hcachelist; c; c = c->next )
{
LISTITER iter;
LISTITER end;
char time_secs_str[ 30 ];
char time_nsecs_str[ 30 ];
char age_str[ 30 ];
char includes_count_str[ 30 ];
char hdrscan_count_str[ 30 ];
if ( maxage == 0 )
c->age = 0;
else if ( c->age > maxage )
continue;
sprintf( includes_count_str, "%lu", (long unsigned)list_length(
c->includes ) );
sprintf( hdrscan_count_str, "%lu", (long unsigned)list_length(
c->hdrscan ) );
sprintf( time_secs_str, "%lu", (long unsigned)c->time.secs );
sprintf( time_nsecs_str, "%lu", (long unsigned)c->time.nsecs );
sprintf( age_str, "%lu", (long unsigned)c->age );
write_netstring( f, CACHE_RECORD_HEADER );
write_netstring( f, object_str( c->boundname ) );
write_netstring( f, time_secs_str );
write_netstring( f, time_nsecs_str );
write_netstring( f, age_str );
write_netstring( f, includes_count_str );
for ( iter = list_begin( c->includes ), end = list_end( c->includes );
iter != end; iter = list_next( iter ) )
write_netstring( f, object_str( list_item( iter ) ) );
write_netstring( f, hdrscan_count_str );
for ( iter = list_begin( c->hdrscan ), end = list_end( c->hdrscan );
iter != end; iter = list_next( iter ) )
write_netstring( f, object_str( list_item( iter ) ) );
fputs( "\n", f );
++header_count;
}
write_netstring( f, CACHE_RECORD_END );
if ( DEBUG_HEADER )
out_printf( "hcache written to %s. %d dependencies, %.0f%% hit rate\n",
hcachename, header_count, queries ? 100.0 * hits / queries : 0 );
fclose ( f );
cleanup:
for ( c = hcachelist; c; c = c->next )
{
list_free( c->includes );
list_free( c->hdrscan );
object_free( c->boundname );
}
hcachelist = 0;
if ( hcachehash )
hashdone( hcachehash );
hcachehash = 0;
}
LIST * hcache( TARGET * t, int rec, regexp * re[], LIST * hdrscan )
{
HCACHEDATA * c;
++queries;
if ( ( c = (HCACHEDATA *)hash_find( hcachehash, t->boundname ) ) )
{
if ( !timestamp_cmp( &c->time, &t->time ) )
{
LIST * const l1 = hdrscan;
LIST * const l2 = c->hdrscan;
LISTITER iter1 = list_begin( l1 );
LISTITER const end1 = list_end( l1 );
LISTITER iter2 = list_begin( l2 );
LISTITER const end2 = list_end( l2 );
while ( iter1 != end1 && iter2 != end2 )
{
if ( !object_equal( list_item( iter1 ), list_item( iter2 ) ) )
iter1 = end1;
else
{
iter1 = list_next( iter1 );
iter2 = list_next( iter2 );
}
}
if ( iter1 != end1 || iter2 != end2 )
{
if ( DEBUG_HEADER )
{
out_printf( "HDRSCAN out of date in cache for %s\n",
object_str( t->boundname ) );
out_printf(" real : ");
list_print( hdrscan );
out_printf( "\n cached: " );
list_print( c->hdrscan );
out_printf( "\n" );
}
list_free( c->includes );
list_free( c->hdrscan );
c->includes = L0;
c->hdrscan = L0;
}
else
{
if ( DEBUG_HEADER )
out_printf( "using header cache for %s\n", object_str(
t->boundname ) );
c->age = 0;
++hits;
return list_copy( c->includes );
}
}
else
{
if ( DEBUG_HEADER )
out_printf ("header cache out of date for %s\n", object_str(
t->boundname ) );
list_free( c->includes );
list_free( c->hdrscan );
c->includes = L0;
c->hdrscan = L0;
}
}
else
{
int found;
c = (HCACHEDATA *)hash_insert( hcachehash, t->boundname, &found );
if ( !found )
{
c->boundname = object_copy( t->boundname );
c->next = hcachelist;
hcachelist = c;
}
}
    /* 'c' points at the cache entry. It is out of date. */
{
LIST * const l = headers1( L0, t->boundname, rec, re );
timestamp_copy( &c->time, &t->time );
c->age = 0;
c->includes = list_copy( l );
c->hdrscan = list_copy( hdrscan );
return l;
}
}
#endif /* OPT_HEADER_CACHE_EXT */

View File

@@ -0,0 +1,20 @@
/*
* This file is not part of Jam
*/
/*
* hcache.h - handle #includes in source files
*/
#ifndef HCACHE_H
#define HCACHE_H
#include "config.h"
#include "lists.h"
#include "regexp.h"
#include "rules.h"
void hcache_init( void );
void hcache_done( void );
LIST * hcache( TARGET * t, int rec, regexp * re[], LIST * hdrscan );
#endif

View File

@@ -0,0 +1,146 @@
/*
* Copyright 1993, 2000 Christopher Seiwald.
*
* This file is part of Jam - see jam.c for Copyright information.
*/
/* This file is ALSO:
* Copyright 2001-2004 David Abrahams.
* Distributed under the Boost Software License, Version 1.0.
* (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt)
*/
/*
* hdrmacro.c - handle header files that define macros used in #include
* statements.
*
 * We look for lines like "#define MACRO <....>" or '#define MACRO "...."' in
* the target file. When found, we then phony up a rule invocation like:
*
* $(HDRRULE) <target> : <resolved included files> ;
*
 * External routines:
 *    macro_headers() - scan a file for "#define MACRO <filename>" lines and
 *                      record the macro -> filename mappings
 *    macro_header_get() - return the filename a given macro was defined to,
 *                         or 0 if it is unknown
*/
#include "jam.h"
#include "hdrmacro.h"
#include "compile.h"
#include "hash.h"
#include "lists.h"
#include "object.h"
#include "parse.h"
#include "rules.h"
#include "jam_strings.h"
#include "subst.h"
#include "variable.h"
#include "output.h"
#include <errno.h>
#include <string.h>
/* this type is used to store a dictionary of file header macros */
typedef struct header_macro
{
OBJECT * symbol;
OBJECT * filename; /* we could maybe use a LIST here ?? */
} HEADER_MACRO;
static struct hash * header_macros_hash = 0;
/*
 * macro_headers() - scan a target for "#define MACRO <filename>" lines and record them
*/
#define MAXINC 10
void macro_headers( TARGET * t )
{
static regexp * re = 0;
FILE * f;
char buf[ 1024 ];
if ( DEBUG_HEADER )
out_printf( "macro header scan for %s\n", object_str( t->name ) );
/* This regexp is used to detect lines of the form
* "#define MACRO <....>" or "#define MACRO "....."
* in the header macro files.
*/
if ( !re )
{
OBJECT * const re_str = object_new(
"^[ ]*#[ ]*define[ ]*([A-Za-z][A-Za-z0-9_]*)[ ]*"
"[<\"]([^\">]*)[\">].*$" );
re = regex_compile( re_str );
object_free( re_str );
}
if ( !( f = fopen( object_str( t->boundname ), "r" ) ) )
{
err_printf( "[errno %d] failed to scan include file '%s': %s",
errno, object_str( t->boundname ), strerror(errno) );
return;
}
while ( fgets( buf, sizeof( buf ), f ) )
{
HEADER_MACRO var;
HEADER_MACRO * v = &var;
if ( regexec( re, buf ) && re->startp[ 1 ] )
{
OBJECT * symbol;
int found;
            /* We detected a line that looks like "#define MACRO filename". */
( (char *)re->endp[ 1 ] )[ 0 ] = '\0';
( (char *)re->endp[ 2 ] )[ 0 ] = '\0';
if ( DEBUG_HEADER )
out_printf( "macro '%s' used to define filename '%s' in '%s'\n",
re->startp[ 1 ], re->startp[ 2 ], object_str( t->boundname )
);
/* add macro definition to hash table */
if ( !header_macros_hash )
header_macros_hash = hashinit( sizeof( HEADER_MACRO ),
"hdrmacros" );
symbol = object_new( re->startp[ 1 ] );
v = (HEADER_MACRO *)hash_insert( header_macros_hash, symbol, &found
);
if ( !found )
{
v->symbol = symbol;
v->filename = object_new( re->startp[ 2 ] ); /* never freed */
}
else
object_free( symbol );
/* XXXX: FOR NOW, WE IGNORE MULTIPLE MACRO DEFINITIONS !! */
/* WE MIGHT AS WELL USE A LIST TO STORE THEM.. */
}
}
fclose( f );
}
OBJECT * macro_header_get( OBJECT * macro_name )
{
HEADER_MACRO * v;
if ( header_macros_hash && ( v = (HEADER_MACRO *)hash_find(
header_macros_hash, macro_name ) ) )
{
if ( DEBUG_HEADER )
out_printf( "### macro '%s' evaluated to '%s'\n", object_str( macro_name
), object_str( v->filename ) );
return v->filename;
}
return 0;
}
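/*
 * Editorial sketch (hypothetical names, not part of the original file): after
 * macro_headers() has scanned a file containing the line
 *
 *     #define MYPROJECT_CONFIG_H <myproject/config.h>
 *
 * the macro can be resolved the way headers.c does for a subsequent
 * "#include MYPROJECT_CONFIG_H" line. Kept in an #if 0 block so it is never
 * compiled.
 */
#if 0
static void macro_resolve_example( void )
{
    OBJECT * name = object_new( "MYPROJECT_CONFIG_H" );
    OBJECT * filename = macro_header_get( name ); /* "myproject/config.h" or 0 */
    if ( filename )
        out_printf( "resolved to '%s'\n", object_str( filename ) );
    object_free( name );
}
#endif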

View File

@@ -0,0 +1,22 @@
/*
* Copyright 1993, 1995 Christopher Seiwald.
*
* This file is part of Jam - see jam.c for Copyright information.
*/
/*
* hdrmacro.h - parses header files for #define MACRO <filename> or
* #define MACRO "filename" definitions
*/
#ifndef HDRMACRO_SW20111118_H
#define HDRMACRO_SW20111118_H
#include "config.h"
#include "object.h"
#include "rules.h"
void macro_headers( TARGET * );
OBJECT * macro_header_get( OBJECT * macro_name );
#endif

View File

@@ -0,0 +1,207 @@
/*
* Copyright 1993, 2000 Christopher Seiwald.
*
* This file is part of Jam - see jam.c for Copyright information.
*/
/* This file is ALSO:
* Copyright 2001-2004 David Abrahams.
* Distributed under the Boost Software License, Version 1.0.
* (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt)
*/
/*
* headers.c - handle #includes in source files
*
* Using regular expressions provided as the variable $(HDRSCAN), headers()
* searches a file for #include files and phonies up a rule invocation:
* $(HDRRULE) <target> : <include files> ;
*
* External routines:
* headers() - scan a target for include files and call HDRRULE
*
* Internal routines:
* headers1() - using regexp, scan a file and build include LIST
*/
#include "jam.h"
#include "headers.h"
#include "compile.h"
#include "hdrmacro.h"
#include "lists.h"
#include "modules.h"
#include "object.h"
#include "parse.h"
#include "rules.h"
#include "subst.h"
#include "variable.h"
#include "output.h"
#ifdef OPT_HEADER_CACHE_EXT
# include "hcache.h"
#endif
#include <errno.h>
#include <string.h>
#ifndef OPT_HEADER_CACHE_EXT
static LIST * headers1( LIST *, OBJECT * file, int rec, regexp * re[] );
#endif
/*
* headers() - scan a target for include files and call HDRRULE
*/
#define MAXINC 10
void headers( TARGET * t )
{
LIST * hdrscan;
LIST * hdrrule;
#ifndef OPT_HEADER_CACHE_EXT
LIST * headlist = L0;
#endif
regexp * re[ MAXINC ];
int rec = 0;
LISTITER iter;
LISTITER end;
hdrscan = var_get( root_module(), constant_HDRSCAN );
if ( list_empty( hdrscan ) )
return;
hdrrule = var_get( root_module(), constant_HDRRULE );
if ( list_empty( hdrrule ) )
return;
if ( DEBUG_HEADER )
out_printf( "header scan %s\n", object_str( t->name ) );
/* Compile all regular expressions in HDRSCAN */
iter = list_begin( hdrscan );
end = list_end( hdrscan );
for ( ; ( rec < MAXINC ) && iter != end; iter = list_next( iter ) )
{
re[ rec++ ] = regex_compile( list_item( iter ) );
}
/* Doctor up call to HDRRULE rule */
/* Call headers1() to get LIST of included files. */
{
FRAME frame[ 1 ];
frame_init( frame );
lol_add( frame->args, list_new( object_copy( t->name ) ) );
#ifdef OPT_HEADER_CACHE_EXT
lol_add( frame->args, hcache( t, rec, re, hdrscan ) );
#else
lol_add( frame->args, headers1( headlist, t->boundname, rec, re ) );
#endif
if ( lol_get( frame->args, 1 ) )
{
OBJECT * rulename = list_front( hdrrule );
/* The third argument to HDRRULE is the bound name of $(<). */
lol_add( frame->args, list_new( object_copy( t->boundname ) ) );
list_free( evaluate_rule( bindrule( rulename, frame->module ), rulename, frame ) );
}
/* Clean up. */
frame_free( frame );
}
}
/*
* headers1() - using regexp, scan a file and build include LIST.
*/
#ifndef OPT_HEADER_CACHE_EXT
static
#endif
LIST * headers1( LIST * l, OBJECT * file, int rec, regexp * re[] )
{
FILE * f;
char buf[ 1024 ];
int i;
static regexp * re_macros = 0;
#ifdef OPT_IMPROVED_PATIENCE_EXT
static int count = 0;
++count;
if ( ( ( count == 100 ) || !( count % 1000 ) ) && DEBUG_MAKE )
{
out_printf( "...patience...\n" );
out_flush();
}
#endif
/* The following regexp is used to detect cases where a file is included
* through a line like "#include MACRO".
*/
if ( re_macros == 0 )
{
OBJECT * const re_str = object_new(
"#[ \t]*include[ \t]*([A-Za-z][A-Za-z0-9_]*).*$" );
re_macros = regex_compile( re_str );
object_free( re_str );
}
if ( !( f = fopen( object_str( file ), "r" ) ) )
{
        /* No source files will be generated when the -n flag is passed. */
if ( !globs.noexec || errno != ENOENT )
err_printf( "[errno %d] failed to scan file '%s': %s",
errno, object_str( file ), strerror(errno) );
return l;
}
while ( fgets( buf, sizeof( buf ), f ) )
{
for ( i = 0; i < rec; ++i )
if ( regexec( re[ i ], buf ) && re[ i ]->startp[ 1 ] )
{
( (char *)re[ i ]->endp[ 1 ] )[ 0 ] = '\0';
if ( DEBUG_HEADER )
out_printf( "header found: %s\n", re[ i ]->startp[ 1 ] );
l = list_push_back( l, object_new( re[ i ]->startp[ 1 ] ) );
}
/* Special treatment for #include MACRO. */
if ( regexec( re_macros, buf ) && re_macros->startp[ 1 ] )
{
OBJECT * header_filename;
OBJECT * macro_name;
( (char *)re_macros->endp[ 1 ] )[ 0 ] = '\0';
if ( DEBUG_HEADER )
out_printf( "macro header found: %s", re_macros->startp[ 1 ] );
macro_name = object_new( re_macros->startp[ 1 ] );
header_filename = macro_header_get( macro_name );
object_free( macro_name );
if ( header_filename )
{
if ( DEBUG_HEADER )
out_printf( " resolved to '%s'\n", object_str( header_filename )
);
l = list_push_back( l, object_copy( header_filename ) );
}
else
{
if ( DEBUG_HEADER )
out_printf( " ignored !!\n" );
}
}
}
fclose( f );
return l;
}
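/*
 * Editorial sketch (illustrative pattern and input only, not part of the
 * original file): how a single HDRSCAN regexp is applied to one line,
 * mirroring the loop in headers1() above. Kept in an #if 0 block so it is
 * never compiled.
 */
#if 0
static void hdrscan_line_example( void )
{
    OBJECT * pattern = object_new(
        "^[ \t]*#[ \t]*include[ \t]*\"([^\"]*)\"" );
    regexp * re = regex_compile( pattern );
    char line[] = "#include \"bar.h\"";
    object_free( pattern );
    if ( regexec( re, line ) && re->startp[ 1 ] )
    {
        ( (char *)re->endp[ 1 ] )[ 0 ] = '\0';               /* terminate match */
        out_printf( "header found: %s\n", re->startp[ 1 ] ); /* prints bar.h */
    }
}
#endif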
void regerror( char const * s )
{
out_printf( "re error %s\n", s );
}

View File

@@ -0,0 +1,26 @@
/*
* Copyright 1993, 1995 Christopher Seiwald.
*
* This file is part of Jam - see jam.c for Copyright information.
*/
/*
* headers.h - handle #includes in source files
*/
#ifndef HEADERS_SW20111118_H
#define HEADERS_SW20111118_H
#include "config.h"
#include "object.h"
#include "rules.h"
#include "regexp.h"
void headers( TARGET * t );
#ifdef OPT_HEADER_CACHE_EXT
struct regexp;
LIST * headers1( LIST *l, OBJECT * file, int rec, struct regexp *re[] );
#endif
#endif

Some files were not shown because too many files have changed in this diff.