import os.path, glob
import SCons.Options, SCons.Environment, SCons.Script.SConscript, SCons.Node.FS
import SCons.Defaults, SCons.Action
+from SCons.Script import *
## \defgroup use Predefined Framework Configurators
#
SCONS_TOOLS = [
"Doxygen",
"Dia2Png",
+ "CopyToDir",
+ "InstallIncludes",
+ "ProgramNoScan",
+ "CompileCheck",
]
opts = None
opts.Add('EXTRA_DEFINES', 'Additional preprocessor defines', '')
opts.Add('EXTRA_LIBS', 'Additional libraries to link against', '')
opts.Add(SCons.Options.BoolOption('final','Enable optimization',0))
+ opts.Add('PREFIX', 'Installation prefix', '/usr/local')
+ opts.Add('LIBINSTALLDIR', 'Library install dir', '$PREFIX/lib')
+ opts.Add('BININSTALLDIR', 'Executable install dir', '$PREFIX/bin')
+ opts.Add('INCLUDEINSTALLDIR', 'Include-file install dir', '$PREFIX/include')
+ opts.Add('OBJINSTALLDIR', 'Static object file install dir', '$LIBINSTALLDIR')
+ opts.Add('DOCINSTALLDIR', 'Documentation install dir', '$PREFIX/doc')
+ opts.Add('CPP_INCLUDE_EXTENSIONS', 'File extensions to include in source install',
+ [ '.h', '.hh', '.ih', '.mpp', '.cci', '.ct', '.cti', '.mpp' ])
+ opts.Add('CPP_EXCLUDE_EXTENSIONS', 'File extensions to exclude from source install',
+ [ '.test.hh' ])
# A finalizer is any callable object. All finalizers will be called
# in MakeEnvironment. We use them so every finalizer has knowledge of
# libraries are provided by the distribution, you probably don't need
# to specify any parameters. If your configuration is more complex,
# refer to the <a
-# href="http://www.boost.org/tools/build/v1/build_system.htm">Boost.Build</a>
+# href="http://www.boost.org/tools/build/v2/index.html">Boost.Build</a>
# documentation for a definition of the terms used above (toolset,
# variant, runtime ...).
#
opts.Add('BOOST_RUNTIME', 'The boost runtime to use', '')
opts.Add('BOOST_DEBUG_RUNTIME', 'The boost debug runtime to use', '')
opts.Add('BOOST_LIBDIR', 'The directory of the boost libraries', '')
+ opts.Add('BOOST_PREFIX', 'The prefix into which boost is installed', '')
+ opts.Add('BOOST_VERSION', 'The version of boost to use', '')
Finalizer(FinalizeBoost)
## \brief Finalize Boost environment
if runtime: runtime = "-" + runtime
env['BOOST_VARIANT'] = "-" + env['BOOST_TOOLSET'] + runtime
- env['BOOSTTESTLIB'] = 'libboost_unit_test_framework' + env['BOOST_VARIANT']
+ if env['BOOST_VARIANT'] and env['BOOST_VERSION']:
+ env['BOOST_VARIANT'] = env['BOOST_VARIANT'] + '-%s' % env['BOOST_VERSION'].replace('.','_')
+
+ env['BOOSTTESTLIB'] = 'boost_unit_test_framework' + env['BOOST_VARIANT']
+ env['BOOSTREGEXLIB'] = 'boost_regex' + env['BOOST_VARIANT']
+ env['BOOSTFSLIB'] = 'boost_filesystem' + env['BOOST_VARIANT']
+ env['BOOSTIOSTREAMSLIB'] = 'boost_iostreams' + env['BOOST_VARIANT']
+
+ if env['BOOST_PREFIX']:
+ env['BOOST_LIBDIR'] = os.path.join(env['BOOST_PREFIX'], 'lib')
+ env['BOOST_INCLUDES'] = os.path.join(env['BOOST_PREFIX'],
+ 'include/boost-%s'
+ % env['BOOST_VERSION'].replace('.','_'))
env.Append(LIBPATH = [ '$BOOST_LIBDIR' ],
CPPPATH = [ '$BOOST_INCLUDES' ])
+ if env['BOOST_LIBDIR']:
+ env.Append(ENV = { 'LD_LIBRARY_PATH': env['BOOST_LIBDIR'] })
+
## \brief Use STLPort as STL replacement if available
#
# Use <a href="http://www.stlport.org">STLPort</a> as a replacement
global opts, finalizers
InitOpts()
env = SCons.Environment.Environment(options=opts)
- if SCons.Script.SConscript.Arguments.get('final'):
- env['final'] = 1
+ env.Replace(**SCons.Script.SConscript.Arguments)
+ #for opt in opts.options:
+ # if SCons.Script.SConscript.Arguments.get(opt.key):
+ # env[opt.key] = SCons.Script.SConscript.Arguments.get(opt.key)
+ #if SCons.Script.SConscript.Arguments.get('final'):
+ # env['final'] = 1
+ env.Help("\nSupported build variables (either in SConfig or on the command line:\n")
env.Help(opts.GenerateHelpText(env))
# We want to pass the SSH_AUTH_SOCK system env-var so we can ssh
# These are the default compilation parameters. We should probably
# make these configurable
- env.Append(CXXFLAGS = [ '-Wall', '-Woverloaded-virtual', '-Wno-long-long' ],
- LOCALLIBDIR = [ '#' ],
+ env.Append(LOCALLIBDIR = [ '#' ],
LIBPATH = [ '$LOCALLIBDIR' ])
if env['final']:
- env.Append(CXXFLAGS = [ '-O3' ],
- CPPDEFINES = [ 'NDEBUG' ])
+ env.Append(CXXFLAGS = [ '-O3' ])
else:
- env.Append(CXXFLAGS = [ '-O0', '-g', '-fno-inline' ],
- LINKFLAGS = [ '-g' ])
+ # The boost-regex library is not compiled with _GLIBCXX_DEBUG so this fails:
+ # CPPDEFINES = [ '_GLIBCXX_DEBUG' ],
+ env.Append(CXXFLAGS = [ '-O0', '-g' ],
+ CPPDEFINES = { 'SENF_DEBUG': ''},
+ LINKFLAGS = [ '-g', '-rdynamic' ])
env.Append(CPPDEFINES = [ '$EXTRA_DEFINES' ],
- LIBS = [ '$EXTRA_LIBS' ])
+ LIBS = [ '$EXTRA_LIBS' ],
+ ALLLIBS = [])
return env
# in the current directory. The sources will be returned as a tuple of
# sources, test-sources. The target helpers all accept such a tuple as
# their source argument.
def GlobSources(exclude=None, subdirs=None):
    """Collect C++ sources in the current directory (and optional subdirs).

    Files matching '*.test.cc' are collected as test sources; all other
    '*.cc' files not listed in *exclude* become regular sources.

    :param exclude: filenames to omit from the regular source list
        (test sources are never filtered)
    :param subdirs: additional directories to scan the same way
    :return: tuple ``(sources, testSources)`` suitable as the source
        argument of the target helpers
    """
    # None-sentinels instead of mutable default arguments ([] defaults
    # are a shared-state hazard even when, as here, they are only read).
    if exclude is None:
        exclude = []
    if subdirs is None:
        subdirs = []
    testSources = glob.glob("*.test.cc")
    sources = [ x for x in glob.glob("*.cc")
                if x not in testSources and x not in exclude ]
    for subdir in subdirs:
        testSources += glob.glob(os.path.join(subdir, "*.test.cc"))
        sources += [ x for x in glob.glob(os.path.join(subdir, "*.cc"))
                     if x not in testSources and x not in exclude ]
    return (sources, testSources)
## \brief Add generic standard targets for every module
#
# \ingroup target
def GlobalTargets(env):
    """Register the umbrella 'all' alias covering build, tests and docs."""
    umbrella = [ 'default', 'all_tests', 'all_docs' ]
    env.Alias('all', umbrella)
## \brief Return path of a built library within $LOCALLIBDIR
#
# The result is an unexpanded SCons path template; the environment
# substitutes LOCALLIBDIR, LIBPREFIX, LIBADDSUFFIX and LIBSUFFIX later.
# \internal
def LibPath(lib):
    return '${LOCALLIBDIR}/${LIBPREFIX}' + lib + '${LIBADDSUFFIX}${LIBSUFFIX}'
+
def Test(env, sources, LIBS = [], OBJECTS = []):
    """Build a standalone unit-test runner from *sources*.

    Registers the runner with the 'all_tests' alias and creates a pseudo
    'test' target file (see note below).

    :param sources: test source files ('*.test.cc')
    :param LIBS: in-tree libraries to link against; '$LIBADDSUFFIX' is
        appended to each name and the built library file is added as an
        explicit dependency
    :param OBJECTS: extra object files handed to the test builder
    """
    test = [ env.BoostUnitTests(
        target = 'test',
        objects = [],
        test_sources = sources,
        LIBS = [ x + '$LIBADDSUFFIX' for x in LIBS ],
        OBJECTS = OBJECTS,
        DEPENDS = [ env.File(LibPath(x)) for x in LIBS ]) ]
    # Sources containing the COMPILE_CHECK marker additionally get a
    # compile-only check.  open() with explicit close replaces the
    # original leaked py2-only file() handle.
    compileTestSources = []
    for src in sources:
        f = open(src)
        try:
            if 'COMPILE_CHECK' in f.read():
                compileTestSources.append(src)
        finally:
            f.close()
    if compileTestSources:
        test.extend(env.CompileCheck(source = compileTestSources))
    env.Alias('all_tests', test)
    # A plain Alias does not live in the subdirectory, which breaks
    # 'scons -u test' -- hence the pseudo target file.
    env.Command(env.File('test'), test, [])
    #env.Alias(env.File('test'), test)
+
## \brief Build object files
#
# provide both \a sources and \a testSources.
#
# \ingroup target
def Objects(env, sources, testSources = None, LIBS = [], OBJECTS = [], no_includes = False):
    """Compile *sources* into object files and register unit tests.

    :param sources: list of source files, a single source, or the
        ``(sources, testSources)`` tuple returned by GlobSources();
        entries that are already objects ('*.o' names or non-string
        nodes) are passed through uncompiled
    :param testSources: test source files ('*.test.cc')
    :param LIBS: in-tree libraries the test binary links against
    :param OBJECTS: extra objects handed to the test builder
    :param no_includes: accepted for interface compatibility with the
        other target helpers; not used here
    :return: list of object nodes, or None when there are no sources
    """
    # Accept the (sources, testSources) tuple produced by GlobSources().
    if isinstance(sources, tuple):
        testSources = sources[1]
        sources = sources[0]
    if not isinstance(sources, list):
        sources = [ sources ]
    objects = None
    if sources:
        # Split compile-me sources from pass-through object files.
        obsources = [ source
                      for source in sources
                      if isinstance(source, str) and not source.endswith('.o') ]
        objects = [ source
                    for source in sources
                    if not isinstance(source, str) or source.endswith('.o') ]
        if obsources:
            objects += env.Object(obsources)
    if testSources:
        test = [ env.BoostUnitTests(
            target = 'test',
            objects = objects,
            test_sources = testSources,
            LIBS = [ x + '$LIBADDSUFFIX' for x in LIBS ],
            OBJECTS = OBJECTS,
            DEPENDS = [ env.File(LibPath(x)) for x in LIBS ]) ]
        # open() with explicit close replaces the original leaked
        # py2-only file() handle.
        compileTestSources = []
        for src in testSources:
            f = open(src)
            try:
                if 'COMPILE_CHECK' in f.read():
                    compileTestSources.append(src)
            finally:
                f.close()
        if compileTestSources:
            test.extend(env.CompileCheck(source = compileTestSources))
        env.Alias('all_tests', test)
        # Hmm ... here I'd like to use an Alias instead of a file
        # however the alias does not seem to live in the subdirectory
        # which breaks 'scons -u test'
        env.Command(env.File('test'), test, [])
        #env.Alias(env.File('test'), test)
    return objects
def InstallIncludeFiles(env, files):
    """Install the given header files below $INCLUDEINSTALLDIR.

    Each file keeps its directory path relative to $INSTALL_BASE; the
    installs are attached to the 'install_all' alias.
    """
    # Hrmpf ... why do I need this in 0.97??
    if env.GetOption('clean'):
        return
    destRoot = env.Dir(env['INCLUDEINSTALLDIR'])
    baseDir = env.Dir(env['INSTALL_BASE'])
    for name in files:
        node = env.File(name)
        relDir = node.dir.get_path(baseDir)
        env.Alias('install_all', env.Install(destRoot.Dir(relDir), node))
+
def InstallSourceIncludes(env, sources):
    """Run the InstallIncludes builder on *sources* for 'install_all'.

    The target directory mirrors the current directory's position
    relative to $INSTALL_BASE below $INCLUDEINSTALLDIR.
    """
    here = env.Dir('.')
    relPath = here.get_path(env.Dir(env['INSTALL_BASE']))
    destDir = env.Dir(env['INCLUDEINSTALLDIR']).Dir(relPath)
    nodes = [ env.File(x) if type(x) is str else x for x in sources ]
    install = env.InstallIncludes(target = destDir,
                                  source = nodes,
                                  INSTALL_BASE = here)
    env.Alias('install_all', install)
+
## \brief Install *targets* into *dir* and register include installation
#
# NOTE(review): several things here look suspicious -- confirm intent
# before relying on them:
#  * when no_includes is False, 'sources' is rebound to 'targets', so the
#    *built* targets (not the source files) are handed to
#    InstallSourceIncludes;
#  * the append below then mutates the caller's 'targets' list when
#    testSources is given;
#  * InstallSourceIncludes returns None, so the returned 'installs' list
#    contains a trailing None in that branch.
def InstallWithSources(env, targets, dir, sources, testSources = [], no_includes = False):
    # Accept the (sources, testSources) tuple produced by GlobSources();
    # normalize single entries to lists.
    if type(sources) is type(()):
        sources, testSources = sources
    if type(sources) is not type([]):
        sources = [ sources ]
    if type(testSources) is not type([]):
        testSources = [ testSources ]

    # installs[:] so later appends don't change what the alias refers to.
    installs = [ env.Install(dir, targets) ]
    env.Alias( 'install_all', installs[:] )

    if not no_includes:
        # NOTE(review): rebinding discards the original source list -- TODO confirm.
        sources = targets
        if testSources:
            sources.append( env.File('.test.bin') )
        installs.append(
            InstallSourceIncludes(env, sources))

    return installs
+
## \brief Build documentation with doxygen
#
# The doxygen target helper will build software documentation using
# generated) by the given XSLT stylesheet. Since the HTML
# generated by doxygen is broken, we first filter the code through
# HTML-\c tidy and filter out some error messages.
-# \li If xml output is generatedwe create files \c bug.xmli and \c
+# \li If xml output is generated we create files \c bug.xmli and \c
# todo.xmli which contain all bugs and todo items specified in the
# sources. The format of these files is much more suited to
# postprocessing and is a more database like format as the doxygen
#
# \ingroup target
def Doxygen(env, doxyfile = "Doxyfile", extra_sources = []):
+ if not 'all' in BUILD_TARGETS and not 'doc' in BUILD_TARGETS and not 'all_docs' in BUILD_TARGETS:
+ return []
# ARGHHH !!! without the [:] we are changing the target list
# ||| WITHIN THE DOXYGEN BUILDER
docs = env.Doxygen(doxyfile)[:]
docs,
SCons.Action.Action(("for html in %s/*.html; do " +
" echo $$html;" +
- " sed -e 's/id=\"current\"/class=\"current\"/' $${html}" +
- " | tidy -ascii -q --show-warnings no --fix-uri no" +
- " | xsltproc --nonet --html --stringparam topdir %s -o $${html}.new %s - 2>&1" +
- " | grep '^-'" +
- " | grep -v 'ID .* already defined';" +
- " mv $${html}.new $${html}; " +
- "done")
+ " mv $${html} $${html}.orig;" +
+ " sed -e 's/id=\"current\"/class=\"current\"/' $${html}.orig" +
+ " | tidy -ascii -q --wrap 0 --show-warnings no --fix-uri no " +
+ " | sed -e 's/name=\"\([^\"]*\)\"\([^>]*\) id=\"\\1\"/name=\"\\1\"\\2/g'" +
+ " | xsltproc --novalid --nonet --html --stringparam topdir %s -o $${html} %s -;"
+ "done; true")
% (htmlnode.dir.abspath, reltopdir, xslfile.abspath)))
for doc in docs:
- env.Depends(doc,xslfile)
+ env.Depends(doc, xslfile)
if xmlnode:
xrefs = []
for type in env.get("DOXY_XREF_TYPES",[ "bug", "todo" ]):
xref = os.path.join(xmlnode.dir.abspath,type+".xml")
xref_pp = env.Command(xref+'i', [ xref, os.path.join(basedir,'xrefxtract.xslt'), xmlnode ],
- [ "test -s $SOURCE && xsltproc -o $TARGET" +
+ [ "test -s $SOURCE && xsltproc --nonet -o $TARGET" +
" --stringparam module $MODULE" +
" --stringparam type $TYPE" +
" ${SOURCES[1]} $SOURCE || touch $TARGET" ],
- MODULE = xmlnode.dir.dir.dir.name,
+ MODULE = xmlnode.dir.dir.dir.abspath[
+ len(env.Dir('#').abspath)+1:],
TYPE = type)
env.SideEffect(xref, xmlnode)
env.AddPreAction(docs, "rm -f %s" % (xref,))
xrefs.extend(xref_pp)
docs.extend(xrefs)
- env.Depends(docs,extra_sources)
+ if extra_sources and htmlnode:
+ env.Depends(docs,
+ [ env.CopyToDir( source=source, target=htmlnode.dir )
+ for source in extra_sources ])
+
+ if extra_sources and xmlnode:
+ env.Depends(docs,
+ [ env.CopyToDir( source=source, target=xmlnode.dir )
+ for source in extra_sources ])
+
+ if not htmlnode and not xmlnode:
+ env.Depends(docs, extra_sources)
+
for doc in docs :
env.Alias('all_docs', doc)
env.Clean('all_docs', doc)
env.Clean('all', doc)
+ l = len(env.Dir('#').abspath)
+ if htmlnode:
+ env.Alias('install_all',
+ env.Command('$DOCINSTALLDIR' + htmlnode.dir.abspath[l:], htmlnode.dir,
+ [ SCons.Defaults.Copy('$TARGET','$SOURCE') ]))
+ if tagnode:
+ env.Alias('install_all',
+ env.Install( '$DOCINSTALLDIR' + tagnode.dir.abspath[l:],
+ tagnode ))
+
return docs
## \brief Build combined doxygen cross-reference
docs = env.Alias('all_docs')[0].sources
xrefs = [ doc for doc in docs if os.path.splitext(doc.name)[1] == ".xmli" ]
xref = env.Command("doc/html/xref.xml", xrefs,
- [ "echo -e '<?xml version=\"1.0\"?>\\n<xref>' >$TARGET",
+ [ "echo '<?xml version=\"1.0\"?>' > $TARGET",
+ "echo '<xref>' >> $TARGET",
"cat $SOURCES >> $TARGET",
"echo '</xref>' >>$TARGET" ])
commands = []
if HTML_HEADER:
- commands.append(
- "sed -e 's/\\$$title/$TITLE/g' -e 's/\\$$projectname/Overview/g' ${SOURCES[2]} > $TARGET")
- commands.append("xsltproc --stringparam title '$TITLE' --stringparam types '$DOXY_XREF_TYPES' ${SOURCES[1]} $SOURCE >> $TARGET")
+ commands.append("sed" +
+ " -e 's/\\$$title/$TITLE/g'" +
+ " -e 's/\\$$projectname/Overview/g'" +
+ " ${SOURCES[2]} > $TARGET")
+ commands.append("xsltproc" +
+ " --stringparam title '$TITLE'" +
+ " --stringparam types '$DOXY_XREF_TYPES'" +
+ " ${SOURCES[1]} $SOURCE >> $TARGET")
if HTML_FOOTER:
commands.append(
"sed -e 's/\\$$title/$TITLE/g' -e 's/\\$$projectname/Overview/g' ${SOURCES[%d]} >> $TARGET"
% (HTML_HEADER and 3 or 2))
+ if env.get('DOXY_HTML_XSL'):
+ xslfile = env.File(env['DOXY_HTML_XSL'])
+ reltopdir = '../' * len(xref[0].dir.abspath[len(env.Dir('#').abspath)+1:].split('/'))
+ if reltopdir : reltopdir = reltopdir[:-1]
+ else : reltopdir = '.'
+ commands.append(("xsltproc -o ${TARGET}.tmp" +
+ " --nonet --html" +
+ " --stringparam topdir %s" +
+ " ${SOURCES[-1]} $TARGET 2>/dev/null")
+ % reltopdir)
+ commands.append("mv ${TARGET}.tmp ${TARGET}")
+ sources.append(xslfile)
+
xref = env.Command("doc/html/xref.html", sources, commands,
TITLE = TITLE)
# The library is added to the list of default targets.
#
#\ingroup target
def Lib(env, library, sources, testSources = None, LIBS = [], OBJECTS = [], no_includes = False):
    """Build static library *library* from *sources* and install it.

    The library is added to the default targets, recorded in ALLLIBS and
    installed (with its include files unless *no_includes*) below
    $LIBINSTALLDIR.  Returns the library node, or None without sources.
    """
    objects = Objects(env, sources, testSources, LIBS=LIBS, OBJECTS=OBJECTS)
    if not objects:
        return None
    lib = env.Library(env.File(LibPath(library)), objects)
    env.Default(lib)
    env.Append(ALLLIBS = library)
    env.Alias('default', lib)
    InstallWithSources(env, lib, '$LIBINSTALLDIR', sources, testSources, no_includes)
    return lib
## \brief Build combined ('ld -r') object file from multiple sources
def Object(env, target, sources, testSources = None, LIBS = [], OBJECTS = [], no_includes = False):
    """Link all objects built from *sources* into one relocatable object.

    The combined object becomes a default target and is installed below
    $OBJINSTALLDIR.  Returns the object node, or None without sources.
    """
    objects = Objects(env, sources, testSources, LIBS=LIBS, OBJECTS=OBJECTS)
    if not objects:
        return None
    combined = env.Command(target + "${OBJADDSUFFIX}${OBJSUFFIX}", objects,
                           "ld -r -o $TARGET $SOURCES")
    env.Default(combined)
    env.Alias('default', combined)
    InstallWithSources(env, combined, '$OBJINSTALLDIR', sources, testSources, no_includes)
    return combined
+
## \brief Build executable
#
# This target helper will build the given binary. The \a sources, \a
# construction environment parameters or the framework helpers.
#
# \ingroup target
def Binary(env, binary, sources, testSources = None, LIBS = [], OBJECTS = [], no_includes = False):
    """Build executable *binary* from *sources* and install it.

    Links against the in-tree libraries in *LIBS* ('$LIBADDSUFFIX' is
    appended, the built library files become dependencies), makes the
    program a default target and installs it below $BININSTALLDIR.
    Returns the program node, or None without sources.
    """
    objects = Objects(env, sources, testSources, LIBS=LIBS, OBJECTS=OBJECTS)
    if not objects:
        return None
    progEnv = env.Clone()
    progEnv.Prepend(LIBS = [ lib + '$LIBADDSUFFIX' for lib in LIBS ])
    program = progEnv.ProgramNoScan(target=binary, source=objects+OBJECTS)
    env.Default(program)
    env.Depends(program, [ env.File(LibPath(lib)) for lib in LIBS ])
    env.Alias('default', program)
    InstallWithSources(env, program, '$BININSTALLDIR', sources, testSources, no_includes)
    return program
+
def AllIncludesHH(env, headers):
    """Generate 'all_includes.hh' #including every header in *headers*.

    Sorts *headers* in place, writes the file immediately (at SConscript
    read time, not as a builder action) and schedules it for removal by
    'scons -c all'.
    """
    headers.sort()
    target = env.File("all_includes.hh")
    # open() + explicit close replaces the original py2-only file() call,
    # which also leaked the write handle.
    f = open(target.abspath, "w")
    try:
        f.write("".join([ '#include "%s"\n' % h for h in headers ]))
    finally:
        f.close()
    env.Clean('all', target)