X-Git-Url: http://g0dil.de/git?a=blobdiff_plain;f=senfscons%2FSENFSCons.py;h=176b8ea670ff190dcc104ed4fc2c6caab5b8221f;hb=1a29b75bf774444ec182dfff8480a0c53597bf85;hp=8fd75038d457b58d3ce2b0529d7e48c8aeb815a5;hpb=fb503247154c4a0f966cef898c813e18b8bec6ed;p=senf.git diff --git a/senfscons/SENFSCons.py b/senfscons/SENFSCons.py index 8fd7503..176b8ea 100644 --- a/senfscons/SENFSCons.py +++ b/senfscons/SENFSCons.py @@ -46,250 +46,6 @@ from SCons.Script import * # The SENFSCons framework includes a series of builders. Each builder # is defined in it's own package. -# Tools to load in MakeEnvironment -SCONS_TOOLS = [ - "Doxygen", - "Dia2Png", - "PkgDraw", - "CopyToDir", - "ProgramNoScan", - "CompileCheck", -] - -opts = None -finalizers = [] - -# This is the directory SENFSCons.py resides -basedir = os.path.abspath(os.path.split(__file__)[0]) - -## \brief Initialize configuration options -# \internal -def InitOpts(): - global opts - if opts is not None: return - opts = SCons.Options.Options('SConfig') - opts.Add('CXX', 'C++ compiler to use', 'g++') - opts.Add('EXTRA_DEFINES', 'Additional preprocessor defines', '') - opts.Add('EXTRA_LIBS', 'Additional libraries to link against', '') - opts.Add('EXTRA_CCFLAGS', 'Additional compilation parameters', '') - opts.Add(SCons.Options.BoolOption('final','Enable optimization',0)) - opts.Add(SCons.Options.BoolOption('debug','Enable debug symbols in binaries',0)) - opts.Add(SCons.Options.BoolOption('profile','Enable profiling',0)) - opts.Add('PREFIX', 'Installation prefix', '/usr/local') - opts.Add('LIBINSTALLDIR', 'Library install dir', '$PREFIX/lib') - opts.Add('BININSTALLDIR', 'Executable install dir', '$PREFIX/bin') - opts.Add('INCLUDEINSTALLDIR', 'Include-file install dir', '$PREFIX/include') - opts.Add('OBJINSTALLDIR', 'Static object file install dir', '$LIBINSTALLDIR') - opts.Add('DOCINSTALLDIR', 'Documentation install dir', '$PREFIX/doc') - opts.Add('CPP_INCLUDE_EXTENSIONS', 'File extensions to include in source install', - [ '.h', '.hh', '.ih', '.mpp', '.cci', '.ct', '.cti', '.mpp' ]) - opts.Add('CPP_EXCLUDE_EXTENSIONS', 'File extensions to exclude from source install', - [ '.test.hh' ]) - -# A finalizer is any callable object. All finalizers will be called -# in MakeEnvironment. We use them so every finalizer has knowledge of -# all frameworks in use (e.g.: the boost runtime depends on the use of -# stlport). - -## \brief Register finalizer -# \internal -def Finalizer(f): - global finalizers - finalizers.append(f) - -## \brief Initialize the use of the Boost library -# -# Configure the use of the Boost -# libraries. Most of these libraries are header-only, some however -# depend on a built library. The library selection is somewhat -# involved and depends on the threading model and the type of build -# (debug or final). -# -# \par Configuration Parameters: -# -# -# -# -# -# -# -#
\c BOOST_INCLUDES        Include directory.
\c BOOST_LIBDIR          Library directory
\c BOOST_VARIANT         Complete variant specification
\c BOOST_TOOLSET         Toolset to use
\c BOOST_RUNTIME         Runtime to use
\c BOOST_DEBUG_RUNTIME   Explicit debug runtime
-# -# You can either specify \c BOOST_VARIANT explicitly or specify \c -# BOOST_TOOLSET and \c BOOST_RUNTIME. If you give \c BOOST_TOOLSET, \c -# BOOST_RUNTIME defaults to empty and \c BOOST_DEBUG_RUNTIME defaults -# to \c gd, If \c BOOST_TOOLSET is specified and you have included -# STLPort support (UseSTLPort()), then \c p is appended to both -# runtimes. -# -# The Boost configuration can get realtively complex. If the boost -# libraries are provided by the distribution, you probably don't need -# to specify any parameters. If your configuration is more complex, -# refer to the Boost.Build -# documentation for a definition of the terms used above (toolset, -# variant, runtime ...). -# -# \ingroup use -def UseBoost(): - global opts - InitOpts() - opts.Add('BOOST_INCLUDES', 'Boost include directory', '') - opts.Add('BOOST_VARIANT', 'The boost variant to use', '') - opts.Add('BOOST_TOOLSET', 'The boost toolset to use', '') - opts.Add('BOOST_RUNTIME', 'The boost runtime to use', '') - opts.Add('BOOST_DEBUG_RUNTIME', 'The boost debug runtime to use', '') - opts.Add('BOOST_LIBDIR', 'The directory of the boost libraries', '') - opts.Add('BOOST_PREFIX', 'The prefix into which boost is installed', '') - opts.Add('BOOST_VERSION', 'The version of boost to use', '') - Finalizer(FinalizeBoost) - -## \brief Finalize Boost environment -# \internal -def FinalizeBoost(env): - env.Tool('BoostUnitTests', [basedir]) - - if env['BOOST_TOOLSET']: - runtime = "" - if env['final'] : runtime += env.get('BOOST_RUNTIME','') - else : runtime += env.get('BOOST_DEBUG_RUNTIME','gd') - if env['STLPORT_LIB'] : runtime += "p" - if runtime: runtime = "-" + runtime - env['BOOST_VARIANT'] = "-" + env['BOOST_TOOLSET'] + runtime - - if env['BOOST_VARIANT'] and env['BOOST_VERSION']: - env['BOOST_VARIANT'] = env['BOOST_VARIANT'] + '-%s' % env['BOOST_VERSION'].replace('.','_') - - env['BOOSTTESTLIB'] = 'boost_unit_test_framework' + env['BOOST_VARIANT'] - env['BOOSTREGEXLIB'] = 'boost_regex' + env['BOOST_VARIANT'] - env['BOOSTFSLIB'] = 'boost_filesystem' + env['BOOST_VARIANT'] - env['BOOSTIOSTREAMSLIB'] = 'boost_iostreams' + env['BOOST_VARIANT'] - env['BOOSTSIGNALSLIB'] = 'boost_signals' + env['BOOST_VARIANT'] - - if env['BOOST_PREFIX']: - env['BOOST_LIBDIR'] = os.path.join(env['BOOST_PREFIX'], 'lib') - env['BOOST_INCLUDES'] = os.path.join(env['BOOST_PREFIX'], - 'include/boost-%s' - % env['BOOST_VERSION'].replace('.','_')) - - env.Append(LIBPATH = [ '$BOOST_LIBDIR' ], - CPPPATH = [ '$BOOST_INCLUDES' ]) - - if env['BOOST_LIBDIR']: - env.Append(ENV = { 'LD_LIBRARY_PATH': env['BOOST_LIBDIR'] }) - -## \brief Use STLPort as STL replacement if available -# -# Use STLPort as a replacement -# for the system STL. STLPort has the added feature of providing fully -# checked containers and iterators. This can greatly simplify -# debugging. However, STLPort and Boost interact in a non-trivial way -# so the configuration is relatively complex. This command does not -# enforce the use of STLPort, it is only used if available. -# -# \par Configuration Parameters: -# -# -# -# -# -#
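For illustration only, an SConfig fragment using these options might look like the following; all values here are invented, only the option names are the ones registered by UseBoost():

    # hypothetical SConfig entries -- adjust to the local Boost installation
    BOOST_PREFIX  = '/opt/boost'     # FinalizeBoost derives BOOST_LIBDIR and BOOST_INCLUDES from this
    BOOST_VERSION = '1.33.1'
    BOOST_TOOLSET = 'gcc41'
    BOOST_RUNTIME = 'mt'

With final=1, FinalizeBoost() would derive BOOST_VARIANT as '-gcc41-mt-1_33_1' and therefore link libraries such as boost_regex-gcc41-mt-1_33_1; a debug build would fall back to the '-gd' debug runtime instead.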
\c STLPORT_INCLUDES   Include directory.
\c STLPORT_LIBDIR     Library directory
\c STLPORT_LIB        Name of STLPort library
\c STLPORT_DEBUGLIB   Name of STLPort debug library
-# -# If \c STLPORT_LIB is specified, \c STLPORT_DEBUGLIB defaults to \c -# STLPORT_LIB with \c _stldebug appended. The STLPort library will -# only be used, if \c STLPORT_LIB is set in \c SConfig. -# -# \ingroup use -def UseSTLPort(): - global opts - InitOpts() - opts.Add('STLPORT_INCLUDES', 'STLport include directory', '') - opts.Add('STLPORT_LIB', 'Name of the stlport library or empty to not use stlport', '') - opts.Add('STLPORT_DEBUGLIB', 'Name of the stlport debug library','') - opts.Add('STLPORT_LIBDIR', 'The directory of the stlport libraries','') - Finalizer(FinalizeSTLPort) - -# \} - -## \brief Finalize STLPort environment -# \internal -def FinalizeSTLPort(env): - if env['STLPORT_LIB']: - if not env['STLPORT_DEBUGLIB']: - env['STLPORT_DEBUGLIB'] = env['STLPORT_LIB'] + '_stldebug' - env.Append(LIBPATH = [ '$STLPORT_LIBDIR' ], - CPPPATH = [ '$STLPORT_INCLUDES' ]) - if env['final']: - env.Append(LIBS = [ '$STLPORT_LIB' ]) - else: - env.Append(LIBS = [ '$STLPORT_DEBUGLIB' ], - CPPDEFINES = [ '_STLP_DEBUG' ]) - -## \brief Build a configured construction environment -# -# This function is called after all frameworks are specified to build -# a tailored construction environment. You can then use this -# construction environment just like an ordinary SCons construction -# environment (which it is ...) -# -# This call will set some default compilation parameters depending on -# the \c final command line option: specifying final=1 will -# built a release version of the code. -def MakeEnvironment(): - global opts, finalizers - InitOpts() - env = SCons.Environment.Environment(options=opts) - env.Replace(**SCons.Script.SConscript.Arguments) - #for opt in opts.options: - # if SCons.Script.SConscript.Arguments.get(opt.key): - # env[opt.key] = SCons.Script.SConscript.Arguments.get(opt.key) - #if SCons.Script.SConscript.Arguments.get('final'): - # env['final'] = 1 - env.Help("\nSupported build variables (either in SConfig or on the command line:\n") - env.Help(opts.GenerateHelpText(env)) - - # We want to pass the SSH_AUTH_SOCK system env-var so we can ssh - # into other hosts from within SCons rules. I have used rules like - # this e.g. to automatically install stuff on a remote system ... - if os.environ.has_key('SSH_AUTH_SOCK'): - env.Append( ENV = { 'SSH_AUTH_SOCK': os.environ['SSH_AUTH_SOCK'] } ) - - for finalizer in finalizers: - finalizer(env) - - for tool in SCONS_TOOLS: - env.Tool(tool, [basedir]) - - # These are the default compilation parameters. 
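As a purely illustrative sketch (paths and library names are hypothetical), STLPort would be enabled from SConfig like this:

    # hypothetical SConfig entries -- STLPort is only used if STLPORT_LIB is set
    STLPORT_INCLUDES = '/opt/stlport/include/stlport'
    STLPORT_LIBDIR   = '/opt/stlport/lib'
    STLPORT_LIB      = 'stlport_gcc'

With STLPORT_DEBUGLIB left empty, FinalizeSTLPort() defaults it to 'stlport_gcc_stldebug' and, in non-final builds, links that debug library and defines _STLP_DEBUG.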
-    # We should probably
-# # \ingroup target def Doxygen(env, doxyfile = "Doxyfile", extra_sources = []): - if not 'all' in BUILD_TARGETS and not 'doc' in BUILD_TARGETS and not 'all_docs' in BUILD_TARGETS: - return [] - # ARGHHH !!! without the [:] we are changing the target list - # ||| WITHIN THE DOXYGEN BUILDER - docs = env.Doxygen(doxyfile)[:] - xmlnode = None - htmlnode = None - tagnode = None - for doc in docs: - if isinstance(doc,SCons.Node.FS.Dir): continue - if doc.name == 'xml.stamp' : xmlnode = doc - if doc.name == 'html.stamp' : htmlnode = doc - if doc.name == 'search.idx' : continue - if os.path.splitext(doc.name)[1] == '.stamp' : continue # ignore other file stamps - # otherwise it must be the tag file - tagnode = doc - - if tagnode: - # Postprocess the tag file to remove the (broken) namespace - # references - env.AddPostAction( - docs, - SCons.Action.Action("xsltproc --nonet -o %(target)s.temp %(template)s %(target)s && mv %(target)s.temp %(target)s" - % { 'target': tagnode.abspath, - 'template': os.path.join(basedir,"tagmunge.xsl") })) - - if htmlnode and env.get('DOXY_HTML_XSL'): - xslfile = env.File(env['DOXY_HTML_XSL']) - reltopdir = '../' * len(htmlnode.dir.abspath[len(env.Dir('#').abspath)+1:].split('/')) - if reltopdir : reltopdir = reltopdir[:-1] - else : reltopdir = '.' - env.AddPostAction( - docs, - SCons.Action.Action(("for html in %s/*.html; do " + - " echo $$html;" + - " mv $${html} $${html}.orig;" + - " sed -e 's/id=\"current\"/class=\"current\"/' $${html}.orig" + - " | tidy -ascii -q --wrap 0 --show-warnings no --fix-uri no " + - " | sed -e 's/name=\"\([^\"]*\)\"\([^>]*\) id=\"\\1\"/name=\"\\1\"\\2/g'" + - " | xsltproc --novalid --nonet --html --stringparam topdir %s -o $${html} %s -;" - "done; true") - % (htmlnode.dir.abspath, reltopdir, xslfile.abspath))) - for doc in docs: - env.Depends(doc, xslfile) - - if xmlnode: - xrefs = [] - for type in env.get("DOXY_XREF_TYPES",[ "bug", "todo" ]): - xref = os.path.join(xmlnode.dir.abspath,type+".xml") - xref_pp = env.Command(xref+'i', [ xref, os.path.join(basedir,'xrefxtract.xslt'), xmlnode ], - [ "test -s $SOURCE && xsltproc --nonet -o $TARGET" + - " --stringparam module $MODULE" + - " --stringparam type $TYPE" + - " ${SOURCES[1]} $SOURCE || touch $TARGET" ], - MODULE = xmlnode.dir.dir.dir.abspath[ - len(env.Dir('#').abspath)+1:], - TYPE = type) - env.SideEffect(xref, xmlnode) - env.AddPreAction(docs, "rm -f %s" % (xref,)) - env.AddPostAction(docs, "test -r %s || touch %s" % (xref,xref)) - xrefs.extend(xref_pp) - docs.extend(xrefs) - - if extra_sources and htmlnode: - env.Depends(docs, - [ env.CopyToDir( source=source, target=htmlnode.dir ) - for source in extra_sources ]) - - if extra_sources and xmlnode: - env.Depends(docs, - [ env.CopyToDir( source=source, target=xmlnode.dir ) - for source in extra_sources ]) - - if not htmlnode and not xmlnode: - env.Depends(docs, extra_sources) - - for doc in docs : - env.Alias('all_docs', doc) - env.Clean('all_docs', doc) - env.Clean('all', doc) - + # There is one small problem we need to solve with this builder: The Doxygen builder reads + # the Doxyfile and thus depends on the environment variables set by doclib/doxygen.sh. We + # thus have to provide all necessary definitions here manually via DOXYENV ! 
+ + if type(doxyfile) is type(""): + doxyfile = env.File(doxyfile) + + # Module name is derived from the doxyfile path + # Utils/Console/Doxyfile -> Utils_Console + module = doxyfile.dir.abspath[len(env.Dir('#').abspath)+1:].replace('/','_') + if not module : module = "Main" + + # Rule to generate tagfile + # (need to exclude the 'clean' case, otherwise we'll have duplicate nodes) + if not env.GetOption('clean'): + tagfile = env.Doxygen(doxyfile, + DOXYOPTS = [ '--tagfile-name', '"${MODULE}.tag"', + '--tagfile' ], + DOXYENV = { 'TOPDIR' : env.Dir('#').abspath, + 'output_dir' : 'doc', + 'html_dir' : 'html', + 'html' : 'NO', + 'generate_tagfile': 'doc/${MODULE}.tag' }, + MODULE = module ) + env.Append(ALL_TAGFILES = tagfile[0].abspath) + env.Depends(tagfile, env.File('#/doclib/doxygen.sh')) + + # Rule to generate HTML documentation + doc = env.Doxygen(doxyfile, + DOXYOPTS = [ '--tagfiles', '"$ALL_TAGFILES"', + '--tagfile-name', '"${MODULE}.tag"', + '--html' ], + MODULE = module, + DOXYENV = { 'TOPDIR' : env.Dir('#').abspath, + 'tagfiles' : '${ALL_TAGFILES}', + 'output_dir' : 'doc', + 'html_dir' : 'html', + 'html' : 'YES' } ) + env.Depends(doc, env.File('#/doclib/doxygen.sh')) + + # Copy the extra_sources (the images) into the documentation directory + # (need to exclude the 'clean' case otherwise there are multiple ways to clean the copies) + if not env.GetOption('clean'): + if extra_sources: + env.Depends(doc, + [ env.CopyToDir( source=source, target=doc[0].dir ) + for source in extra_sources ]) + + # Install documentation into DOCINSTALLDIR l = len(env.Dir('#').abspath) - if htmlnode: - env.Alias('install_all', - env.Command('$DOCINSTALLDIR' + htmlnode.dir.abspath[l:], htmlnode.dir, - [ SCons.Defaults.Copy('$TARGET','$SOURCE') ])) - if tagnode: - env.Alias('install_all', - env.Install( '$DOCINSTALLDIR' + tagnode.dir.abspath[l:], - tagnode )) + env.Alias('install_all', + env.Command('$DOCINSTALLDIR' + doc[0].dir.abspath[l:], doc[0].dir, + [ SCons.Defaults.Copy('$TARGET','$SOURCE') ])) - return docs - -## \brief Build combined doxygen cross-reference -# -# This command will build a complete cross-reference of \c xrefitems -# accross all modules. -# -# Right now, this command is very project specific. It needs to be -# generalized. 
-# -# \ingroup target -def DoxyXRef(env, docs=None, - HTML_HEADER = None, HTML_FOOTER = None, - TITLE = "Cross-reference of action points"): - if docs is None: - docs = env.Alias('all_docs')[0].sources - xrefs = [ doc for doc in docs if os.path.splitext(doc.name)[1] == ".xmli" ] - xref = env.Command("doc/html/xref.xml", xrefs, - [ "echo '' > $TARGET", - "echo '' >> $TARGET", - "cat $SOURCES >> $TARGET", - "echo '' >>$TARGET" ]) - - # Lastly we create the html file - sources = [ xref, "%s/xrefhtml.xslt" % basedir ] - if HTML_HEADER : sources.append(HTML_HEADER) - if HTML_FOOTER : sources.append(HTML_FOOTER) - - commands = [] - if HTML_HEADER: - commands.append("sed" + - " -e 's/\\$$title/$TITLE/g'" + - " -e 's/\\$$projectname/Overview/g'" + - " ${SOURCES[2]} > $TARGET") - commands.append("xsltproc" + - " --stringparam title '$TITLE'" + - " --stringparam types '$DOXY_XREF_TYPES'" + - " ${SOURCES[1]} $SOURCE >> $TARGET") - if HTML_FOOTER: - commands.append( - "sed -e 's/\\$$title/$TITLE/g' -e 's/\\$$projectname/Overview/g' ${SOURCES[%d]} >> $TARGET" - % (HTML_HEADER and 3 or 2)) - - if env.get('DOXY_HTML_XSL'): - xslfile = env.File(env['DOXY_HTML_XSL']) - reltopdir = '../' * len(xref[0].dir.abspath[len(env.Dir('#').abspath)+1:].split('/')) - if reltopdir : reltopdir = reltopdir[:-1] - else : reltopdir = '.' - commands.append(("xsltproc -o ${TARGET}.tmp" + - " --nonet --html" + - " --stringparam topdir %s" + - " ${SOURCES[-1]} $TARGET 2>/dev/null") - % reltopdir) - commands.append("mv ${TARGET}.tmp ${TARGET}") - sources.append(xslfile) - - xref = env.Command("doc/html/xref.html", sources, commands, - TITLE = TITLE) - - env.Alias('all_docs',xref) - return xref + # Useful aliases + env.Alias('all_docs', doc) + env.Clean('all_docs', doc) + env.Clean('all', doc) + return doc ## \brief Build library # @@ -685,3 +314,6 @@ def AllIncludesHH(env, headers): file(target.abspath,"w").write("".join([ '#include "%s"\n' % f for f in headers ])) env.Clean('all', target) + +def PhonyTarget(env, target, action, sources=[]): + env.AlwaysBuild(env.Alias(target, sources, env.Action(action)))