X-Git-Url: http://g0dil.de/git?a=blobdiff_plain;f=doclib%2FSConscript;h=9fbf7d9f0c18a0da6ae43741c8eb5d740271e953;hb=164fe477094d42463722584e527a02379ab5d985;hp=0d93b9338f81176ba9c4c09cbacf179f26a83fa0;hpb=47be4c5261340d3e2ea208bb0b2d96fa256d829a;p=senf.git

diff --git a/doclib/SConscript b/doclib/SConscript
index 0d93b93..9fbf7d9 100644
--- a/doclib/SConscript
+++ b/doclib/SConscript
@@ -1,19 +1,336 @@
 # -*- python -*-
+#
+# The documentation generation process is tightly integrated with the
+# scons build framework:
+#
+# * SCons analyzes the Doxyfiles to find all the documentation
+#   dependencies. This happens in the doxygen builder in
+#   senfscons/Doxygen.py.
+#
+# * the doclib/doxy-header.html and/or doclib/doxy-footer.html files
+#   are regenerated
+#
+# * If any documentation is out-of-date with respect to its source
+#   files, the documentation is regenerated.
+#
+# * To fix some link errors, the additional 'linklint' and 'fixlinks'
+#   targets are used
+#
+#
+# 1. Scanning the Doxyfiles
+#
+# The doxygen builder scans all documentation source files whose name
+# contains the text 'doxyfile' (in any capitalization). It understands
+# @INCLUDE directives and will find all the dependencies of the
+# documentation (a rough sketch of such a scan closes this comment
+# block):
+#
+# * All the source files as selected by INPUT, INPUT_PATTERN,
+#   RECURSIVE and so on.
+#
+# * Any referenced tag-files
+#
+# * Documentation header and/or footer
+#
+# * The INPUT_FILTER program
+#
+# * Any included doxygen configuration files
+#
+#
+# 2. Regenerating header and/or footer
+#
+# If needed, the doxy-header.html and/or doxy-footer.html file will be
+# regenerated. The header and/or footer are generated from templates
+# using a simple python based templating system called yaptu which is
+# included in doclib/.
+#
+#
+# 3. Calling doxygen
+#
+# The doxygen call itself is quite complex since there is some pre-
+# and post-processing going on. We can separate this step into two
+# parts:
+#
+# * Building prerequisites (e.g. images)
+#
+# * The processing done by the Doxygen builder and doclib/doxygen.sh
+#
+#
+# 3.1. Building prerequisites
+#
+# The prerequisites are images referenced by the documentation. These
+# images are mostly generated using the Dia2Png builder.
+#
+#
+# 3.2. The main doxygen build (Doxygen builder)
+#
+# The Doxygen builder will call the doxygen command to build the
+# documentation.
+#
+# The doxygen command is configured as 'doclib/doxygen.sh', which
+# performs additional processing besides calling doxygen proper:
+#
+# * it sets environment variables depending on command line arguments.
+#   These variables are then used in the Doxyfiles
+#
+# * after doxygen is finished, 'installdox' is called to resolve
+#   tag file references.
+#
+# * the HTML documentation is post-processed using sed, tidy, and
+#   an XSLT template
+#
+# * a generated tag file is post-processed using an XSLT template
+#
+# (see doclib/doxygen.sh for more information). The Doxygen
+# configuration is set up such that
+#
+# * doxygen calls 'doclib/filter.pl' on each source file. This filter
+#   will strip excess whitespace from the beginning of lines in
+#   '\code' and '<pre>' blocks. Additionally it will expand all tabs;
+#   the tab width is 8 spaces (there should be no tabs in the source,
+#   but ...)
+# 
+# * doxygen calls 'doclib/dot' to generate the 'dot' images.
+#
+# * 'doclib/dot' calls 'doclib/dot-munge.pl' on the .dot
+#    files. dot-munge.pl changes the font and font-size and adds
+#    line-breaks to long labels
+#
+# * 'doclib/dot' calls the real dot binary. If the resulting image is
+#   more than 800 pixels wide, dot is called again, this time using
+#   the opposite rank direction (top-bottom vs. left-right). The image
+#   with the smaller width is selected and returned (sketched below).
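+#
+#   A rough Python sketch of that width check. The real code is the
+#   'doclib/dot' wrapper; the helper below is only an illustration and
+#   which rank direction is tried first is an assumption. The PNG width
+#   is read from the IHDR chunk:
+#
+#     import os, struct, subprocess
+#
+#     def png_width(path):
+#         # bytes 16..19 of a PNG file hold the image width (big endian)
+#         return struct.unpack(">I", open(path, "rb").read(20)[16:20])[0]
+#
+#     def render_narrowest(dotfile, out, limit=800):
+#         subprocess.check_call(["dot", "-Tpng", "-Grankdir=LR",
+#                                "-o", out, dotfile])
+#         if png_width(out) > limit:
+#             subprocess.check_call(["dot", "-Tpng", "-Grankdir=TB",
+#                                    "-o", out + ".tb", dotfile])
+#             if png_width(out + ".tb") < png_width(out):
+#                 os.rename(out + ".tb", out)
+#             else:
+#                 os.remove(out + ".tb")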
+#
+#
+# 4. Fixing broken links
+#
+# After the documentation has been generated, additional calls, first
+# to the 'linklint' target and then to the 'fixlinks' target, will try
+# to fix broken links generated by doxygen. The 'linklint' target runs
+# the linklint tool to check for broken links in the documentation.
+#
+# 'fixlinks' is then called, which runs 'doclib/fix-links.py': it
+# scans *all* html files, builds an index of all (unique) anchors and
+# then fixes the URL part of all links which have a correct anchor but
+# a bad file name.
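+#
+# A condensed illustration of that idea. The real implementation is
+# doclib/fix-links.py and handles many more cases; the helper below is
+# invented for this sketch:
+#
+#   import os, re
+#
+#   def anchor_index(root):
+#       # map anchor name -> set of html files defining it
+#       index = {}
+#       for dirpath, dirs, files in os.walk(root):
+#           for f in files:
+#               if not f.endswith('.html'): continue
+#               path = os.path.join(dirpath, f)
+#               for anchor in re.findall(r'(?:name|id)="([^"]+)"',
+#                                        open(path).read()):
+#                   index.setdefault(anchor, set()).add(path)
+#       return index
+#
+#   # A link '<file>#<anchor>' whose target file does not define
+#   # <anchor> can be repointed if exactly one file in the index
+#   # defines that anchor.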
+#
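+# Appendix: a very rough illustration of the Doxyfile scan described in
+# section 1 above. This is not the code from senfscons/Doxygen.py, just
+# a sketch of the idea (continuation lines and '+=' assignments are
+# ignored here):
+#
+#   def doxyfile_dependencies(path, seen=None):
+#       if seen is None: seen = set()
+#       deps = []
+#       for line in open(path):
+#           line = line.split('#', 1)[0].strip()
+#           if '=' not in line: continue
+#           key, value = [ s.strip() for s in line.split('=', 1) ]
+#           if key == '@INCLUDE' and value not in seen:
+#               seen.add(value)
+#               deps.extend(doxyfile_dependencies(value, seen))
+#           elif key in ('INPUT', 'TAGFILES', 'HTML_HEADER',
+#                        'HTML_FOOTER', 'INPUT_FILTER'):
+#               deps.extend(value.split())
+#       return deps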
+
 
 Import('env')
-import SENFSCons
+import SENFSCons, datetime, os
+
+###########################################################################
+
+import yaptu
+
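+# modules() returns one (name, html directory, nesting level) tuple per
+# documentation module: it collects all 'html.stamp' targets below the
+# 'all_docs' alias, sorts nested module directories directly behind
+# their parent, prefixes top-level module names with 'lib' and finally
+# renames the EXTRA_MODULES entries and moves them to the front of the
+# list. The yaptu templates can use it because processTemplate() passes
+# this module's globals to yaptu.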
+def modules():
+    # Well ... this is a bit of a hack, but so be it ...
+    global EXTRA_MODULES
+    mods = {}
+    pathbase = len(env.Dir('#').abspath)+1
+    for module in env.Alias('all_docs')[0].sources:
+        if module.name != 'html.stamp' : continue 
+        mods[module.dir.dir.dir.abspath] = [ module.dir.dir.dir.name,
+                                             module.dir.abspath[pathbase:],
+                                             0 ]
+        
+    rv = []
+    keys = mods.keys()
+    keys.sort()
+    for mod in keys:
+        i = 0
+        while i < len(rv):
+            if len(rv[i]) > pathbase and mod.startswith(rv[i] + '/'):
+                level = mods[rv[i]][2] + 1
+                i += 1
+                while i < len(rv) and mods[rv[i]][2] >= level:
+                    i += 1
+                rv[i:i] = [ mod ]
+                mods[mod][2] = level
+                break
+            i += 1
+        if i == len(rv):
+            rv.append(mod)
+
+    for mod in keys:
+        if mods[mod][2] == 0:
+            mods[mod][0] = 'lib' + mods[mod][0]
+
+    n = 0
+    for name,path in EXTRA_MODULES:
+        path = env.Dir(path).dir.dir.abspath
+        i = 0
+        while i < len(rv):
+            if rv[i] == path:
+                mods[rv[i]][0] = name
+                m = 1
+                while i+m < len(rv) and mods[rv[i+m]][2] > mods[rv[i]][2]:
+                    m += 1
+                rv[n:n] = rv[i:i+m]
+                rv[i+m:i+2*m] = []
+                i += m
+                n += m
+            else:
+                i += 1
+
+    return ( tuple(mods[mod]) for mod in rv )
+
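+# indices() returns the doc directories (relative to the top directory)
+# which contain a generated doxygen 'search.idx' file; presumably this
+# list ends up in search_paths.php via the SEARCH_PATHS_PHP template.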
+def indices():
+    ix = len(env.Dir('#').abspath)+1
+    return [ doc.dir.abspath[ix:]
+             for doc in env.Alias('all_docs')[0].sources
+             if doc.name == "search.idx" ]
+
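+# writeTemplate is used as a builder action below: it expands the string
+# in the TEMPLATE construction variable through yaptu (using this
+# module's globals and the construction variables as namespace) and
+# writes the result to the target. The varlist entry makes the target
+# depend on the value of TEMPLATE.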
+def writeTemplate(target = None, source = None, env = None):
+    file(target[0].abspath,"w").write(processTemplate(env))
+
+def processTemplate(env):
+    return yaptu.process(str(env['TEMPLATE']), globals(), env.Dictionary())
+
+writeTemplate = env.Action(writeTemplate, varlist = [ 'TEMPLATE' ])
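+
+# Usage sketch (the real calls follow further down): a target built with
+# this action gets its content by expanding the TEMPLATE value, e.g.
+#
+#   env.Command('doxy-header.html', 'SConscript', writeTemplate,
+#               TEMPLATE = Literal(HEADER),
+#               TITLE = "Documentation and API reference")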
 
 ###########################################################################
 
-# This is not nice .. really ...
-env.Alias(
-    'all_docs',
-    env.Command('search.php',
-                [ 'doxy-header-overview.html', 'doxy-footer.html', 'html-munge.xsl' ], 
-                [ "sed -e 's/\\$$title/Search results/g' -e 's/\\$$projectname/Overview/g' ${SOURCES[0]} > ${TARGET}.tmp",
-                  'echo "PHPSEARCH" >> ${TARGET}.tmp',
-                  'cat ${SOURCES[1]} >> ${TARGET}.tmp',
-                  'xsltproc --nonet --html --stringparam topdir .. -o - ${SOURCES[2]} ${TARGET}.tmp 2>/dev/null'
-                  + '| sed -e "s/PHPSEARCH//" > $TARGET',
-                  'rm -f ${TARGET}.tmp' ]))
+# Extra documentation modules which are handled (and named) differently
+# from the library modules
+EXTRA_MODULES = [
+    ('Overview', '#/doc/html'),
+    ('Examples', '#/Examples/doc/html'),
+    ('HowTos', '#/HowTos/doc/html'),
+    ('SENFSCons', '#/senfscons/doc/html') ]
+
+HEADER = """
+
+
+$title
+
+
+
+
+
+
+
+
+
+
+
+ """ + +FOOTER = """
+
+
+ +""" + +SEARCH_PHP=""" + +""" + +SEARCH_PATHS_PHP="""""" + +env.Append( ENV = { + 'TODAY' : str(datetime.date.today()), + 'TEXINPUTS' : os.environ.get('TEXINPUTS',env.Dir('#/doclib').abspath + ':'), + 'DOXYGEN' : env.get('DOXYGEN', 'doxygen'), +}) + +env.Replace( + ALL_TAGFILES = [], + DOXYGENCOM = "doclib/doxygen.sh $DOXYOPTS $SOURCE", +) + +env.PhonyTarget('linklint', [], [ + 'rm -rf linklint', + 'linklint -doc linklint -limit 99999999 `find -type d -name html -printf "/%P/@ "`', + '[ ! -r linklint/errorX.html ] || python doclib/linklint_addnames.py linklint/errorX.html.new', + '[ ! -r linklint/errorX.html.new ] || mv linklint/errorX.html.new linklint/errorX.html', + '[ ! -r linklint/errorAX.html ] || python doclib/linklint_addnames.py linklint/errorAX.html.new', + '[ ! -r linklint/errorAX.html.new ] || mv linklint/errorAX.html.new linklint/errorAX.html', + 'echo -e "\\nLokal link check results: linklint/index.html\\nRemote link check results: linklint/urlindex.html\\n"', +]) + +env.PhonyTarget('fixlinks', [], [ + 'python doclib/fix-links.py -v -s .svn -s linklint -s debian linklint/errorX.txt linklint/errorAX.txt', +]) + + +header = env.Command('doxy-header.html', 'SConscript', writeTemplate, + TEMPLATE = Literal(HEADER), + TITLE = "Documentation and API reference") +env.Depends(header, env.Value(repr(list(modules())))) + +footer = env.Command('doxy-footer.html', 'SConscript', writeTemplate, + TEMPLATE = Literal(FOOTER)) + +env.Alias('all_docs', + env.Command('search.php', [ 'html-munge.xsl', 'SConscript' ], + [ writeTemplate, + 'xsltproc --nonet --html --stringparam topdir .. -o - $SOURCE $TARGET 2>/dev/null' + + "| sed" + + r" -e 's/\[\[//g'" + + r" -e 's/\$$projectname/Overview/g'" + + r" -e 's/\$$title/Search results/g'" + + "> ${TARGETS[0]}.tmp", + 'mv ${TARGET}.tmp ${TARGET}' ], + TEMPLATE = Literal(HEADER + + SEARCH_PHP.replace('',']]') + + FOOTER), + TITLE = "Search results")) +env.Alias('all_docs', + env.Command('search_paths.php', 'SConscript', writeTemplate, + TEMPLATE = Literal(SEARCH_PATHS_PHP))) + +env.Alias('install_all', + env.Install( '$DOCINSTALLDIR/doclib', [ 'favicon.ico', + 'logo-head.png', + 'search.php', + 'search_functions.php', + 'search_paths.php', + 'senf.css' ] )) +env.Clean('all', 'doxy-header.html') # I should not need this but I do ... +env.Clean('all_docs', 'doxy-header.html') # I should not need this but I do ...