# MIT License
#
# Copyright The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE

"""Build SCons documentation."""

import glob
import os.path
import re
import shutil
import sys
import time

import SCons.Builder
import SCons.Util

Import('command_line', 'env', 'whereis', 'revaction')

#
# --- Check prerequisites for building the documentation ---
#
skip_doc = False

try:
    import SConsDoc
    import SConsExamples
except ImportError as exc:
    print("doc: SConsDoc failed to import, the error was:")
    print(f"     ImportError: {exc}")
    print("     Please make sure that python-lxml is installed.")
    skip_doc = True

fop = whereis('fop')
xep = whereis('xep')

if not fop and not xep:
    print("doc: No PDF renderer found (fop|xep)!")
    skip_doc = True

skip_doc_arg = ARGUMENTS.get('SKIP_DOC')
if skip_doc_arg is not None:
    skip_doc = skip_doc_arg in ['True', '1', 'true']

#
# --- Configure build ---
#
env = env.Clone()

build = os.path.join(command_line.build_dir, 'doc')

gs = whereis('gs')
lynx = whereis('lynx')

dist_doc_tar_gz = '$DISTDIR/scons-doc-${VERSION}.tar.gz'

tar_deps = []
tar_list = []

orig_env = env
env = orig_env.Clone(SCONS_PY=File('#/scripts/scons.py').rfile())


#
# --- Helpers ---
#
def writeVersionXml(verfile, date, ver, rev, copyright_years):
    """Helper function: Write a version.xml file."""
    try:
        os.unlink(verfile)
    except OSError:
        pass  # okay if the file didn't exist
    dir, f = os.path.split(verfile)
    os.makedirs(dir, exist_ok=True)
    with open(verfile, "w") as vf:
        vf.write(f"""\
<!--
THIS IS AN AUTOMATICALLY-GENERATED FILE.  DO NOT EDIT.
-->
<!ENTITY builddate "{date}">
<!ENTITY buildversion "{ver}">
<!ENTITY buildrevision "{rev}">
<!ENTITY copyright_years "{copyright_years}">
""")


# The names of the target files for the MAN pages
man_page_list = ['scons.1', 'scons-time.1', 'sconsign.1']

# Template for the MAN page texts when we can't properly
# create them because the skip_doc flag is set (required
# modules/tools aren't installed in the current system)
man_replace_tpl = r""".TH "%(uctitle)s" "1" "%(today)s" "SCons %(version)s" "SCons %(version)s"
.ie \n(.g .ds Aq \(aq
.el .ds Aq '
.nh
.ad l
.SH "NOTE"
%(title)s \- This is a replacement file, stemming from an incomplete
packaging process without the required doc modules installed. Please
update the system and try running the build again.
"""

#
# --- Processing ---
#
if skip_doc:
    print("doc: ...skipping building User Guide.")
    print("     ...creating fake MAN pages.")

    # Since the top-level SConstruct requires the MAN
    # pages to exist for the basic packaging, we create simple
    # stub texts here as replacement...
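    # For illustration only (the date and version shown are made up and are
    # filled in from SOURCE_DATE_EPOCH/now and $VERSION at build time), a
    # generated stub page begins roughly like:
    #
    #   .TH "SCONS" "1" "2024-01-01" "SCons 4.x.y" "SCons 4.x.y"
    #   .SH "NOTE"
    #   scons \- This is a replacement file, ...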
    scdir = os.path.join(build, 'man')
    if not os.path.isdir(scdir):
        os.makedirs(scdir)

    today = time.strftime(
        "%Y-%m-%d",
        time.gmtime(int(os.environ.get('SOURCE_DATE_EPOCH', time.time())))
    )
    version = env.subst('$VERSION')
    for m in man_page_list:
        man, _ = os.path.splitext(m)
        with open(os.path.join(scdir, m), "w") as fman:
            fman.write(
                man_replace_tpl % {
                    'uctitle': man.upper().replace("-", "\\-"),
                    'today': today,
                    'title': man,
                    'version': version,
                }
            )
else:
    if not lynx:
        print(
            "doc: Warning, lynx is not installed. "
            "Created release packages will not be complete!"
        )

# Always create a version.xml file containing the version information
# for this run. Ignore it for dependency purposes so we don't
# rebuild all the docs every time just because the date changes.
date, ver, rev, copyright_years = env.Dictionary(
    'DATE', 'VERSION', 'REVISION', 'COPYRIGHT_YEARS'
)
version_xml = File(os.path.join(build, "version.xml"))
writeVersionXml(str(version_xml), date, ver, rev, copyright_years)


def _glob_install_action(target, source, env):
    """Builder for copying files to an Install dir.

    Selection is based on globbing for filename extension.
    """
    if not SCons.Util.is_List(target):
        target = [target]
    if not SCons.Util.is_List(source):
        source = [source]
    for t, s in zip(target, source):
        shutil.copy(str(s), str(t))


def _glob_install_emitter(target, source, env):
    """Emitter for GlobInstall Builder."""
    if not SCons.Util.is_List(target):
        target = [target]
    if not SCons.Util.is_List(source):
        source = [source]

    res = []
    res_src = []
    tdir = env.Dir(target[0])
    for g in glob.glob(str(source[0])):
        head, tail = os.path.split(g)
        res.append(os.path.join(str(tdir), tail))
        res_src.append(g)
    return res, res_src


_glob_install_builder = SCons.Builder.Builder(
    action=_glob_install_action, emitter=_glob_install_emitter
)
env['BUILDERS']['GlobInstall'] = _glob_install_builder


def _chunked_install_action(target, source, env):
    """Builder for copying ChunkedHTML files to an Install dir."""
    if not SCons.Util.is_List(target):
        target = [target]
    if not SCons.Util.is_List(source):
        source = [source]

    tdir, tail = os.path.split(str(target[0]))
    spattern = os.path.join(os.path.split(str(source[0]))[0], '*.html')
    for g in glob.glob(spattern):
        shutil.copy(g, tdir)


def _chunked_install_emitter(target, source, env):
    """Emitter for ChunkedInstall Builder."""
    if not SCons.Util.is_List(target):
        target = [target]
    if not SCons.Util.is_List(source):
        source = [source]

    tdir = env.Dir(target[0])
    head, tail = os.path.split(str(source[0]))
    return os.path.join(str(tdir), tail), source


_chunked_install_builder = SCons.Builder.Builder(
    action=_chunked_install_action, emitter=_chunked_install_emitter
)
env['BUILDERS']['ChunkedInstall'] = _chunked_install_builder

if not env.GetOption('clean'):
    #
    # Ensure that all XML files are valid against our XSD, and
    # that all example names and example output suffixes are unique
    #
    print("Validating files against SCons XSD...")
    if SConsDoc.validate_all_xml(['SCons'], xsdfile='xsd/scons.xsd'):
        print("OK")
    else:
        print("Validation failed! Please correct the errors above and try again.")
        sys.exit(1)

    print("Checking whether all example names are unique...")
    if SConsExamples.exampleNamesAreUnique(os.path.join('doc', 'user')):
        print("OK")
    else:
        print(
            "Not all example names and suffixes are unique! "
            "Please correct the errors listed above and try again."
        )
        sys.exit(1)

# List of prerequisite files in the build/doc folder
buildsuite = []


def copy_dbfiles(env, toolpath, paths, fpattern, use_builddir=True):
    """Helper function, copies a bunch of files matching
    the given fpattern to a target directory.
    """
    global buildsuite
    if not SCons.Util.is_List(toolpath):
        toolpath = [toolpath]
    if not SCons.Util.is_List(paths):
        paths = [paths]
    if not SCons.Util.is_List(fpattern):
        fpattern = [fpattern]

    if use_builddir:
        target_dir = env.Dir(
            os.path.join(command_line.build_dir, *(toolpath + paths))
        )
        buildsuite.extend(
            env.GlobInstall(
                target_dir, os.path.join('..', *(toolpath + paths + fpattern))
            )
        )
    else:
        target_dir = env.Dir(os.path.join(*(toolpath + paths)))
        buildsuite.extend(
            env.GlobInstall(target_dir, os.path.join(*(paths + fpattern)))
        )


#
# Copy generated files (.gen/.mod/.xml) to the build folder
#
copy_dbfiles(env, build, 'generated', '*.gen', False)
copy_dbfiles(env, build, 'generated', '*.mod', False)
copy_dbfiles(env, build, ['generated', 'examples'], '*', False)

#
# Copy XSLT files (.xslt) to the build folder
#
copy_dbfiles(env, build, 'xslt', '*.*', False)

#
# Copy DocBook stylesheets and Tool to the build folder
#
dbtoolpath = ['SCons', 'Tool', 'docbook']
copy_dbfiles(env, dbtoolpath, [], '__init__.py')
copy_dbfiles(env, dbtoolpath, 'utils', 'xmldepend.xsl')
dbpath = dbtoolpath + ['docbook-xsl-1.76.1']
copy_dbfiles(env, dbpath, [], 'VERSION')
copy_dbfiles(env, dbpath, ['common'], '*.*')
copy_dbfiles(env, dbpath, ['lib'], '*.*')
copy_dbfiles(env, dbpath, ['html'], '*.*')
copy_dbfiles(env, dbpath, ['fo'], '*.*')
copy_dbfiles(env, dbpath, ['manpages'], '*.*')
copy_dbfiles(env, dbpath, ['epub'], '*.xsl')
copy_dbfiles(env, dbpath, ['xhtml-1_1'], '*.*')

#
# Copy additional Tools (gs, zip)
#
toolpath = ['SCons', 'Tool']
copy_dbfiles(env, toolpath, [], 'gs.py')
copy_dbfiles(env, toolpath, [], 'zip.py')

# Each document will live in its own subdirectory "build/doc/xxx".
# List them here by their subfolder names. Note how the specifiers
# for each subdir (=DOCTARGETS) have nothing to do with which
# formats get created, but which of the outputs get installed
# to the build folder and added to the different source and binary
# packages in the end.
#
# In addition to the list of target formats (DOCTARGETS), we also
# store some dependency information in this dict. The DOCDEPENDS
# list contains all files from each local "MANIFEST", after
# installing/copying them to the build directory. It basically
# links the original sources to the respective build folder,
# such that a simple 'python bootstrap.py' rebuilds the
# documentation when a file, like 'doc/user/depends.xml'
# for example, changes.
#
# Finally, in DOCNODES we store the created PDF and HTML files,
# such that we can then install them in the proper places for
# getting picked up by the archiving/packaging stages.
DOCTARGETS = 0
DOCDEPENDS = 1
DOCNODES = 2

docs = {
    # 'design': (['chunked', 'pdf'], [], []),
    # 'python10': (['chunked', 'html', 'pdf'], [], []),
    # 'reference': (['chunked', 'html', 'pdf'], [], []),
    # 'developer': (['chunked', 'html', 'pdf'], [], []),
    'user': (['chunked', 'html', 'pdf', 'epub', 'text'], [], []),
    'man': (['man', 'epub', 'text'], [], []),
}


#
# We have to tell SCons to scan the top-level XML files which
# get included by the document XML files in the subdirectories.
#
def _parse_manifest_lines(basedir, manifest) -> list:
    """Scans a MANIFEST file, and returns the list of source files.
    Has basic support for recursive globs '**',
    filename wildcards of the form '*.xml' and
    comment lines, starting with a '#'.

    Args:
        basedir: base path to find files in. Note this does not
            run in an SCons context so path must not need further
            processing (e.g. no '#' signs)
        manifest: path to manifest file
    """
    sources = []
    basewd = os.path.abspath(basedir)
    with open(manifest) as m:
        lines = m.readlines()
    for l in lines:
        if l.startswith('#'):
            # Skip comments
            continue
        l = l.rstrip('\n')
        if l.endswith('**'):
            # Glob all files recursively
            globwd = os.path.dirname(os.path.join(basewd, l))
            for path, dirs, files in os.walk(globwd):
                for f in files:
                    fpath = os.path.join(globwd, path, f)
                    sources.append(os.path.relpath(fpath, basewd))
        elif '*' in l:
            # Glob file pattern
            files = glob.glob(os.path.join(basewd, l))
            for f in files:
                sources.append(os.path.relpath(f, basewd))
        else:
            sources.append(l)
    return sources


manifest = File('MANIFEST').rstr()
src_files = _parse_manifest_lines('.', manifest)
for s in src_files:
    if not s:
        continue
    base, ext = os.path.splitext(s)
    if ext in ['.fig', '.jpg']:
        buildsuite.extend(
            env.Command(os.path.join(build, s), s, Copy("$TARGET", "$SOURCE"))
        )
    else:
        revaction([env.File(os.path.join(build, s))], [env.File(s)], env)

for doc in docs:
    # Read MANIFEST file and copy the listed files to the build directory,
    # while branding them with the SCons copyright and the current
    # revision number...
    if not os.path.exists(os.path.join(build, doc)):
        env.Execute(Mkdir(os.path.join(build, doc)))
    if not os.path.exists(os.path.join(build, doc, 'titlepage')):
        env.Execute(Mkdir(os.path.join(build, doc, 'titlepage')))
    manifest = File(os.path.join(doc, 'MANIFEST')).rstr()
    src_files = _parse_manifest_lines(doc, manifest)
    for s in src_files:
        if not s:
            continue
        doc_s = os.path.join(doc, s)
        build_s = os.path.join(build, doc, s)
        base, ext = os.path.splitext(doc_s)
        head, tail = os.path.split(s)
        if head:
            target_dir = os.path.join(build, doc, head)
        else:
            target_dir = os.path.join(build, doc)
        if ext in ['.fig', '.jpg', '.svg']:
            docs[doc][DOCDEPENDS].extend(
                env.Command(build_s, doc_s, Copy("$TARGET", "$SOURCE"))
            )
        else:
            btarget = env.File(build_s)
            docs[doc][DOCDEPENDS].append(btarget)
            revaction([btarget], [env.File(doc_s)], env)
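# As a sketch of the input handled by _parse_manifest_lines() above, a
# MANIFEST file contains one entry per line: plain filenames, '*' wildcard
# patterns, '**' recursive globs, and '#' comment lines. The entries below
# are illustrative only, not taken from an actual doc/*/MANIFEST:
#
#   # chapter sources
#   main.xml
#   *.xml
#   titlepage/**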
#
# For each document, build the document itself in HTML
# and PDF formats.
#
for doc in docs:
    #
    # Call SCons in each local doc folder
    #
    cleanopt = ''
    if env.GetOption('clean'):
        cleanopt = ' -c'
    scdir = os.path.join(build, doc)
    sctargets = []
    if 'html' in docs[doc][DOCTARGETS]:
        sctargets.append(env.File(os.path.join(scdir, 'index.html')))
    if 'chunked' in docs[doc][DOCTARGETS]:
        sctargets.append(
            env.File(os.path.join(scdir, f'scons-{doc}', 'index.html'))
        )
    if 'pdf' in docs[doc][DOCTARGETS]:
        sctargets.append(env.File(os.path.join(scdir, f'scons-{doc}.pdf')))
    if 'epub' in docs[doc][DOCTARGETS]:
        sctargets.append(env.File(os.path.join(scdir, f'scons-{doc}.epub')))

    if 'man' in docs[doc][DOCTARGETS]:
        for m in man_page_list:
            sctargets.append(os.path.join(scdir, m))
            man, _1 = os.path.splitext(m)
            sctargets.append(os.path.join(scdir, f'scons-{man}.pdf'))
            sctargets.append(os.path.join(scdir, f'scons-{man}.html'))

    docs[doc][DOCNODES].extend(
        env.Command(
            target=sctargets,
            source=buildsuite + docs[doc][DOCDEPENDS],
            action="cd %s && $PYTHON ${SCONS_PY.abspath}%s" % (scdir, cleanopt),
        )
    )

install_css = False
for doc in docs:
    # Collect the output files for this subfolder
    htmldir = os.path.join(build, 'HTML', f'scons-{doc}')
    htmlindex = os.path.join(htmldir, 'index.html')
    html = os.path.join(build, 'HTML', f'scons-{doc}.html')
    pdf = os.path.join(build, 'PDF', f'scons-{doc}.pdf')
    epub = os.path.join(build, 'EPUB', f'scons-{doc}.epub')
    text = os.path.join(build, 'TEXT', f'scons-{doc}.txt')

    if 'chunked' in docs[doc][DOCTARGETS]:
        installed_chtml = env.ChunkedInstall(
            env.Dir(htmldir),
            os.path.join(build, doc, f'scons-{doc}', 'index.html'),
        )
        installed_chtml_css = env.Install(
            env.Dir(htmldir), os.path.join(build, doc, 'scons.css')
        )
        env.Depends(installed_chtml, docs[doc][DOCNODES])
        env.Depends(installed_chtml_css, docs[doc][DOCNODES])

        tar_deps.extend([htmlindex, installed_chtml_css])
        tar_list.extend([htmldir])
        Local(htmlindex)
        env.Ignore(htmlindex, version_xml)

    if 'html' in docs[doc][DOCTARGETS]:
        env.InstallAs(
            target=env.File(html),
            source=env.File(os.path.join(build, doc, 'index.html')),
        )
        tar_deps.extend([html])
        tar_list.extend([html])
        Local(html)
        env.Ignore(html, version_xml)
        install_css = True

    if 'pdf' in docs[doc][DOCTARGETS]:
        env.InstallAs(
            target=env.File(pdf),
            source=env.File(os.path.join(build, doc, f'scons-{doc}.pdf')),
        )
        Local(pdf)
        env.Ignore(pdf, version_xml)

        tar_deps.append(pdf)
        tar_list.append(pdf)

    if 'epub' in docs[doc][DOCTARGETS] and gs:
        env.InstallAs(
            target=env.File(epub),
            source=env.File(os.path.join(build, doc, f'scons-{doc}.epub')),
        )
        Local(epub)
        env.Ignore(epub, version_xml)

        tar_deps.append(epub)
        tar_list.append(epub)

    if (
        'text' in docs[doc][DOCTARGETS]
        and lynx
        and (('html' in docs[doc][DOCTARGETS]) or (doc == 'man'))
    ):
        texthtml = os.path.join(build, doc, 'index.html')
        if doc == 'man':
            # Special handling for single MAN file
            texthtml = os.path.join(build, doc, 'scons-scons.html')

        env.Command(
            target=text,
            source=env.File(texthtml),
            action="lynx -dump ${SOURCE.abspath} > $TARGET",
        )
        Local(text)
        env.Ignore(text, version_xml)

        tar_deps.append(text)
        tar_list.append(text)

    if 'man' in docs[doc][DOCTARGETS]:
        for m in man_page_list:
            man, _1 = os.path.splitext(m)
            pdf = os.path.join(build, 'PDF', f'{man}-man.pdf')
            html = os.path.join(build, 'HTML', f'{man}-man.html')

            env.InstallAs(
                target=env.File(pdf),
                source=env.File(os.path.join(build, 'man', f'scons-{man}.pdf')),
            )
            env.InstallAs(
                target=env.File(html),
                source=env.File(os.path.join(build, 'man', f'scons-{man}.html')),
            )

            tar_deps.extend([pdf, html])
            tar_list.extend([pdf, html])
# Install CSS file, common to all single HTMLs
if install_css:
    css_file = os.path.join(build, 'HTML', 'scons.css')
    env.InstallAs(
        target=env.File(css_file),
        source=env.File(os.path.join(build, 'user', 'scons.css')),
    )
    tar_deps.extend([css_file])
    tar_list.extend([css_file])
    Local(css_file)

if not skip_doc:
    # Build API DOCS
    # TODO: Better specify dependencies on source files
    pdf_file = env.Command(
        target='#/build/doc/api/scons-api.pdf',
        source=env.Glob('#/SCons/*'),
        action=[Delete("#/build/doc/api"), "cd doc && make pdf"],
    )
    pdf_install = os.path.join(build, 'PDF', 'scons-api.pdf')
    env.InstallAs(target=pdf_install, source=pdf_file)
    tar_deps.append(pdf_install)
    tar_list.append(pdf_install)

    htmldir = os.path.join(build, 'HTML', 'scons-api')
    html_files = env.Command(
        target='#/build/doc/HTML/scons-api/index.html',
        source=env.Glob('#/SCons/*'),
        action="cd doc && make dirhtml BUILDDIR=${HTMLDIR}",
        HTMLDIR=htmldir,
    )
    tar_deps.append(htmldir)
    tar_list.append(htmldir)

#
# Now actually create the tar file of the documentation,
# for easy distribution to the web site.
#
if tar_deps:
    tar_list = ' '.join([x.replace(build + '/', '') for x in tar_list])
    t = env.Command(
        target=dist_doc_tar_gz,
        source=tar_deps,
        action="tar cf${TAR_HFLAG} - -C %s %s | gzip > $TARGET"
        % (build, tar_list),
    )
    AddPostAction(dist_doc_tar_gz, Chmod(dist_doc_tar_gz, 0o644))
    Local(t)
    Alias('doc', t)
else:
    Alias('doc', os.path.join(command_line.build_dir, 'doc'))
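# Typical ways to drive this SConscript (a sketch; the actual entry point is
# the top-level SConstruct, usually run via bootstrap.py from a checkout):
#
#   python bootstrap.py doc             # build the full documentation
#   python bootstrap.py doc SKIP_DOC=1  # skip docs, write stub MAN pages only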