Diffstat (limited to 'src/engine')
-rw-r--r-- | src/engine/SCons/Scanner/LaTeX.py | 201
-rw-r--r-- | src/engine/SCons/Tool/__init__.py | 8
-rw-r--r-- | src/engine/SCons/Tool/pdf.py | 2
-rw-r--r-- | src/engine/SCons/Tool/tex.py | 459
4 files changed, 488 insertions, 182 deletions
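Before the per-file hunks, a quick standalone illustration of the inclusion regex this change keeps using in the scanner (the regex is copied from the diff below; the sample LaTeX text and its expected output are invented for illustration):

    import re

    regex = r'\\(include|includegraphics(?:\[[^\]]+\])?|input|bibliography|usepackage){([^}]*)}'
    cre = re.compile(regex, re.M)

    sample = r"""
    \usepackage{graphicx}
    \includegraphics[width=0.7\linewidth]{picture}
    \bibliography{phys,comp}
    """
    print(cre.findall(sample))
    # [('usepackage', 'graphicx'),
    #  ('includegraphics[width=0.7\\linewidth]', 'picture'),
    #  ('bibliography', 'phys,comp')]

Note that the second tuple still carries the [...] options and the third still carries the comma-separated list; the scan() changes further down normalize both.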
diff --git a/src/engine/SCons/Scanner/LaTeX.py b/src/engine/SCons/Scanner/LaTeX.py index ceb9bf5..c544108 100644 --- a/src/engine/SCons/Scanner/LaTeX.py +++ b/src/engine/SCons/Scanner/LaTeX.py @@ -31,19 +31,33 @@ __revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__" import os.path import string +import re import SCons.Scanner def LaTeXScanner(): - """Return a prototype Scanner instance for scanning LaTeX source files""" + """Return a prototype Scanner instance for scanning LaTeX source files + when built with latex. + """ ds = LaTeX(name = "LaTeXScanner", suffixes = '$LATEXSUFFIXES', - path_variable = 'TEXINPUTS', - regex = '\\\\(include|includegraphics(?:\[[^\]]+\])?|input|bibliography|usepackage){([^}]*)}', + # in the search order, see below in LaTeX class docstring + graphics_extensions = ['.eps', '.ps'], recursive = 0) return ds -class LaTeX(SCons.Scanner.Classic): +def PDFLaTeXScanner(): + """Return a prototype Scanner instance for scanning LaTeX source files + when built with pdflatex. + """ + ds = LaTeX(name = "PDFLaTeXScanner", + suffixes = '$LATEXSUFFIXES', + # in the search order, see below in LaTeX class docstring + graphics_extensions = ['.png', '.pdf', '.jpg', '.tif'], + recursive = 0) + return ds + +class LaTeX(SCons.Scanner.Base): """Class for scanning LaTeX files for included files. Unlike most scanners, which use regular expressions that just @@ -51,76 +65,191 @@ class LaTeX(SCons.Scanner.Classic): of the keyword for the inclusion ("include", "includegraphics", "input", or "bibliography"), and then the file name itself. Based on a quick look at LaTeX documentation, it seems that we - need a should append .tex suffix for the "include" keywords, - append .tex if there is no extension for the "input" keyword, - but leave the file name untouched for "includegraphics." For - the "bibliography" keyword we need to add .bib if there is - no extension. (This need to be revisited since if there - is no extension for an "includegraphics" keyword latex will - append .ps or .eps to find the file; while pdftex will use - other extensions.) + should append .tex suffix for the "include" keywords, append .tex if + there is no extension for the "input" keyword, and need to add .bib + for the "bibliography" keyword that does not accept extensions by itself. + + Finally, if there is no extension for an "includegraphics" keyword + latex will append .ps or .eps to find the file, while pdftex may use .pdf, + .jpg, .tif, .mps, or .png. + + The actual subset and search order may be altered by + DeclareGraphicsExtensions command. This complication is ignored. + The default order corresponds to experimentation with teTeX + $ latex --version + pdfeTeX 3.141592-1.21a-2.2 (Web2C 7.5.4) + kpathsea version 3.5.4 + The order is: + ['.eps', '.ps'] for latex + ['.png', '.pdf', '.jpg', '.tif']. 
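As a side note on the extension lists just described, here is a minimal sketch of the probing behaviour the docstring is talking about. The helper name and directory layout are made up, and the nesting of the directory/extension loops is a simplification of what kpathsea actually does:

    import os

    def resolve_graphics(basename, search_dirs, extensions):
        # Try basename with each extension, in order, in each directory,
        # roughly mirroring what latex/pdflatex do for \includegraphics{picture}.
        for d in search_dirs:
            for ext in extensions:
                candidate = os.path.join(d, basename + ext)
                if os.path.exists(candidate):
                    return candidate
        return None

    resolve_graphics('picture', ['.'], ['.eps', '.ps'])                   # latex / DVI
    resolve_graphics('picture', ['.'], ['.png', '.pdf', '.jpg', '.tif'])  # pdflatex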
+ + Another difference is that the search path is determined by the type + of the file being searched: + env['TEXINPUTS'] for "input" and "include" keywords + env['TEXPICTS'] for "includegraphics" keyword + env['BIBINPUTS'] for "bibliography" keyword + env['BSTINPUTS'] for "bibliographystyle" keyword + + FIXME: also look for the class or style in document[class|style]{} + FIXME: also look for the argument of bibliographystyle{} """ - def latex_name(self, include): + keyword_paths = {'include': 'TEXINPUTS', + 'input': 'TEXINPUTS', + 'includegraphics': 'TEXPICTS', + 'bibliography': 'BIBINPUTS', + 'bibliographystyle': 'BSTINPUTS', + 'usepackage': 'TEXINPUTS'} + env_variables = SCons.Util.unique(keyword_paths.values()) + + def __init__(self, name, suffixes, graphics_extensions, *args, **kw): + + regex = '\\\\(include|includegraphics(?:\[[^\]]+\])?|input|bibliography|usepackage){([^}]*)}' + self.cre = re.compile(regex, re.M) + self.graphics_extensions = graphics_extensions + + def _scan(node, env, path=(), self=self): + node = node.rfile() + if not node.exists(): + return [] + return self.scan(node, path) + + class FindMultiPathDirs: + """The stock FindPathDirs function has the wrong granularity: + it is called once per target, while we need the path that depends + on what kind of included files is being searched. This wrapper + hides multiple instances of FindPathDirs, one per the LaTeX path + variable in the environment. When invoked, the function calculates + and returns all the required paths as a dictionary (converted into + a tuple to become hashable). Then the scan function converts it + back and uses a dictionary of tuples rather than a single tuple + of paths. + """ + def __init__(self, dictionary): + self.dictionary = {} + for k, n in dictionary.iteritems(): + self.dictionary[k] = SCons.Scanner.FindPathDirs(n) + def __call__(self, env, dir=None, target=None, source=None, argument=None): + di = {} + for k, c in self.dictionary.iteritems(): + p = c(env, dir=None, target=None, source=None, argument=None) + di[k] = p + # To prevent "dict is not hashable error" + rv = tuple([(k,v) for k, v in di.iteritems()]) + return rv + + class LaTeXScanCheck: + """Skip all but LaTeX source files, i.e., do not scan *.eps, + *.pdf, *.jpg, etc. + """ + def __init__(self, suffixes): + self.suffixes = suffixes + def __call__(self, node, env): + current = not node.has_builder() or node.is_up_to_date() + scannable = node.get_suffix() in env.subst(self.suffixes) + # Returning false means that the file is not scanned. 
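The "dict is not hashable" workaround in FindMultiPathDirs above amounts to a simple round trip, shown here on its own with invented paths:

    # Scanner path functions must return something hashable, so FindMultiPathDirs
    # flattens the per-keyword path dictionary into a tuple of pairs; scan()
    # later turns it back into a dict.
    paths = {'TEXINPUTS': ('/work/doc',), 'BIBINPUTS': ('/work/bib',)}
    hashable = tuple(paths.items())   # usable as part of a signature/cache key
    restored = dict(hashable)
    assert restored == paths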
+ return scannable and current + + kw['function'] = _scan + kw['path_function'] = FindMultiPathDirs(LaTeX.keyword_paths) + kw['recursive'] = 1 + kw['skeys'] = suffixes + kw['scan_check'] = LaTeXScanCheck(suffixes) + kw['name'] = name + + apply(SCons.Scanner.Base.__init__, (self,) + args, kw) + + def _latex_names(self, include): filename = include[1] if include[0] == 'input': base, ext = os.path.splitext( filename ) if ext == "": - filename = filename + '.tex' + return [filename + '.tex'] if (include[0] == 'include'): - filename = filename + '.tex' + return [filename + '.tex'] if include[0] == 'bibliography': base, ext = os.path.splitext( filename ) if ext == "": - filename = filename + '.bib' + return [filename + '.bib'] if include[0] == 'usepackage': base, ext = os.path.splitext( filename ) if ext == "": - filename = filename + '.sty' - return filename + return [filename + '.sty'] + if include[0] == 'includegraphics': + base, ext = os.path.splitext( filename ) + if ext == "": + return [filename + e for e in self.graphics_extensions] + return [filename] + def sort_key(self, include): - return SCons.Node.FS._my_normcase(self.latex_name(include)) + return SCons.Node.FS._my_normcase(str(include)) + def find_include(self, include, source_dir, path): - i = SCons.Node.FS.find_file(self.latex_name(include), - (source_dir,) + path) + try: + sub_path = path[include[0]] + except: + sub_path = () + try_names = self._latex_names(include) + for n in try_names: + i = SCons.Node.FS.find_file(n, (source_dir,) + sub_path) + if i: + return i, include return i, include def scan(self, node, path=()): - # # Modify the default scan function to allow for the regular # expression to return a comma separated list of file names # as can be the case with the bibliography keyword. - # - # cache the includes list in node so we only scan it once: + + # Cache the includes list in node so we only scan it once: + path_dict = dict(list(path)) + noopt_cre = re.compile('\[.*$') if node.includes != None: includes = node.includes else: includes = self.cre.findall(node.get_contents()) + # 1. Split comma-separated lines, e.g. + # ('bibliography', 'phys,comp') + # should become two entries + # ('bibliography', 'phys') + # ('bibliography', 'comp') + # 2. Remove the options, e.g., such as + # ('includegraphics[clip,width=0.7\\linewidth]', 'picture.eps') + # should become + # ('includegraphics', 'picture.eps') + split_includes = [] + for include in includes: + inc_type = noopt_cre.sub('', include[0]) + inc_list = string.split(include[1],',') + for j in range(len(inc_list)): + split_includes.append( (inc_type, inc_list[j]) ) + # + includes = split_includes node.includes = includes # This is a hand-coded DSU (decorate-sort-undecorate, or # Schwartzian transform) pattern. The sort key is the raw name - # of the file as specifed on the #include line (including the - # " or <, since that may affect what file is found), which lets + # of the file as specifed on the \include, \input, etc. line. + # TODO: what about the comment in the original Classic scanner: + # """which lets # us keep the sort order constant regardless of whether the file - # is actually found in a Repository or locally. 
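The normalization described in the scan() comments above (splitting comma-separated arguments and dropping [...] options) is easiest to see standalone; the regex matches the code in the hunk, while the sample tuples and expected output are illustrative:

    import re

    noopt_cre = re.compile(r'\[.*$')
    includes = [('bibliography', 'phys,comp'),
                ('includegraphics[clip,width=0.7\\linewidth]', 'picture.eps')]

    split_includes = []
    for inc_type, inc_arg in includes:
        inc_type = noopt_cre.sub('', inc_type)   # drop any [...] options
        for name in inc_arg.split(','):          # one entry per comma-separated file
            split_includes.append((inc_type, name))
    # -> [('bibliography', 'phys'), ('bibliography', 'comp'),
    #     ('includegraphics', 'picture.eps')]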
+ # is actually found in a Repository or locally.""" nodes = [] source_dir = node.get_dir() for include in includes: # # Handle multiple filenames in include[1] # - inc_list = string.split(include[1],',') - for j in range(len(inc_list)): - include_local = [include[0],inc_list[j]] - n, i = self.find_include(include_local, source_dir, path) - + n, i = self.find_include(include, source_dir, path_dict) if n is None: - SCons.Warnings.warn(SCons.Warnings.DependencyWarning, - "No dependency generated for file: %s (included from: %s) -- file not found" % (i, node)) + # Do not bother with 'usepackage' warnings, as they most + # likely refer to system-level files + if include[0] != 'usepackage': + SCons.Warnings.warn(SCons.Warnings.DependencyWarning, + "No dependency generated for file: %s (included from: %s) -- file not found" % (i, node)) else: - sortkey = self.sort_key(include) + sortkey = self.sort_key(n) nodes.append((sortkey, n)) - + # nodes.sort() nodes = map(lambda pair: pair[1], nodes) return nodes diff --git a/src/engine/SCons/Tool/__init__.py b/src/engine/SCons/Tool/__init__.py index f197b68..9924ab1 100644 --- a/src/engine/SCons/Tool/__init__.py +++ b/src/engine/SCons/Tool/__init__.py @@ -55,6 +55,7 @@ DefaultToolpath=[] CScanner = SCons.Scanner.C.CScanner() DScanner = SCons.Scanner.D.DScanner() LaTeXScanner = SCons.Scanner.LaTeX.LaTeXScanner() +PDFLaTeXScanner = SCons.Scanner.LaTeX.PDFLaTeXScanner() ProgramScanner = SCons.Scanner.Prog.ProgramScanner() SourceFileScanner = SCons.Scanner.Base({}, name='SourceFileScanner') @@ -76,8 +77,13 @@ for suffix in CSuffixes: for suffix in DSuffixes: SourceFileScanner.add_scanner(suffix, DScanner) +# FIXME: what should be done here? Two scanners scan the same extensions, +# but look for different files, e.g., "picture.eps" vs. "picture.pdf". +# The builders for DVI and PDF explicitly reference their scanners +# I think that means this is not needed??? 
for suffix in LaTeXSuffixes: - SourceFileScanner.add_scanner(suffix, LaTeXScanner) + SourceFileScanner.add_scanner(suffix, LaTeXScanner) + SourceFileScanner.add_scanner(suffix, PDFLaTeXScanner) class Tool: def __init__(self, name, toolpath=[], **kw): diff --git a/src/engine/SCons/Tool/pdf.py b/src/engine/SCons/Tool/pdf.py index b0cd126..37c4c01 100644 --- a/src/engine/SCons/Tool/pdf.py +++ b/src/engine/SCons/Tool/pdf.py @@ -41,7 +41,7 @@ def generate(env): global PDFBuilder if PDFBuilder is None: PDFBuilder = SCons.Builder.Builder(action = {}, - source_scanner = SCons.Tool.LaTeXScanner, + source_scanner = SCons.Tool.PDFLaTeXScanner, prefix = '$PDFPREFIX', suffix = '$PDFSUFFIX', emitter = {}, diff --git a/src/engine/SCons/Tool/tex.py b/src/engine/SCons/Tool/tex.py index db083ec..59980e5 100644 --- a/src/engine/SCons/Tool/tex.py +++ b/src/engine/SCons/Tool/tex.py @@ -43,20 +43,53 @@ import SCons.Node import SCons.Node.FS import SCons.Util -warning_rerun_re = re.compile('(^LaTeX Warning:.*Rerun)|(^Package \w+ Warning:.*Rerun)', re.MULTILINE) +Verbose = False +must_rerun_latex = True + +# these are files that just need to be checked for changes and then rerun latex +check_suffixes = ['.toc', '.lof', '.lot', '.out', '.nav', '.snm'] + +# these are files that require bibtex or makeindex to be run when they change +all_suffixes = check_suffixes + ['.bbl', '.idx', '.nlo', '.glo'] + +# +# regular expressions used to search for Latex features +# or outputs that require rerunning latex +# +# search for all .aux files opened by latex (recorded in the .log file) +openout_aux_re = re.compile(r"\\openout.*`(.*\.aux)'") + +#printindex_re = re.compile(r"^[^%]*\\printindex", re.MULTILINE) +#printnomenclature_re = re.compile(r"^[^%]*\\printnomenclature", re.MULTILINE) +#printglossary_re = re.compile(r"^[^%]*\\printglossary", re.MULTILINE) + +# search to find rerun warnings +warning_rerun_str = '(^LaTeX Warning:.*Rerun)|(^Package \w+ Warning:.*Rerun)' +warning_rerun_re = re.compile(warning_rerun_str, re.MULTILINE) + +# search to find citation rerun warnings rerun_citations_str = "^LaTeX Warning:.*\n.*Rerun to get citations correct" rerun_citations_re = re.compile(rerun_citations_str, re.MULTILINE) +# search to find undefined references or citations warnings undefined_references_str = '(^LaTeX Warning:.*undefined references)|(^Package \w+ Warning:.*undefined citations)' undefined_references_re = re.compile(undefined_references_str, re.MULTILINE) -openout_aux_re = re.compile(r"\\openout.*`(.*\.aux)'") -openout_re = re.compile(r"\\openout.*`(.*)'") - -makeindex_re = re.compile(r"^[^%]*\\makeindex", re.MULTILINE) +# used by the emitter +auxfile_re = re.compile(r".", re.MULTILINE) tableofcontents_re = re.compile(r"^[^%]*\\tableofcontents", re.MULTILINE) +makeindex_re = re.compile(r"^[^%]*\\makeindex", re.MULTILINE) bibliography_re = re.compile(r"^[^%]*\\bibliography", re.MULTILINE) +listoffigures_re = re.compile(r"^[^%]*\\listoffigures", re.MULTILINE) +listoftables_re = re.compile(r"^[^%]*\\listoftables", re.MULTILINE) +hyperref_re = re.compile(r"^[^%]*\\usepackage.*\{hyperref\}", re.MULTILINE) +makenomenclature_re = re.compile(r"^[^%]*\\makenomenclature", re.MULTILINE) +makeglossary_re = re.compile(r"^[^%]*\\makeglossary", re.MULTILINE) +beamer_re = re.compile(r"^[^%]*\\documentclass\{beamer\}", re.MULTILINE) + +# search to find all files opened by Latex (recorded in .log file) +openout_re = re.compile(r"\\openout.*`(.*)'") # An Action sufficient to build any generic tex file. 
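The rerun-warning patterns defined at the top of tex.py are ordinary multiline regexes over the .log file; a quick check against a typical (invented, not taken from a real build) log line:

    import re

    warning_rerun_re = re.compile(
        r'(^LaTeX Warning:.*Rerun)|(^Package \w+ Warning:.*Rerun)', re.MULTILINE)

    log = "LaTeX Warning: Label(s) may have changed. Rerun to get cross-references right.\n"
    print(bool(warning_rerun_re.search(log)))   # True -> schedule another latex pass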
TeXAction = None @@ -71,10 +104,51 @@ BibTeXAction = None # An action to run MakeIndex on a file. MakeIndexAction = None +# An action to run MakeIndex (for nomencl) on a file. +MakeNclAction = None + +# An action to run MakeIndex (for glossary) on a file. +MakeGlossaryAction = None + +# Used as a return value of modify_env_var if the variable is not set. +class _Null: + pass +_null = _Null + +# The user specifies the paths in env[variable], similar to other builders. +# They may be relative and must be converted to absolute, as expected +# by LaTeX and Co. The environment may already have some paths in +# env['ENV'][var]. These paths are honored, but the env[var] paths have +# higher precedence. All changes are un-done on exit. +def modify_env_var(env, var, abspath): + try: + save = env['ENV'][var] + except KeyError: + save = _null + env.PrependENVPath(var, abspath) + try: + if SCons.Util.is_List(env[var]): + env.PrependENVPath(var, [os.path.abspath(str(p)) for p in env[var]]) + else: + # Split at os.pathsep to convert into absolute path + env.PrependENVPath(var, [os.path.abspath(p) for p in str(env[var]).split(os.pathsep)]) + except KeyError: + pass + # Convert into a string explicitly to append ":" (without which it won't search system + # paths as well). The problem is that env.AppendENVPath(var, ":") + # does not work, refuses to append ":" (os.pathsep). + if SCons.Util.is_List(env['ENV'][var]): + env['ENV'][var] = os.pathsep.join(env['ENV'][var]) + # Append the trailing os.pathsep character here to catch the case with no env[var] + env['ENV'][var] = env['ENV'][var] + os.pathsep + return save + def InternalLaTeXAuxAction(XXXLaTeXAction, target = None, source= None, env=None): """A builder for LaTeX files that checks the output in the aux file and decides how many times to use LaTeXAction, and BibTeXAction.""" + global must_rerun_latex + # This routine is called with two actions. In this file for DVI builds # with LaTeXAction and from the pdflatex.py with PDFLaTeXAction # set this up now for the case where the user requests a different extension @@ -88,139 +162,188 @@ def InternalLaTeXAuxAction(XXXLaTeXAction, target = None, source= None, env=None basedir = os.path.split(str(source[0]))[0] basefile = os.path.split(str(basename))[1] abspath = os.path.abspath(basedir) - targetbase = SCons.Util.splitext(str(target[0]))[0] targetext = os.path.splitext(str(target[0]))[1] targetdir = os.path.split(str(target[0]))[0] - # Not sure if these environment changes should go here or make the - # user do them I undo all but TEXPICTS but there is still the side - # effect of creating the empty (':') entries in the environment. - - def modify_env_var(env, var, abspath): - try: - save = env['ENV'][var] - except KeyError: - save = ':' - env['ENV'][var] = '' - if SCons.Util.is_List(env['ENV'][var]): - env['ENV'][var] = [abspath] + env['ENV'][var] - else: - env['ENV'][var] = abspath + os.pathsep + env['ENV'][var] - return save - - texinputs_save = modify_env_var(env, 'TEXINPUTS', abspath) - bibinputs_save = modify_env_var(env, 'BIBINPUTS', abspath) - bstinputs_save = modify_env_var(env, 'BSTINPUTS', abspath) - texpicts_save = modify_env_var(env, 'TEXPICTS', abspath) - - # Create these file names with the target directory since they will - # be made there. That's because the *COM variables have the cd - # command in the prolog. 
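A rough picture of what env['ENV']['TEXINPUTS'] ends up holding after the new modify_env_var, assuming the user set env['TEXINPUTS'] = ['extra/styles'] and the OS environment already contained '/usr/share/texmf'; all paths here are invented:

    import os

    parts = [os.path.abspath('extra/styles'),   # user's env[var] paths take precedence
             '/work/build/doc',                 # the source/target directory
             '/usr/share/texmf']                # whatever was already in env['ENV']
    texinputs = os.pathsep.join(parts) + os.pathsep
    # the trailing separator is what keeps kpathsea's default search paths in play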
- - bblfilename = os.path.join(targetdir, basefile + '.bbl') - bblContents = "" - if os.path.exists(bblfilename): - bblContents = open(bblfilename, "rb").read() - - idxfilename = os.path.join(targetdir, basefile + '.idx') - idxContents = "" - if os.path.exists(idxfilename): - idxContents = open(idxfilename, "rb").read() - - tocfilename = os.path.join(targetdir, basefile + '.toc') - tocContents = "" - if os.path.exists(tocfilename): - tocContents = open(tocfilename, "rb").read() - - # generate the file name that latex will generate - resultfilename = os.path.join(targetdir, basefile + targetext) + saved_env = {} + for var in SCons.Scanner.LaTeX.LaTeX.env_variables: + saved_env[var] = modify_env_var(env, var, abspath) - # Run LaTeX once to generate a new aux file and log file. - result = XXXLaTeXAction(target, source, env) - if result != 0: - return result - - # Decide if various things need to be run, or run again. We check + # Create base file names with the target directory since the auxiliary files + # will be made there. That's because the *COM variables have the cd + # command in the prolog. We check # for the existence of files before opening them--even ones like the # aux file that TeX always creates--to make it possible to write tests # with stubs that don't necessarily generate all of the same files. - # Read the log file to find all .aux files - logfilename = os.path.join(targetbase + '.log') - auxfiles = [] - if os.path.exists(logfilename): - content = open(logfilename, "rb").read() - auxfiles = openout_aux_re.findall(content) - - # Now decide if bibtex will need to be run. - for auxfilename in auxfiles: - target_aux = os.path.join(targetdir, auxfilename) - if os.path.exists(target_aux): - content = open(target_aux, "rb").read() - if string.find(content, "bibdata") != -1: - bibfile = env.fs.File(targetbase) - result = BibTeXAction(bibfile, bibfile, env) - if result != 0: - return result - break - - must_rerun_latex = 0 - # Now decide if latex will need to be run again due to table of contents. - if os.path.exists(tocfilename) and tocContents != open(tocfilename, "rb").read(): - must_rerun_latex = 1 - - # Now decide if latex will need to be run again due to bibliography. - if os.path.exists(bblfilename) and bblContents != open(bblfilename, "rb").read(): - must_rerun_latex = 1 - - # Now decide if latex will need to be run again due to index. - if os.path.exists(idxfilename) and idxContents != open(idxfilename, "rb").read(): - # We must run makeindex - idxfile = env.fs.File(targetbase) - result = MakeIndexAction(idxfile, idxfile, env) - if result != 0: - return result - must_rerun_latex = 1 + targetbase = os.path.join(targetdir, basefile) + + # if there is a \makeindex there will be a .idx and thus + # we have to run makeindex at least once to keep the build + # happy even if there is no index. 
+ # Same for glossaries and nomenclature + src_content = source[0].get_contents() + run_makeindex = makeindex_re.search(src_content) and not os.path.exists(targetbase + '.idx') + run_nomenclature = makenomenclature_re.search(src_content) and not os.path.exists(targetbase + '.nlo') + run_glossary = makeglossary_re.search(src_content) and not os.path.exists(targetbase + '.glo') + + saved_hashes = {} + suffix_nodes = {} + + for suffix in all_suffixes: + theNode = env.fs.File(targetbase + suffix) + suffix_nodes[suffix] = theNode + saved_hashes[suffix] = theNode.get_csig() + + if Verbose: + print "hashes: ",saved_hashes + + must_rerun_latex = True + + # + # routine to update MD5 hash and compare + # + def check_MD5(filenode, suffix): + global must_rerun_latex + # two calls to clear old csig + filenode.clear_memoized_values() + filenode.ninfo = filenode.new_ninfo() + new_md5 = filenode.get_csig() + + if saved_hashes[suffix] == new_md5: + if Verbose: + print "file %s not changed" % (targetbase+suffix) + return False # unchanged + saved_hashes[suffix] = new_md5 + must_rerun_latex = True + if Verbose: + print "file %s changed, rerunning Latex, new hash = " % (targetbase+suffix), new_md5 + return True # changed - if must_rerun_latex == 1: - result = XXXLaTeXAction(target, source, env) - if result != 0: - return result + # generate the file name that latex will generate + resultfilename = targetbase + callerSuffix - # Now decide if latex needs to be run yet again to resolve warnings. - logfilename = targetbase + '.log' - for _ in range(int(env.subst('$LATEXRETRIES'))): - if not os.path.exists(logfilename): - break - content = open(logfilename, "rb").read() - if not warning_rerun_re.search(content) and \ - not rerun_citations_re.search(content) and \ - not undefined_references_re.search(content): - break + count = 0 + + while (must_rerun_latex and count < int(env.subst('$LATEXRETRIES'))) : result = XXXLaTeXAction(target, source, env) if result != 0: return result + count = count + 1 + + must_rerun_latex = False + # Decide if various things need to be run, or run again. + + # Read the log file to find all .aux files + logfilename = targetbase + '.log' + logContent = '' + auxfiles = [] + if os.path.exists(logfilename): + logContent = open(logfilename, "rb").read() + auxfiles = openout_aux_re.findall(logContent) + + # Now decide if bibtex will need to be run. + # The information that bibtex reads from the .aux file is + # pass-independent. If we find (below) that the .bbl file is unchanged, + # then the last latex saw a correct bibliography. + # Therefore only do this on the first pass + if count == 1: + for auxfilename in auxfiles: + target_aux = os.path.join(targetdir, auxfilename) + if os.path.exists(target_aux): + content = open(target_aux, "rb").read() + if string.find(content, "bibdata") != -1: + if Verbose: + print "Need to run bibtex" + bibfile = env.fs.File(targetbase) + result = BibTeXAction(bibfile, bibfile, env) + if result != 0: + return result + must_rerun_latex = check_MD5(suffix_nodes['.bbl'],'.bbl') + break + + # Now decide if latex will need to be run again due to index. 
+ if check_MD5(suffix_nodes['.idx'],'.idx') or (count == 1 and run_makeindex): + # We must run makeindex + if Verbose: + print "Need to run makeindex" + idxfile = suffix_nodes['.idx'] + result = MakeIndexAction(idxfile, idxfile, env) + if result != 0: + return result + + # TO-DO: need to add a way for the user to extend this list for whatever + # auxiliary files they create in other (or their own) packages + # Harder is case is where an action needs to be called -- that should be rare (I hope?) + + for index in check_suffixes: + check_MD5(suffix_nodes[index],index) + + # Now decide if latex will need to be run again due to nomenclature. + if check_MD5(suffix_nodes['.nlo'],'.nlo') or (count == 1 and run_nomenclature): + # We must run makeindex + if Verbose: + print "Need to run makeindex for nomenclature" + nclfile = suffix_nodes['.nlo'] + result = MakeNclAction(nclfile, nclfile, env) + if result != 0: + return result + + # Now decide if latex will need to be run again due to glossary. + if check_MD5(suffix_nodes['.glo'],'.glo') or (count == 1 and run_glossary): + # We must run makeindex + if Verbose: + print "Need to run makeindex for glossary" + glofile = suffix_nodes['.glo'] + result = MakeGlossaryAction(glofile, glofile, env) + if result != 0: + return result + + # Now decide if latex needs to be run yet again to resolve warnings. + if warning_rerun_re.search(logContent): + must_rerun_latex = True + if Verbose: + print "rerun Latex due to latex or package rerun warning" + + if rerun_citations_re.search(logContent): + must_rerun_latex = True + if Verbose: + print "rerun Latex due to 'Rerun to get citations correct' warning" + + if undefined_references_re.search(logContent): + must_rerun_latex = True + if Verbose: + print "rerun Latex due to undefined references or citations" + + if (count >= int(env.subst('$LATEXRETRIES')) and must_rerun_latex): + print "reached max number of retries on Latex ,",int(env.subst('$LATEXRETRIES')) +# end of while loop + # rename Latex's output to what the target name is if not (str(target[0]) == resultfilename and os.path.exists(resultfilename)): if os.path.exists(resultfilename): print "move %s to %s" % (resultfilename, str(target[0]), ) shutil.move(resultfilename,str(target[0])) - # if the user gave some other extension try PDFSUFFIX and then .dvi - # not sure how to tell if we got here from a PDF or DVI builder. - else: - resultfilename = os.path.splitext(resultfilename)[0] + callerSuffix - if os.path.exists(resultfilename): - print "move %s to %s" % (resultfilename, str(target[0]), ) - shutil.move(resultfilename,str(target[0])) - - env['ENV']['TEXINPUTS'] = texinputs_save - env['ENV']['BIBINPUTS'] = bibinputs_save - env['ENV']['BSTINPUTS'] = bibinputs_save + # Original comment (when TEXPICTS was not restored): # The TEXPICTS enviroment variable is needed by a dvi -> pdf step - # later on Mac OSX so leave it, - # env['ENV']['TEXPICTS'] = texpicts_save + # later on Mac OSX so leave it + # + # It is also used when searching for pictures (implicit dependencies). + # Why not set the variable again in the respective builder instead + # of leaving local modifications in the environment? What if multiple + # latex builds in different directories need different TEXPICTS? 
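The check_MD5 test driving the rerun loop boils down to comparing a content signature before and after each pass; a stripped-down version of the idea, where hashlib stands in for SCons's csig machinery and 'report.toc' is a made-up file name:

    import hashlib, os

    def content_hash(path):
        if not os.path.exists(path):
            return None
        return hashlib.md5(open(path, 'rb').read()).hexdigest()

    saved = {'.toc': content_hash('report.toc')}
    # ... run latex once ...
    must_rerun = content_hash('report.toc') != saved['.toc']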
+ for var in SCons.Scanner.LaTeX.LaTeX.env_variables: + if var == 'TEXPICTS': + continue + if saved_env[var] is _null: + try: + env['ENV'].pop(var) + except KeyError: + pass # was never set + else: + env['ENV'][var] = saved_env[var] return result @@ -262,42 +385,65 @@ def TeXLaTeXStrFunction(target = None, source= None, env=None): return result def tex_emitter(target, source, env): - base = SCons.Util.splitext(str(source[0]))[0] + """An emitter for TeX and LaTeX sources. + For LaTeX sources we try and find the common created files that + are needed on subsequent runs of latex to finish tables of contents, + bibliographies, indices, lists of figures, and hyperlink references. + """ targetbase = SCons.Util.splitext(str(target[0]))[0] + basename = SCons.Util.splitext(str(source[0]))[0] + basefile = os.path.split(str(basename))[1] - target.append(targetbase + '.aux') - env.Precious(targetbase + '.aux') - target.append(targetbase + '.log') - for f in source: - content = f.get_contents() - if tableofcontents_re.search(content): - target.append(targetbase + '.toc') - env.Precious(targetbase + '.toc') - if makeindex_re.search(content): - target.append(targetbase + '.ilg') - target.append(targetbase + '.ind') - target.append(targetbase + '.idx') - env.Precious(targetbase + '.idx') - if bibliography_re.search(content): - target.append(targetbase + '.bbl') - env.Precious(targetbase + '.bbl') - target.append(targetbase + '.blg') - - # read log file to get all .aux files + # + # file names we will make use of in searching the sources and log file + # + emit_suffixes = ['.aux', '.log', '.ilg', '.blg', '.nls', '.nlg', '.gls', '.glg'] + all_suffixes + auxfilename = targetbase + '.aux' logfilename = targetbase + '.log' - dir, base_nodir = os.path.split(targetbase) + + env.SideEffect(auxfilename,target[0]) + env.SideEffect(logfilename,target[0]) + env.Clean(target[0],auxfilename) + env.Clean(target[0],logfilename) + + content = source[0].get_contents() + idx_exists = os.path.exists(targetbase + '.idx') + nlo_exists = os.path.exists(targetbase + '.nlo') + glo_exists = os.path.exists(targetbase + '.glo') + + file_tests = [(auxfile_re.search(content),['.aux']), + (makeindex_re.search(content) or idx_exists,['.idx', '.ind', '.ilg']), + (bibliography_re.search(content),['.bbl', '.blg']), + (tableofcontents_re.search(content),['.toc']), + (listoffigures_re.search(content),['.lof']), + (listoftables_re.search(content),['.lot']), + (hyperref_re.search(content),['.out']), + (makenomenclature_re.search(content) or nlo_exists,['.nlo', '.nls', '.nlg']), + (makeglossary_re.search(content) or glo_exists,['.glo', '.gls', '.glg']), + (beamer_re.search(content),['.nav', '.snm', '.out', '.toc']) ] + # Note we add the various makeindex files if the file produced by latex exists (.idx, .glo, .nlo) + # This covers the case where the \makeindex, \makenomenclature, or \makeglossary + # is not in the main file but we want to clean the files and those made by makeindex + + # TO-DO: need to add a way for the user to extend this list for whatever + # auxiliary files they create in other (or their own) packages + + for (theSearch,suffix_list) in file_tests: + if theSearch: + for suffix in suffix_list: + env.SideEffect(targetbase + suffix,target[0]) + env.Clean(target[0],targetbase + suffix) + + # read log file to get all other files that latex creates and will read on the next pass if os.path.exists(logfilename): content = open(logfilename, "rb").read() out_files = openout_re.findall(content) - out_files = filter(lambda f, 
b=base_nodir+'.aux': f != b, out_files) - if dir != '': - out_files = map(lambda f, d=dir: d+os.sep+f, out_files) - target.extend(out_files) - for f in out_files: - env.Precious( f ) + env.SideEffect(out_files,target[0]) + env.Clean(target[0],out_files) return (target, source) + TeXLaTeXAction = None def generate(env): @@ -324,6 +470,16 @@ def generate(env): if MakeIndexAction is None: MakeIndexAction = SCons.Action.Action("$MAKEINDEXCOM", "$MAKEINDEXCOMSTR") + # Define an action to run MakeIndex on a file for nomenclatures. + global MakeNclAction + if MakeNclAction is None: + MakeNclAction = SCons.Action.Action("$MAKENCLCOM", "$MAKENCLCOMSTR") + + # Define an action to run MakeIndex on a file for glossaries. + global MakeGlossaryAction + if MakeGlossaryAction is None: + MakeGlossaryAction = SCons.Action.Action("$MAKEGLOSSARYCOM", "$MAKEGLOSSARYCOMSTR") + global TeXLaTeXAction if TeXLaTeXAction is None: TeXLaTeXAction = SCons.Action.Action(TeXLaTeXFunction, @@ -354,5 +510,20 @@ def generate(env): env['MAKEINDEXFLAGS'] = SCons.Util.CLVar('') env['MAKEINDEXCOM'] = 'cd ${TARGET.dir} && $MAKEINDEX $MAKEINDEXFLAGS ${SOURCE.file}' + env['MAKEGLOSSARY'] = 'makeindex' + env['MAKEGLOSSARYSTYLE'] = '${SOURCE.filebase}.ist' + env['MAKEGLOSSARYFLAGS'] = SCons.Util.CLVar('-s ${MAKEGLOSSARYSTYLE} -t ${SOURCE.filebase}.glg') + env['MAKEGLOSSARYCOM'] = 'cd ${TARGET.dir} && $MAKEGLOSSARY ${SOURCE.filebase}.glo $MAKEGLOSSARYFLAGS -o ${SOURCE.filebase}.gls' + + env['MAKENCL'] = 'makeindex' + env['MAKENCLSTYLE'] = '$nomencl.ist' + env['MAKENCLFLAGS'] = '-s ${MAKENCLSTYLE} -t ${SOURCE.filebase}.nlg' + env['MAKENCLCOM'] = 'cd ${TARGET.dir} && $MAKENCL ${SOURCE.filebase}.nlo $MAKENCLFLAGS -o ${SOURCE.filebase}.nls' + + # Duplicate from pdflatex.py. If latex.py goes away, then this is still OK. + env['PDFLATEX'] = 'pdflatex' + env['PDFLATEXFLAGS'] = SCons.Util.CLVar('-interaction=nonstopmode') + env['PDFLATEXCOM'] = 'cd ${TARGET.dir} && $PDFLATEX $PDFLATEXFLAGS ${SOURCE.file}' + def exists(env): return env.Detect('tex') |
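Finally, the emitter's file_tests table above reduces to "if this regex (or an already-existing auxiliary file) says the feature is used, register these suffixes as side effects and clean targets"; a minimal standalone illustration with an invented source line and only a few of the tests:

    import re

    content = r"\documentclass{article} \tableofcontents \bibliography{refs}"
    file_tests = [
        (re.search(r'\\tableofcontents', content), ['.toc']),
        (re.search(r'\\bibliography', content),    ['.bbl', '.blg']),
        (re.search(r'\\makeindex', content),       ['.idx', '.ind', '.ilg']),
    ]
    side_effects = [suf for found, sufs in file_tests if found for suf in sufs]
    # -> ['.toc', '.bbl', '.blg']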