author:    Russel Winder <russel@winder.org.uk>  2012-09-16 17:11:28 (GMT)
committer: Russel Winder <russel@winder.org.uk>  2012-09-16 17:11:28 (GMT)
commit:    116470c324258201dd0e3d670b80de3ab96ed890
tree:      0beda9bd3fd997deb44de2c0fde9ad2ed0163efd /src
parent:    87bf700266e276ec4b1d200b5d8787ed24da21af
parent:    edbb8113bf8737e7bc77a4f23d7fd41a44dca5a6
Merge mainline tip.
Diffstat (limited to 'src')
-rw-r--r--  src/CHANGES.txt                       4
-rw-r--r--  src/engine/SCons/SubstTests.py        2
-rw-r--r--  src/engine/SCons/Tool/JavaCommon.py   4
-rw-r--r--  src/engine/SCons/Tool/gettext.py      7
-rw-r--r--  src/engine/SCons/Tool/javacTests.py   6
-rw-r--r--  src/engine/SCons/Tool/msvsTests.py   11
-rw-r--r--  src/engine/SCons/Tool/tex.py         52
7 files changed, 70 insertions(+), 16 deletions(-)
diff --git a/src/CHANGES.txt b/src/CHANGES.txt
index 01a2aa2..7f9242f 100644
--- a/src/CHANGES.txt
+++ b/src/CHANGES.txt
@@ -23,6 +23,10 @@ RELEASE 2.X.X -
From Gary Oberbrunner:
- Fix MSVS solution generation for VS11, and fixed tests.
+ From Rob Managan:
+ - Updated the TeX builder to support the \newglossary command
+ in LaTeX's glossaries package and the files it creates.
+
RELEASE 2.2.0 - Mon, 05 Aug 2012 15:37:48 +0000
From dubcanada on Bitbucket:
diff --git a/src/engine/SCons/SubstTests.py b/src/engine/SCons/SubstTests.py
index 4568528..420fd73 100644
--- a/src/engine/SCons/SubstTests.py
+++ b/src/engine/SCons/SubstTests.py
@@ -554,6 +554,8 @@ class scons_subst_TestCase(SubstTestCase):
"TypeError `'NoneType' object is unsubscriptable' trying to evaluate `${NONE[2]}'",
# Python 2.7 and later
"TypeError `'NoneType' object is not subscriptable' trying to evaluate `${NONE[2]}'",
+ # Python 2.7 and later under Fedora
+ "TypeError `'NoneType' object has no attribute '__getitem__'' trying to evaluate `${NONE[2]}'",
]
assert str(e) in expect, e
else:
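The new entry covers Python builds whose TypeError wording differs yet again; a minimal sketch of what the test exercises (the message text varies with the interpreter build):

    # Indexing None always raises TypeError, but the message depends on the
    # build: "unsubscriptable", "not subscriptable", or
    # "has no attribute '__getitem__'" -- hence the list of accepted variants.
    try:
        None[2]
    except TypeError as e:
        print(str(e))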
diff --git a/src/engine/SCons/Tool/JavaCommon.py b/src/engine/SCons/Tool/JavaCommon.py
index c64d254..156ef97 100644
--- a/src/engine/SCons/Tool/JavaCommon.py
+++ b/src/engine/SCons/Tool/JavaCommon.py
@@ -64,7 +64,7 @@ if java_parsing:
interfaces, and anonymous inner classes."""
def __init__(self, version=default_java_version):
- if not version in ('1.1', '1.2', '1.3','1.4', '1.5', '1.6',
+ if not version in ('1.1', '1.2', '1.3','1.4', '1.5', '1.6', '1.7',
'5', '6'):
msg = "Java version %s not supported" % version
raise NotImplementedError(msg)
@@ -171,7 +171,7 @@ if java_parsing:
if self.version in ('1.1', '1.2', '1.3', '1.4'):
clazz = self.listClasses[0]
self.listOutputs.append('%s$%d' % (clazz, self.nextAnon))
- elif self.version in ('1.5', '1.6', '5', '6'):
+ elif self.version in ('1.5', '1.6', '1.7', '5', '6'):
self.stackAnonClassBrackets.append(self.brackets)
className = []
className.extend(self.listClasses)
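With '1.7' accepted, a build can request that source level through $JAVAVERSION; a minimal SConstruct sketch (target and source paths are hypothetical):

    # JAVAVERSION tells JavaCommon which class-file naming scheme to assume
    # when predicting the .class files produced by anonymous inner classes.
    env = Environment(tools=['default', 'javac'])
    env['JAVAVERSION'] = '1.7'
    env.Java(target='build/classes', source='src')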
diff --git a/src/engine/SCons/Tool/gettext.py b/src/engine/SCons/Tool/gettext.py
index dd336b6..6031e49 100644
--- a/src/engine/SCons/Tool/gettext.py
+++ b/src/engine/SCons/Tool/gettext.py
@@ -40,6 +40,9 @@ def exists(env):
from SCons.Tool.GettextCommon \
import _xgettext_exists, _msginit_exists, \
_msgmerge_exists, _msgfmt_exists
- return _xgettext_exists(env) and _msginit_exists(env) \
- and _msgmerge_exists(env) and _msgfmt_exists(env)
+ try:
+ return _xgettext_exists(env) and _msginit_exists(env) \
+ and _msgmerge_exists(env) and _msgfmt_exists(env)
+ except:
+ return False
#############################################################################
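The intent is that a failing availability probe makes the gettext tool report itself as unavailable rather than aborting build setup; the guarded-probe pattern, sketched generically (helper names are hypothetical):

    # Treat any failure of the probe functions as "tool not available"
    # instead of letting the exception propagate to the user.
    def _tool_exists(env, probes):
        try:
            return all(probe(env) for probe in probes)
        except Exception:
            return False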
diff --git a/src/engine/SCons/Tool/javacTests.py b/src/engine/SCons/Tool/javacTests.py
index fc7f271..4631c8a 100644
--- a/src/engine/SCons/Tool/javacTests.py
+++ b/src/engine/SCons/Tool/javacTests.py
@@ -73,18 +73,18 @@ class pathoptTestCase(unittest.TestCase):
DummyNode('/foo'))
def test_list_node(self):
- self.assert_pathopt(['-foopath', '/foo:/bar'],
+ self.assert_pathopt(['-foopath', os.pathsep.join(['/foo','/bar'])],
['/foo', DummyNode('/bar')])
def test_default_str(self):
self.assert_pathopt_default(
- ['-foopath', '/foo:/bar:/baz'],
+ ['-foopath', os.pathsep.join(['/foo','/bar','/baz'])],
['/foo', '/bar'],
'/baz')
def test_default_list(self):
self.assert_pathopt_default(
- ['-foopath', '/foo:/bar:/baz'],
+ ['-foopath', os.pathsep.join(['/foo','/bar','/baz'])],
['/foo', '/bar'],
['/baz'])
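os.pathsep is the platform's path-list separator, so the expected -foopath value now matches on Windows as well as POSIX:

    import os
    # ':' on POSIX, ';' on Windows
    print(os.pathsep.join(['/foo', '/bar', '/baz']))
    # -> '/foo:/bar:/baz' on Linux/macOS, '/foo;/bar;/baz' on Windows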
diff --git a/src/engine/SCons/Tool/msvsTests.py b/src/engine/SCons/Tool/msvsTests.py
index 495fc24..232963c 100644
--- a/src/engine/SCons/Tool/msvsTests.py
+++ b/src/engine/SCons/Tool/msvsTests.py
@@ -551,11 +551,12 @@ class msvsTestCase(unittest.TestCase):
debug("Testing for default version %s"%self.default_version)
env = DummyEnv()
v1 = get_default_version(env)
- assert env['MSVS_VERSION'] == self.default_version, \
- ("env['MSVS_VERSION'] != self.default_version",self.default_version, env['MSVS_VERSION'])
- assert env['MSVS']['VERSION'] == self.default_version, \
- ("env['MSVS']['VERSION'] != self.default_version",self.default_version, env['MSVS']['VERSION'])
- assert v1 == self.default_version, (self.default_version, v1)
+ if v1:
+ assert env['MSVS_VERSION'] == self.default_version, \
+ ("env['MSVS_VERSION'] != self.default_version",self.default_version, env['MSVS_VERSION'])
+ assert env['MSVS']['VERSION'] == self.default_version, \
+ ("env['MSVS']['VERSION'] != self.default_version",self.default_version, env['MSVS']['VERSION'])
+ assert v1 == self.default_version, (self.default_version, v1)
env = DummyEnv({'MSVS_VERSION':'7.0'})
v2 = get_default_version(env)
diff --git a/src/engine/SCons/Tool/tex.py b/src/engine/SCons/Tool/tex.py
index ce0a51f..0695755 100644
--- a/src/engine/SCons/Tool/tex.py
+++ b/src/engine/SCons/Tool/tex.py
@@ -100,6 +100,10 @@ makeglossary_re = re.compile(r"^[^%\n]*\\makeglossary", re.MULTILINE)
makeglossaries_re = re.compile(r"^[^%\n]*\\makeglossaries", re.MULTILINE)
makeacronyms_re = re.compile(r"^[^%\n]*\\makeglossaries", re.MULTILINE)
beamer_re = re.compile(r"^[^%\n]*\\documentclass\{beamer\}", re.MULTILINE)
+regex = r'^[^%\n]*\\newglossary\s*\[([^\]]+)\]?\s*\{([^}]*)\}\s*\{([^}]*)\}\s*\{([^}]*)\}\s*\{([^}]*)\}'
+newglossary_re = re.compile(regex, re.MULTILINE)
+
+newglossary_suffix = []
# search to find all files included by Latex
include_re = re.compile(r'^[^%\n]*\\(?:include|input){([^}]*)}', re.MULTILINE)
@@ -137,6 +141,9 @@ MakeGlossaryAction = None
# An action to run MakeIndex (for acronyms) on a file.
MakeAcronymsAction = None
+# An action to run MakeIndex (for newglossary commands) on a file.
+MakeNewGlossaryAction = None
+
# Used as a return value of modify_env_var if the variable is not set.
_null = SCons.Scanner.LaTeX._null
@@ -232,7 +239,8 @@ def InternalLaTeXAuxAction(XXXLaTeXAction, target = None, source= None, env=None
saved_hashes = {}
suffix_nodes = {}
- for suffix in all_suffixes:
+
+ for suffix in all_suffixes+sum(newglossary_suffix, []):
theNode = env.fs.File(targetbase + suffix)
suffix_nodes[suffix] = theNode
saved_hashes[suffix] = theNode.get_csig()
@@ -410,6 +418,21 @@ def InternalLaTeXAuxAction(XXXLaTeXAction, target = None, source= None, env=None
'alg')
return result
+ # Now decide if latex will need to be run again due to newglossary command.
+ for ig in range(len(newglossary_suffix)):
+ if check_MD5(suffix_nodes[newglossary_suffix[ig][2]],newglossary_suffix[ig][2]) or (count == 1):
+ # We must run makeindex
+ if Verbose:
+ print "Need to run makeindex for newglossary"
+ newglfile = suffix_nodes[newglossary_suffix[ig][2]]
+ MakeNewGlossaryAction = SCons.Action.Action("$MAKENEWGLOSSARY ${SOURCE.filebase}%s -s ${SOURCE.filebase}.ist -t ${SOURCE.filebase}%s -o ${SOURCE.filebase}%s" % (newglossary_suffix[ig][2],newglossary_suffix[ig][0],newglossary_suffix[ig][1]), "$MAKENEWGLOSSARYCOMSTR")
+
+ result = MakeNewGlossaryAction(newglfile, newglfile, env)
+ if result != 0:
+ check_file_error_message('%s (newglossary)' % env['MAKENEWGLOSSARY'],
+ newglossary_suffix[ig][0])
+ return result
+
# Now decide if latex needs to be run yet again to resolve warnings.
if warning_rerun_re.search(logContent):
must_rerun_latex = True
@@ -595,9 +618,23 @@ def ScanFiles(theFile, target, paths, file_tests, file_tests_search, env, graphi
for i in range(len(file_tests_search)):
if file_tests[i][0] is None:
+ if Verbose:
+ print "scan i ",i," files_tests[i] ",file_tests[i], file_tests[i][1]
file_tests[i][0] = file_tests_search[i].search(content)
if Verbose and file_tests[i][0]:
- print " found match for ",file_tests[i][-1][-1]
+ print " found match for ",file_tests[i][1][-1]
+ # for newglossary insert the suffixes in file_tests[i]
+ if file_tests[i][0] and file_tests[i][1][-1] == 'newglossary':
+ findresult = file_tests_search[i].findall(content)
+ for l in range(len(findresult)) :
+ (file_tests[i][1]).insert(0,'.'+findresult[l][3])
+ (file_tests[i][1]).insert(0,'.'+findresult[l][2])
+ (file_tests[i][1]).insert(0,'.'+findresult[l][0])
+ suffix_list = ['.'+findresult[l][0],'.'+findresult[l][2],'.'+findresult[l][3] ]
+ newglossary_suffix.append(suffix_list)
+ if Verbose:
+ print " new suffixes for newglossary ",newglossary_suffix
+
incResult = includeOnly_re.search(content)
if incResult:
@@ -676,7 +713,8 @@ def tex_emitter_core(target, source, env, graphics_extensions):
makeglossary_re,
makeglossaries_re,
makeacronyms_re,
- beamer_re ]
+ beamer_re,
+ newglossary_re ]
# set up list with the file suffixes that need emitting
# when a feature is found
file_tests_suff = [['.aux','aux_file'],
@@ -693,7 +731,9 @@ def tex_emitter_core(target, source, env, graphics_extensions):
['.glo', '.gls', '.glg','glossary'],
['.glo', '.gls', '.glg','glossaries'],
['.acn', '.acr', '.alg','acronyms'],
- ['.nav', '.snm', '.out', '.toc','beamer'] ]
+ ['.nav', '.snm', '.out', '.toc','beamer'],
+ ['newglossary',] ]
+ # for newglossary the suffixes are added as we find the command
# build the list of lists
file_tests = []
for i in range(len(file_tests_search)):
@@ -722,6 +762,7 @@ def tex_emitter_core(target, source, env, graphics_extensions):
if Verbose:
print "search path ",paths
+ # scan all sources for side effect files
aux_files = []
file_tests = ScanFiles(source[0], target, paths, file_tests, file_tests_search, env, graphics_extensions, targetdir, aux_files)
@@ -917,6 +958,9 @@ def generate_common(env):
env['MAKENCLFLAGS'] = '-s ${MAKENCLSTYLE} -t ${SOURCE.filebase}.nlg'
env['MAKENCLCOM'] = CDCOM + '${TARGET.dir} && $MAKENCL ${SOURCE.filebase}.nlo $MAKENCLFLAGS -o ${SOURCE.filebase}.nls'
+ env['MAKENEWGLOSSARY'] = 'makeindex'
+ env['MAKENEWGLOSSARYCOM'] = CDCOM + '${TARGET.dir} && $MAKENEWGLOSSARY '
+
def exists(env):
generate_darwin(env)
return env.Detect('tex')
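For reference, the new regular expression captures the five arguments of \newglossary as defined by the glossaries package: the makeindex log extension, the glossary name, the input extension (read back by LaTeX), the output extension (written by LaTeX), and the title. A standalone sketch with a hypothetical sample line:

    import re

    regex = r'^[^%\n]*\\newglossary\s*\[([^\]]+)\]?\s*\{([^}]*)\}\s*\{([^}]*)\}\s*\{([^}]*)\}\s*\{([^}]*)\}'
    newglossary_re = re.compile(regex, re.MULTILINE)

    sample = r'\newglossary[slg]{symbols}{sym}{sbl}{List of Symbols}'
    print(newglossary_re.search(sample).groups())
    # -> ('slg', 'symbols', 'sym', 'sbl', 'List of Symbols')
    # ScanFiles turns groups 1, 3 and 4 into extra side-effect suffixes
    # ('.slg', '.sym', '.sbl'), and InternalLaTeXAuxAction reruns makeindex
    # on the .sbl file (writing .sym, logging to .slg) whenever its checksum
    # changes between LaTeX runs.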