author     R David Murray <rdmurray@bitdance.com>  2012-04-05 01:29:03 (GMT)
committer  R David Murray <rdmurray@bitdance.com>  2012-04-05 01:29:03 (GMT)
commit     ce9806215b82e4a0e8dd37eb9a669477161adfec (patch)
tree       02073b791a0f11927d7adb5e2ecf0305e0474b47 /Tools/scripts
parent     a701388de1135241b5a8e4c970e06c0e83a66dc0 (diff)
parent     54ac832a24a0f40ea3278707420b191be3619c99 (diff)
Merge #14490, #14491: add 'sundry'-style import tests for Tools/scripts.
This patch changes a few of the scripts to have __name__=='__main__' clauses so that they are importable without running. Also fixes the syntax errors revealed by the tests.
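The pattern applied to each script is the standard import guard. A minimal sketch (illustrative only, not lifted from the patch):

    import sys

    def helper():
        # module-level functions stay importable and testable
        return sys.version

    if __name__ == '__main__':
        # statements moved under this guard no longer run on import,
        # so a 'sundry'-style test can simply import the module
        print(helper())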
Diffstat (limited to 'Tools/scripts')
-rwxr-xr-x  Tools/scripts/abitype.py              88
-rwxr-xr-x  Tools/scripts/find_recursionlimit.py  24
-rwxr-xr-x  Tools/scripts/findnocoding.py         54
-rwxr-xr-x  Tools/scripts/fixcid.py                2
-rwxr-xr-x  Tools/scripts/md5sum.py                2
-rwxr-xr-x  Tools/scripts/parseentities.py         3
6 files changed, 89 insertions, 84 deletions
diff --git a/Tools/scripts/abitype.py b/Tools/scripts/abitype.py
index 4d96c8b..ab0ba42 100755
--- a/Tools/scripts/abitype.py
+++ b/Tools/scripts/abitype.py
@@ -3,34 +3,6 @@
# Usage: abitype.py < old_code > new_code
import re, sys
-############ Simplistic C scanner ##################################
-tokenizer = re.compile(
- r"(?P<preproc>#.*\n)"
- r"|(?P<comment>/\*.*?\*/)"
- r"|(?P<ident>[a-zA-Z_][a-zA-Z0-9_]*)"
- r"|(?P<ws>[ \t\n]+)"
- r"|(?P<other>.)",
- re.MULTILINE)
-
-tokens = []
-source = sys.stdin.read()
-pos = 0
-while pos != len(source):
- m = tokenizer.match(source, pos)
- tokens.append([m.lastgroup, m.group()])
- pos += len(tokens[-1][1])
- if tokens[-1][0] == 'preproc':
- # continuation lines are considered
- # only in preprocess statements
- while tokens[-1][1].endswith('\\\n'):
- nl = source.find('\n', pos)
- if nl == -1:
- line = source[pos:]
- else:
- line = source[pos:nl+1]
- tokens[-1][1] += line
- pos += len(line)
-
###### Replacement of PyTypeObject static instances ##############
# classify each token, giving it a one-letter code:
@@ -79,7 +51,7 @@ def get_fields(start, real_end):
while tokens[pos][0] in ('ws', 'comment'):
pos += 1
if tokens[pos][1] != 'PyVarObject_HEAD_INIT':
- raise Exception, '%s has no PyVarObject_HEAD_INIT' % name
+ raise Exception('%s has no PyVarObject_HEAD_INIT' % name)
while tokens[pos][1] != ')':
pos += 1
pos += 1
@@ -183,18 +155,48 @@ def make_slots(name, fields):
return '\n'.join(res)
-# Main loop: replace all static PyTypeObjects until
-# there are none left.
-while 1:
- c = classify()
- m = re.search('(SW)?TWIW?=W?{.*?};', c)
- if not m:
- break
- start = m.start()
- end = m.end()
- name, fields = get_fields(start, m)
- tokens[start:end] = [('',make_slots(name, fields))]
+if __name__ == '__main__':
+
+ ############ Simplistic C scanner ##################################
+ tokenizer = re.compile(
+ r"(?P<preproc>#.*\n)"
+ r"|(?P<comment>/\*.*?\*/)"
+ r"|(?P<ident>[a-zA-Z_][a-zA-Z0-9_]*)"
+ r"|(?P<ws>[ \t\n]+)"
+ r"|(?P<other>.)",
+ re.MULTILINE)
+
+ tokens = []
+ source = sys.stdin.read()
+ pos = 0
+ while pos != len(source):
+ m = tokenizer.match(source, pos)
+ tokens.append([m.lastgroup, m.group()])
+ pos += len(tokens[-1][1])
+ if tokens[-1][0] == 'preproc':
+ # continuation lines are considered
+ # only in preprocess statements
+ while tokens[-1][1].endswith('\\\n'):
+ nl = source.find('\n', pos)
+ if nl == -1:
+ line = source[pos:]
+ else:
+ line = source[pos:nl+1]
+ tokens[-1][1] += line
+ pos += len(line)
+
+ # Main loop: replace all static PyTypeObjects until
+ # there are none left.
+ while 1:
+ c = classify()
+ m = re.search('(SW)?TWIW?=W?{.*?};', c)
+ if not m:
+ break
+ start = m.start()
+ end = m.end()
+ name, fields = get_fields(start, m)
+ tokens[start:end] = [('',make_slots(name, fields))]
-# Output result to stdout
-for t, v in tokens:
- sys.stdout.write(v)
+ # Output result to stdout
+ for t, v in tokens:
+ sys.stdout.write(v)
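For reference, the scanner relocated under the guard classifies each position of the input by whichever named group matched. A small self-contained sketch (the sample C snippet is made up, not from the patch):

    import re

    tokenizer = re.compile(
        r"(?P<preproc>#.*\n)"
        r"|(?P<comment>/\*.*?\*/)"
        r"|(?P<ident>[a-zA-Z_][a-zA-Z0-9_]*)"
        r"|(?P<ws>[ \t\n]+)"
        r"|(?P<other>.)",
        re.MULTILINE)

    source = "#define X 1\nint x; /* demo */\n"
    pos = 0
    while pos != len(source):
        m = tokenizer.match(source, pos)
        print(m.lastgroup, repr(m.group()))  # e.g. preproc '#define X 1\n'
        pos = m.end()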
diff --git a/Tools/scripts/find_recursionlimit.py b/Tools/scripts/find_recursionlimit.py
index 443f052..7a86603 100755
--- a/Tools/scripts/find_recursionlimit.py
+++ b/Tools/scripts/find_recursionlimit.py
@@ -106,14 +106,16 @@ def check_limit(n, test_func_name):
else:
print("Yikes!")
-limit = 1000
-while 1:
- check_limit(limit, "test_recurse")
- check_limit(limit, "test_add")
- check_limit(limit, "test_repr")
- check_limit(limit, "test_init")
- check_limit(limit, "test_getattr")
- check_limit(limit, "test_getitem")
- check_limit(limit, "test_cpickle")
- print("Limit of %d is fine" % limit)
- limit = limit + 100
+if __name__ == '__main__':
+
+ limit = 1000
+ while 1:
+ check_limit(limit, "test_recurse")
+ check_limit(limit, "test_add")
+ check_limit(limit, "test_repr")
+ check_limit(limit, "test_init")
+ check_limit(limit, "test_getattr")
+ check_limit(limit, "test_getitem")
+ check_limit(limit, "test_cpickle")
+ print("Limit of %d is fine" % limit)
+ limit = limit + 100
diff --git a/Tools/scripts/findnocoding.py b/Tools/scripts/findnocoding.py
index c42fa7c..5aa1feb 100755
--- a/Tools/scripts/findnocoding.py
+++ b/Tools/scripts/findnocoding.py
@@ -76,29 +76,31 @@ usage = """Usage: %s [-cd] paths...
-c: recognize Python source files trying to compile them
-d: debug output""" % sys.argv[0]
-try:
- opts, args = getopt.getopt(sys.argv[1:], 'cd')
-except getopt.error as msg:
- print(msg, file=sys.stderr)
- print(usage, file=sys.stderr)
- sys.exit(1)
-
-is_python = pysource.looks_like_python
-debug = False
-
-for o, a in opts:
- if o == '-c':
- is_python = pysource.can_be_compiled
- elif o == '-d':
- debug = True
-
-if not args:
- print(usage, file=sys.stderr)
- sys.exit(1)
-
-for fullpath in pysource.walk_python_files(args, is_python):
- if debug:
- print("Testing for coding: %s" % fullpath)
- result = needs_declaration(fullpath)
- if result:
- print(fullpath)
+if __name__ == '__main__':
+
+ try:
+ opts, args = getopt.getopt(sys.argv[1:], 'cd')
+ except getopt.error as msg:
+ print(msg, file=sys.stderr)
+ print(usage, file=sys.stderr)
+ sys.exit(1)
+
+ is_python = pysource.looks_like_python
+ debug = False
+
+ for o, a in opts:
+ if o == '-c':
+ is_python = pysource.can_be_compiled
+ elif o == '-d':
+ debug = True
+
+ if not args:
+ print(usage, file=sys.stderr)
+ sys.exit(1)
+
+ for fullpath in pysource.walk_python_files(args, is_python):
+ if debug:
+ print("Testing for coding: %s" % fullpath)
+ result = needs_declaration(fullpath)
+ if result:
+ print(fullpath)
diff --git a/Tools/scripts/fixcid.py b/Tools/scripts/fixcid.py
index 2d4cd1a..87e2a09 100755
--- a/Tools/scripts/fixcid.py
+++ b/Tools/scripts/fixcid.py
@@ -292,7 +292,7 @@ def addsubst(substfile):
if not words: continue
if len(words) == 3 and words[0] == 'struct':
words[:2] = [words[0] + ' ' + words[1]]
- elif len(words) <> 2:
+ elif len(words) != 2:
err(substfile + '%s:%r: warning: bad line: %r' % (substfile, lineno, line))
continue
if Reverse:
diff --git a/Tools/scripts/md5sum.py b/Tools/scripts/md5sum.py
index 743da72..521960c 100755
--- a/Tools/scripts/md5sum.py
+++ b/Tools/scripts/md5sum.py
@@ -20,7 +20,7 @@ file ... : files to sum; '-' or no files means stdin
import sys
import os
import getopt
-import md5
+from hashlib import md5
def sum(*files):
sts = 0
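The md5 module no longer exists in Python 3, which is why importing md5sum.py failed; hashlib provides the replacement used above. A minimal sketch of the hashlib usage (the filename is hypothetical):

    from hashlib import md5

    digest = md5()
    with open('example.bin', 'rb') as fp:  # hypothetical input file
        for chunk in iter(lambda: fp.read(8192), b''):
            digest.update(chunk)
    print(digest.hexdigest())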
diff --git a/Tools/scripts/parseentities.py b/Tools/scripts/parseentities.py
index 5b0f1c6..a042d1c 100755
--- a/Tools/scripts/parseentities.py
+++ b/Tools/scripts/parseentities.py
@@ -13,7 +13,6 @@
"""
import re,sys
-import TextTools
entityRE = re.compile('<!ENTITY +(\w+) +CDATA +"([^"]+)" +-- +((?:.|\n)+?) *-->')
@@ -45,7 +44,7 @@ def writefile(f,defs):
charcode = repr(charcode)
else:
charcode = repr(charcode)
- comment = TextTools.collapse(comment)
+ comment = ' '.join(comment.split())
f.write(" '%s':\t%s, \t# %s\n" % (name,charcode,comment))
f.write('\n}\n')
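TextTools (the third-party mxTextTools package) is not in the standard library, so the whitespace-collapsing call is replaced with plain string methods. A quick illustrative check (sample text made up):

    comment = "small a, acute\n     accent"
    print(' '.join(comment.split()))  # -> 'small a, acute accent'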