Diffstat (limited to 'Lib/importlib/test/benchmark.py')
-rw-r--r--  Lib/importlib/test/benchmark.py  157
1 file changed, 112 insertions(+), 45 deletions(-)
diff --git a/Lib/importlib/test/benchmark.py b/Lib/importlib/test/benchmark.py
index b5de6c6..87b1775 100644
--- a/Lib/importlib/test/benchmark.py
+++ b/Lib/importlib/test/benchmark.py
@@ -9,9 +9,11 @@ from .source import util as source_util
 import decimal
 import imp
 import importlib
+import json
 import os
 import py_compile
 import sys
+import tabnanny
 import timeit
@@ -59,7 +61,7 @@ def builtin_mod(seconds, repeat):
 
 
 def source_wo_bytecode(seconds, repeat):
-    """Source w/o bytecode: simple"""
+    """Source w/o bytecode: small"""
     sys.dont_write_bytecode = True
     try:
         name = '__importlib_test_benchmark__'
@@ -73,23 +75,30 @@ def source_wo_bytecode(seconds, repeat):
         sys.dont_write_bytecode = False
 
 
-def decimal_wo_bytecode(seconds, repeat):
-    """Source w/o bytecode: decimal"""
-    name = 'decimal'
-    decimal_bytecode = imp.cache_from_source(decimal.__file__)
-    if os.path.exists(decimal_bytecode):
-        os.unlink(decimal_bytecode)
-    sys.dont_write_bytecode = True
-    try:
-        for result in bench(name, lambda: sys.modules.pop(name), repeat=repeat,
-                            seconds=seconds):
-            yield result
-    finally:
-        sys.dont_write_bytecode = False
+def _wo_bytecode(module):
+    name = module.__name__
+    def benchmark_wo_bytecode(seconds, repeat):
+        """Source w/o bytecode: {}"""
+        bytecode_path = imp.cache_from_source(module.__file__)
+        if os.path.exists(bytecode_path):
+            os.unlink(bytecode_path)
+        sys.dont_write_bytecode = True
+        try:
+            for result in bench(name, lambda: sys.modules.pop(name),
+                                repeat=repeat, seconds=seconds):
+                yield result
+        finally:
+            sys.dont_write_bytecode = False
+
+    benchmark_wo_bytecode.__doc__ = benchmark_wo_bytecode.__doc__.format(name)
+    return benchmark_wo_bytecode
+
+tabnanny_wo_bytecode = _wo_bytecode(tabnanny)
+decimal_wo_bytecode = _wo_bytecode(decimal)
 
 
 def source_writing_bytecode(seconds, repeat):
-    """Source writing bytecode: simple"""
+    """Source writing bytecode: small"""
     assert not sys.dont_write_bytecode
     name = '__importlib_test_benchmark__'
     with source_util.create_modules(name) as mapping:
@@ -101,19 +110,27 @@ def source_writing_bytecode(seconds, repeat):
             yield result
 
 
-def decimal_writing_bytecode(seconds, repeat):
-    """Source writing bytecode: decimal"""
-    assert not sys.dont_write_bytecode
-    name = 'decimal'
-    def cleanup():
-        sys.modules.pop(name)
-        os.unlink(imp.cache_from_source(decimal.__file__))
-    for result in bench(name, cleanup, repeat=repeat, seconds=seconds):
-        yield result
+def _writing_bytecode(module):
+    name = module.__name__
+    def writing_bytecode_benchmark(seconds, repeat):
+        """Source writing bytecode: {}"""
+        assert not sys.dont_write_bytecode
+        def cleanup():
+            sys.modules.pop(name)
+            os.unlink(imp.cache_from_source(module.__file__))
+        for result in bench(name, cleanup, repeat=repeat, seconds=seconds):
+            yield result
+
+    writing_bytecode_benchmark.__doc__ = (
+        writing_bytecode_benchmark.__doc__.format(name))
+    return writing_bytecode_benchmark
+
+tabnanny_writing_bytecode = _writing_bytecode(tabnanny)
+decimal_writing_bytecode = _writing_bytecode(decimal)
 
 
 def source_using_bytecode(seconds, repeat):
-    """Bytecode w/ source: simple"""
+    """Source w/ bytecode: small"""
     name = '__importlib_test_benchmark__'
     with source_util.create_modules(name) as mapping:
         py_compile.compile(mapping[name])
@@ -123,27 +140,56 @@ def source_using_bytecode(seconds, repeat):
             yield result
 
 
-def decimal_using_bytecode(seconds, repeat):
-    """Bytecode w/ source: decimal"""
-    name = 'decimal'
-    py_compile.compile(decimal.__file__)
-    for result in bench(name, lambda: sys.modules.pop(name), repeat=repeat,
-                        seconds=seconds):
-        yield result
+def _using_bytecode(module):
+    name = module.__name__
+    def using_bytecode_benchmark(seconds, repeat):
+        """Source w/ bytecode: {}"""
+        py_compile.compile(module.__file__)
+        for result in bench(name, lambda: sys.modules.pop(name), repeat=repeat,
+                            seconds=seconds):
+            yield result
+    using_bytecode_benchmark.__doc__ = (
+        using_bytecode_benchmark.__doc__.format(name))
+    return using_bytecode_benchmark
 
 
-def main(import_):
+tabnanny_using_bytecode = _using_bytecode(tabnanny)
+decimal_using_bytecode = _using_bytecode(decimal)
+
+
+def main(import_, options):
+    if options.source_file:
+        with options.source_file:
+            prev_results = json.load(options.source_file)
+    else:
+        prev_results = {}
     __builtins__.__import__ = import_
     benchmarks = (from_cache, builtin_mod,
-                  source_using_bytecode, source_wo_bytecode,
                   source_writing_bytecode,
-                  decimal_using_bytecode, decimal_writing_bytecode,
-                  decimal_wo_bytecode,)
+                  source_wo_bytecode, source_using_bytecode,
+                  tabnanny_writing_bytecode,
+                  tabnanny_wo_bytecode, tabnanny_using_bytecode,
+                  decimal_writing_bytecode,
+                  decimal_wo_bytecode, decimal_using_bytecode,
+                  )
+    if options.benchmark:
+        for b in benchmarks:
+            if b.__doc__ == options.benchmark:
+                benchmarks = [b]
+                break
+        else:
+            print('Unknown benchmark: {!r}'.format(options.benchmark),
+                  file=sys.stderr)
+            sys.exit(1)
     seconds = 1
     seconds_plural = 's' if seconds > 1 else ''
     repeat = 3
-    header = "Measuring imports/second over {} second{}, best out of {}\n"
-    print(header.format(seconds, seconds_plural, repeat))
+    header = ('Measuring imports/second over {} second{}, best out of {}\n'
+              'Entire benchmark run should take about {} seconds\n'
+              'Using {!r} as __import__\n')
+    print(header.format(seconds, seconds_plural, repeat,
+                        len(benchmarks) * seconds * repeat, __import__))
+    new_results = {}
     for benchmark in benchmarks:
         print(benchmark.__doc__, "[", end=' ')
         sys.stdout.flush()
@@ -154,19 +200,40 @@ def main(import_):
             sys.stdout.flush()
         assert not sys.dont_write_bytecode
         print("]", "best is", format(max(results), ',d'))
+        new_results[benchmark.__doc__] = results
+    if prev_results:
+        print('\n\nComparing new vs. old\n')
+        for benchmark in benchmarks:
+            benchmark_name = benchmark.__doc__
+            old_result = max(prev_results[benchmark_name])
+            new_result = max(new_results[benchmark_name])
+            result = '{:,d} vs. {:,d} ({:%})'.format(new_result,
+                                                     old_result,
+                                                     new_result/old_result)
+            print(benchmark_name, ':', result)
+    if options.dest_file:
+        with options.dest_file:
+            json.dump(new_results, options.dest_file, indent=2)
 
 
 if __name__ == '__main__':
-    import optparse
+    import argparse
 
-    parser = optparse.OptionParser()
-    parser.add_option('-b', '--builtin', dest='builtin', action='store_true',
+    parser = argparse.ArgumentParser()
+    parser.add_argument('-b', '--builtin', dest='builtin', action='store_true',
                         default=False, help="use the built-in __import__")
-    options, args = parser.parse_args()
-    if args:
-        raise RuntimeError("unrecognized args: {}".format(args))
+    parser.add_argument('-r', '--read', dest='source_file',
+                        type=argparse.FileType('r'),
+                        help='file to read benchmark data from to compare '
+                             'against')
+    parser.add_argument('-w', '--write', dest='dest_file',
+                        type=argparse.FileType('w'),
+                        help='file to write benchmark data to')
+    parser.add_argument('--benchmark', dest='benchmark',
+                        help='specific benchmark to run')
+    options = parser.parse_args()
     import_ = __import__
     if not options.builtin:
         import_ = importlib.__import__
-    main(import_)
+    main(import_, options)
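
The heart of the change is a closure factory: each _*_bytecode helper captures a module, builds a benchmark generator for it, and stamps the module's name into the generator's docstring, which main() then uses both as the printed label and as the key for results in the JSON file. A minimal standalone sketch of that pattern; the name _make_benchmark, the "Source import" label, and the constant payload are illustrative, not part of the patch:

    import decimal
    import tabnanny

    def _make_benchmark(module):
        # Capture the module in a closure and stamp its name into the
        # docstring; the driver keys results off __doc__, so each generated
        # function needs a unique, human-readable name.
        name = module.__name__
        def benchmark(seconds, repeat):
            """Source import: {}"""
            for _ in range(repeat):
                yield 0  # a real benchmark yields an imports/second count
        benchmark.__doc__ = benchmark.__doc__.format(name)
        return benchmark

    decimal_bench = _make_benchmark(decimal)
    tabnanny_bench = _make_benchmark(tabnanny)
    print(decimal_bench.__doc__)   # Source import: decimal
    print(tabnanny_bench.__doc__)  # Source import: tabnanny

With the new argparse options, one plausible workflow is to record a baseline run with -w, then compare a later run against it with -r (the file name is illustrative); --benchmark restricts the run to a single benchmark, matched by its docstring:

    $ ./python -m importlib.test.benchmark -w base.json
    $ ./python -m importlib.test.benchmark -b -r base.json
    $ ./python -m importlib.test.benchmark --benchmark 'Source w/o bytecode: small'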