diff --git a/pyperformance/data-files/benchmarks/bm_pickle_opt/pyproject.toml b/pyperformance/data-files/benchmarks/bm_pickle_opt/pyproject.toml
new file mode 100644
index 00000000..07235075
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_pickle_opt/pyproject.toml
@@ -0,0 +1,9 @@
+[project]
+name = "pyperformance_bm_pickle_opt"
+requires-python = ">=3.8"
+dependencies = ["pyperf"]
+urls = {repository = "https://github.com/python/pyperformance"}
+dynamic = ["version"]
+
+[tool.pyperformance]
+name = "pickle_opt"
diff --git a/pyperformance/data-files/benchmarks/bm_pickle_opt/run_benchmark.py b/pyperformance/data-files/benchmarks/bm_pickle_opt/run_benchmark.py
new file mode 100644
index 00000000..8872fddc
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_pickle_opt/run_benchmark.py
@@ -0,0 +1,43 @@
+"""The background for this benchmark is that the garbage collection in
+Python 3.14.0 had a performance regression, see
+
+* https://github.com/python/cpython/issues/140175
+* https://github.com/python/cpython/issues/139951
+
+"""
+
+import tempfile
+from pathlib import Path
+import pyperf
+import pickle
+import pickletools
+
+
+def setup(fname, N):
+    x = {}
+    for i in range(1, N):
+        x[i] = f"ii{i:>07}"
+
+    with open(fname, "wb") as fh:
+        pickle.dump(x, fh, protocol=4)
+
+
+def run(fname):
+    with open(fname, "rb") as fh:
+        p = fh.read()
+
+    s = pickletools.optimize(p)
+
+    with open(fname.with_suffix(".out"), "wb") as fh:
+        fh.write(s)
+
+
+if __name__ == "__main__":
+    runner = pyperf.Runner()
+    N = 1_000_000
+    with tempfile.TemporaryDirectory() as tmp:
+        tmp_path = Path(tmp)
+        fname = tmp_path / "pickle"
+        setup(fname, N)
+        runner.metadata["description"] = "Pickletools optimize"
+        runner.bench_func("pickle_opt", run, fname)
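
Reviewer note (not part of the patch): the hot path here is pickletools.optimize() applied to the pickle of a dict with roughly 1,000,000 short string values. Below is a minimal standalone sketch of the same workload for anyone who wants to try it outside the pyperformance harness; it keeps the payload in memory and uses the standard-library timeit instead of pyperf, and the smaller N is purely an illustrative choice to keep the sketch quick.

```python
# Standalone sketch (not part of the patch): reproduce the workload that
# run_benchmark.py times, without pyperf or temporary files.
import pickle
import pickletools
import timeit

N = 100_000  # illustrative; the benchmark itself uses 1_000_000
payload = pickle.dumps({i: f"ii{i:>07}" for i in range(1, N)}, protocol=4)

# pickletools.optimize() returns an equivalent pickle byte string with
# unused PUT opcodes eliminated; it is the central call in run().
elapsed = timeit.timeit(lambda: pickletools.optimize(payload), number=3)
optimized = pickletools.optimize(payload)
print(f"3 runs: {elapsed:.2f}s, {len(payload)} -> {len(optimized)} bytes")
```

The setup()/run() split in the patch keeps the one-time pickle creation out of the timed function, which is what pyperf expects; the sketch above skips that separation for brevity.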