From c24211bb3979952bd62b961898f76a1b328ea925 Mon Sep 17 00:00:00 2001 From: Peyton Murray Date: Wed, 23 Oct 2024 02:13:22 -0700 Subject: [PATCH] Add automated benchmarking (#377) --- .asv/results/benchmarks.json | 352 ++++++++++++++++++ .../52aca7e9-virtualenv-py3.11.json | 1 + .../a1a3cdc7-virtualenv-py3.11.json | 1 + .../machine.json | 9 + .cirun.yml | 7 + .github/workflows/benchmarks.yml | 47 +++ .github/workflows/publish_docs.yml | 20 +- .gitignore | 2 +- .pre-commit-config.yaml | 3 +- asv.conf.json | 13 +- benchmarks/delete_versions.py | 2 +- benchmarks_install.py | 83 ----- docs/index.rst | 3 +- pyproject.toml | 1 + update_benchmarks.sh | 17 - 15 files changed, 447 insertions(+), 114 deletions(-) create mode 100644 .asv/results/benchmarks.json create mode 100644 .asv/results/cirun-peytondmurray--versioned-hdf5-ffd0030/52aca7e9-virtualenv-py3.11.json create mode 100644 .asv/results/cirun-peytondmurray--versioned-hdf5-ffd0030/a1a3cdc7-virtualenv-py3.11.json create mode 100644 .asv/results/cirun-peytondmurray--versioned-hdf5-ffd0030/machine.json create mode 100644 .cirun.yml create mode 100644 .github/workflows/benchmarks.yml delete mode 100644 benchmarks_install.py delete mode 100755 update_benchmarks.sh diff --git a/.asv/results/benchmarks.json b/.asv/results/benchmarks.json new file mode 100644 index 00000000..611d984e --- /dev/null +++ b/.asv/results/benchmarks.json @@ -0,0 +1,352 @@ +{ + "delete_versions.TimeDeleting.time_delete": { + "code": "class TimeDeleting:\n def time_delete(self, n):\n tmp_name = tempfile.mktemp('.h5')\n shutil.copy2(filename, tmp_name)\n try:\n # want to keep only every 10th version\n versions_to_delete = []\n with h5py.File(tmp_name, 'r') as f:\n vf = VersionedHDF5File(f)\n versions = sorted([(v, vf._versions[v].attrs['timestamp']) for v in vf._versions], key=lambda t: t[1])\n for i, v in enumerate(versions):\n if i % 10 != 0:\n versions_to_delete.append(v[0])\n \n with h5py.File(tmp_name, 'r+') as f:\n delete_versions(f, versions_to_delete)\n finally:\n os.remove(tmp_name)\n\n def setup(self, n):\n if not os.path.exists(filename):\n with h5py.File(filename, 'w') as f:\n vf = VersionedHDF5File(f)\n with vf.stage_version('init') as sv:\n sv.create_dataset('values', shape=(0, 0), dtype='float', fillvalue=numpy.nan,\n chunks=(22, 100), maxshape=(None, None), compression='lzf')\n \n # generate some test data with around 1000 versions\n v = 1\n with h5py.File(filename, 'r+') as f:\n vf = VersionedHDF5File(f)\n for d in range(3):\n with vf.stage_version(str(v)) as sv:\n values_ds = sv['values']\n values_ds.resize((values_ds.shape[0] + 1, values_ds.shape[1] + 5000))\n values_ds[-1, -5000] = numpy.random.rand()\n v += 1\n for c in range(n):\n with vf.stage_version(str(v)) as sv:\n values_ds = sv['values']\n idxs = numpy.random.choice(values_ds.shape[1], 50, replace=False)\n values_ds[-1, idxs] = numpy.random.rand(50)\n v += 1", + "min_run_count": 2, + "name": "delete_versions.TimeDeleting.time_delete", + "number": 0, + "param_names": [ + "param1" + ], + "params": [ + [ + "10", + "30", + "50" + ] + ], + "repeat": 0, + "rounds": 2, + "sample_time": 0.01, + "timeout": 1000, + "type": "time", + "unit": "seconds", + "version": "08272cd8ae51a90b06a17e73c3b3546ed492ada2a98fdcd9e070bbb6d95303f3", + "warmup_time": -1 + }, + "hdf5.TimePureHDF5.time_getattr": { + "code": "class TimePureHDF5:\n def time_getattr(self):\n dataset = self.file['data']\n dataset[:, 0, 0:6]\n\n def setup(self):\n self.file = h5py.File('bench.hdf5', 'w')\n self.file.create_dataset('data',\n 
data=np.arange(10000).reshape((100, 10, 10)),\n chunks=(3, 3, 3), maxshape=(None, None, None))", + "min_run_count": 2, + "name": "hdf5.TimePureHDF5.time_getattr", + "number": 0, + "param_names": [], + "params": [], + "repeat": 0, + "rounds": 2, + "sample_time": 0.01, + "type": "time", + "unit": "seconds", + "version": "2e759d308432d2570cd8b41140c6171cbdac2f79c07f8cf323e54c6be0273446", + "warmup_time": -1 + }, + "hdf5.TimePureHDF5.time_resize_bigger": { + "code": "class TimePureHDF5:\n def time_resize_bigger(self):\n dataset = self.file['data']\n dataset.resize((100, 100, 100))\n\n def setup(self):\n self.file = h5py.File('bench.hdf5', 'w')\n self.file.create_dataset('data',\n data=np.arange(10000).reshape((100, 10, 10)),\n chunks=(3, 3, 3), maxshape=(None, None, None))", + "min_run_count": 2, + "name": "hdf5.TimePureHDF5.time_resize_bigger", + "number": 0, + "param_names": [], + "params": [], + "repeat": 0, + "rounds": 2, + "sample_time": 0.01, + "type": "time", + "unit": "seconds", + "version": "d0a22645103f056daa04af1d58f2abbcf7d4459232db9c22ddb408f1c0213792", + "warmup_time": -1 + }, + "hdf5.TimePureHDF5.time_resize_smaller": { + "code": "class TimePureHDF5:\n def time_resize_smaller(self):\n dataset = self.file['data']\n dataset.resize((10, 10, 10))\n\n def setup(self):\n self.file = h5py.File('bench.hdf5', 'w')\n self.file.create_dataset('data',\n data=np.arange(10000).reshape((100, 10, 10)),\n chunks=(3, 3, 3), maxshape=(None, None, None))", + "min_run_count": 2, + "name": "hdf5.TimePureHDF5.time_resize_smaller", + "number": 0, + "param_names": [], + "params": [], + "repeat": 0, + "rounds": 2, + "sample_time": 0.01, + "type": "time", + "unit": "seconds", + "version": "0ca62d1c3dee9f1b38e907e67525fcf5ae5e2ce489ea8f306231232ce1b83fb9", + "warmup_time": -1 + }, + "hdf5.TimePureHDF5.time_setattr": { + "code": "class TimePureHDF5:\n def time_setattr(self):\n dataset = self.file['data']\n dataset[:, 0, 0:6] = -1\n\n def setup(self):\n self.file = h5py.File('bench.hdf5', 'w')\n self.file.create_dataset('data',\n data=np.arange(10000).reshape((100, 10, 10)),\n chunks=(3, 3, 3), maxshape=(None, None, None))", + "min_run_count": 2, + "name": "hdf5.TimePureHDF5.time_setattr", + "number": 0, + "param_names": [], + "params": [], + "repeat": 0, + "rounds": 2, + "sample_time": 0.01, + "type": "time", + "unit": "seconds", + "version": "800ca98ef22079d63aebf10eb17e9f5771436b237fbf92b5d61adc551d84993c", + "warmup_time": -1 + }, + "inmemoryarraydataset.TimeInMemoryArrayDataset.time_getattr": { + "code": "class TimeInMemoryArrayDataset:\n def time_getattr(self):\n with h5py.File('bench.hdf5', 'w') as f:\n versioned_file = VersionedHDF5File(f)\n with versioned_file.stage_version('version1') as g:\n dataset = g.create_dataset('data',\n data=np.arange(10000).reshape((100, 10, 10)),\n chunks=(3, 3, 3))\n assert isinstance(dataset, InMemoryArrayDataset) or isinstance(dataset, DatasetWrapper) and isinstance(dataset.dataset, InMemoryArrayDataset)\n dataset[:, 0, 0:6]", + "min_run_count": 2, + "name": "inmemoryarraydataset.TimeInMemoryArrayDataset.time_getattr", + "number": 0, + "param_names": [], + "params": [], + "repeat": 0, + "rounds": 2, + "sample_time": 0.01, + "timeout": 1000, + "type": "time", + "unit": "seconds", + "version": "03bc703ccc7f5109bd09fb364d994f8b407dd1c5ef0688fecbfb98bd31f401d7", + "warmup_time": -1 + }, + "inmemoryarraydataset.TimeInMemoryArrayDataset.time_resize_bigger": { + "code": "class TimeInMemoryArrayDataset:\n def time_resize_bigger(self):\n with h5py.File('bench.hdf5', 'w') as 
f:\n versioned_file = VersionedHDF5File(f)\n with versioned_file.stage_version('version1') as g:\n dataset = g.create_dataset('data',\n data=np.arange(10000).reshape((100, 10, 10)),\n chunks=(3, 3, 3))\n assert isinstance(dataset, InMemoryArrayDataset) or isinstance(dataset, DatasetWrapper) and isinstance(dataset.dataset, InMemoryArrayDataset)\n dataset.resize((100, 100, 100))", + "min_run_count": 2, + "name": "inmemoryarraydataset.TimeInMemoryArrayDataset.time_resize_bigger", + "number": 0, + "param_names": [], + "params": [], + "repeat": 0, + "rounds": 2, + "sample_time": 0.01, + "timeout": 1000, + "type": "time", + "unit": "seconds", + "version": "fdc09f867950d3ea63ce715311e8204e45a055959d732b4ac531a9afbc5054eb", + "warmup_time": -1 + }, + "inmemoryarraydataset.TimeInMemoryArrayDataset.time_resize_smaller": { + "code": "class TimeInMemoryArrayDataset:\n def time_resize_smaller(self):\n with h5py.File('bench.hdf5', 'w') as f:\n versioned_file = VersionedHDF5File(f)\n with versioned_file.stage_version('version1') as g:\n dataset = g.create_dataset('data',\n data=np.arange(10000).reshape((100, 10, 10)),\n chunks=(3, 3, 3))\n assert isinstance(dataset, InMemoryArrayDataset) or isinstance(dataset, DatasetWrapper) and isinstance(dataset.dataset, InMemoryArrayDataset)\n dataset.resize((10, 10, 10))", + "min_run_count": 2, + "name": "inmemoryarraydataset.TimeInMemoryArrayDataset.time_resize_smaller", + "number": 0, + "param_names": [], + "params": [], + "repeat": 0, + "rounds": 2, + "sample_time": 0.01, + "timeout": 1000, + "type": "time", + "unit": "seconds", + "version": "082fed10fc0ef1f09ef29258cfc6d044727ca67460b78944ced94a1b40d065ef", + "warmup_time": -1 + }, + "inmemoryarraydataset.TimeInMemoryArrayDataset.time_setattr": { + "code": "class TimeInMemoryArrayDataset:\n def time_setattr(self):\n with h5py.File('bench.hdf5', 'w') as f:\n versioned_file = VersionedHDF5File(f)\n with versioned_file.stage_version('version1') as g:\n dataset = g.create_dataset('data',\n data=np.arange(10000).reshape((100, 10, 10)),\n chunks=(3, 3, 3))\n assert isinstance(dataset, InMemoryArrayDataset) or isinstance(dataset, DatasetWrapper) and isinstance(dataset.dataset, InMemoryArrayDataset)\n dataset[:, 0, 0:6] = -1", + "min_run_count": 2, + "name": "inmemoryarraydataset.TimeInMemoryArrayDataset.time_setattr", + "number": 0, + "param_names": [], + "params": [], + "repeat": 0, + "rounds": 2, + "sample_time": 0.01, + "timeout": 1000, + "type": "time", + "unit": "seconds", + "version": "a6f9aa34a9a23e90a219560b66ec33cd105e73f8004bfe43c54bb75805269541", + "warmup_time": -1 + }, + "inmemorydataset.TimeInMemoryDataset.time_getitem": { + "code": "class TimeInMemoryDataset:\n def time_getitem(self):\n dataset = self.versioned_file['version1']['data']\n assert isinstance(dataset, InMemoryDataset) or isinstance(dataset, DatasetWrapper) and isinstance(dataset.dataset, InMemoryDataset)\n dataset[:, 0, 0:6]\n\n def setup(self):\n if hasattr(self, 'file'):\n self.file.close()\n if os.path.exists('bench.hdf5'):\n os.remove('bench.hdf5')\n \n with h5py.File('bench.hdf5', 'w') as f:\n versioned_file = VersionedHDF5File(f)\n \n with versioned_file.stage_version('version1') as g:\n g.create_dataset('data',\n data=np.arange(10000).reshape((100, 10, 10)),\n chunks=(3, 3, 3))\n \n self.file = h5py.File('bench.hdf5', 'a')\n self.versioned_file = VersionedHDF5File(self.file)", + "min_run_count": 2, + "name": "inmemorydataset.TimeInMemoryDataset.time_getitem", + "number": 0, + "param_names": [], + "params": [], + "repeat": 0, + 
"rounds": 2, + "sample_time": 0.01, + "timeout": 1000, + "type": "time", + "unit": "seconds", + "version": "3d9054259fd156ce88e37d3c08f3952b7ea524394ba8b0fb2e5ba9af2f618f69", + "warmup_time": -1 + }, + "inmemorydataset.TimeInMemoryDataset.time_resize_bigger": { + "code": "class TimeInMemoryDataset:\n def time_resize_bigger(self):\n # https://github.com/airspeed-velocity/asv/issues/966\n self.setup()\n with self.versioned_file.stage_version('version2') as g:\n dataset = g['data']\n assert isinstance(dataset, InMemoryDataset) or isinstance(dataset, DatasetWrapper) and isinstance(dataset.dataset, InMemoryDataset)\n dataset.resize((100, 100, 100))\n\n def setup(self):\n if hasattr(self, 'file'):\n self.file.close()\n if os.path.exists('bench.hdf5'):\n os.remove('bench.hdf5')\n \n with h5py.File('bench.hdf5', 'w') as f:\n versioned_file = VersionedHDF5File(f)\n \n with versioned_file.stage_version('version1') as g:\n g.create_dataset('data',\n data=np.arange(10000).reshape((100, 10, 10)),\n chunks=(3, 3, 3))\n \n self.file = h5py.File('bench.hdf5', 'a')\n self.versioned_file = VersionedHDF5File(self.file)", + "min_run_count": 2, + "name": "inmemorydataset.TimeInMemoryDataset.time_resize_bigger", + "number": 0, + "param_names": [], + "params": [], + "repeat": 0, + "rounds": 2, + "sample_time": 0.01, + "timeout": 1000, + "type": "time", + "unit": "seconds", + "version": "b778cca65c85402b72c0220afe38e2598e8a2d08d09a6c4e37562b8880752851", + "warmup_time": -1 + }, + "inmemorydataset.TimeInMemoryDataset.time_resize_smaller": { + "code": "class TimeInMemoryDataset:\n def time_resize_smaller(self):\n # https://github.com/airspeed-velocity/asv/issues/966\n self.setup()\n with self.versioned_file.stage_version('version2') as g:\n dataset = g['data']\n assert isinstance(dataset, InMemoryDataset) or isinstance(dataset, DatasetWrapper) and isinstance(dataset.dataset, InMemoryDataset)\n dataset.resize((10, 10, 10))\n\n def setup(self):\n if hasattr(self, 'file'):\n self.file.close()\n if os.path.exists('bench.hdf5'):\n os.remove('bench.hdf5')\n \n with h5py.File('bench.hdf5', 'w') as f:\n versioned_file = VersionedHDF5File(f)\n \n with versioned_file.stage_version('version1') as g:\n g.create_dataset('data',\n data=np.arange(10000).reshape((100, 10, 10)),\n chunks=(3, 3, 3))\n \n self.file = h5py.File('bench.hdf5', 'a')\n self.versioned_file = VersionedHDF5File(self.file)", + "min_run_count": 2, + "name": "inmemorydataset.TimeInMemoryDataset.time_resize_smaller", + "number": 0, + "param_names": [], + "params": [], + "repeat": 0, + "rounds": 2, + "sample_time": 0.01, + "timeout": 1000, + "type": "time", + "unit": "seconds", + "version": "c9c051c4af010816abc469883320dc647f7ac6acd0ed78f11f314c0dacf65e1c", + "warmup_time": -1 + }, + "inmemorydataset.TimeInMemoryDataset.time_setitem": { + "code": "class TimeInMemoryDataset:\n def time_setitem(self):\n # https://github.com/airspeed-velocity/asv/issues/966\n self.setup()\n with self.versioned_file.stage_version('version2') as g:\n dataset = g['data']\n assert isinstance(dataset, InMemoryDataset) or isinstance(dataset, DatasetWrapper) and isinstance(dataset.dataset, InMemoryDataset)\n dataset[:, 0, 0:6] = -1\n\n def setup(self):\n if hasattr(self, 'file'):\n self.file.close()\n if os.path.exists('bench.hdf5'):\n os.remove('bench.hdf5')\n \n with h5py.File('bench.hdf5', 'w') as f:\n versioned_file = VersionedHDF5File(f)\n \n with versioned_file.stage_version('version1') as g:\n g.create_dataset('data',\n data=np.arange(10000).reshape((100, 10, 10)),\n chunks=(3, 3, 
3))\n \n self.file = h5py.File('bench.hdf5', 'a')\n self.versioned_file = VersionedHDF5File(self.file)", + "min_run_count": 2, + "name": "inmemorydataset.TimeInMemoryDataset.time_setitem", + "number": 0, + "param_names": [], + "params": [], + "repeat": 0, + "rounds": 2, + "sample_time": 0.01, + "timeout": 1000, + "type": "time", + "unit": "seconds", + "version": "e96199385d96cb13ddb7a257ded590b70a1b7a1fd1523460f2131e94d10b9f5c", + "warmup_time": -1 + }, + "many_chunks.time_many_chunks": { + "code": "def time_many_chunks():\n d0 = 2\n d1 = 15220\n d2 = 2\n shape = (d0, d1, d2)\n chunks = (600, 2, 4)\n with h5py.File('foo.h5', 'w') as f:\n vf = VersionedHDF5File(f)\n with vf.stage_version('0') as sv:\n sv.create_dataset('bar', shape=shape, maxshape=(None, None, None),\n chunks=chunks, dtype=dt,\n data=np.full(shape, 0, dtype=dt))\n\n i = 1\n with h5py.File('foo.h5', 'r+') as f:\n vf = VersionedHDF5File(f)\n with vf.stage_version(str(i)) as sv:\n sv['bar'][:] = np.full(shape, i, dtype=dt)", + "min_run_count": 2, + "name": "many_chunks.time_many_chunks", + "number": 0, + "param_names": [], + "params": [], + "repeat": 0, + "rounds": 2, + "sample_time": 0.01, + "type": "time", + "unit": "seconds", + "version": "cabb39a1324ee887948aaa0a43500922a2b54721821fefdec9eaff931456a27e", + "warmup_time": -1 + }, + "many_chunks.time_many_chunks_arange": { + "code": "def time_many_chunks_arange():\n d0 = 2\n d1 = 15220\n d2 = 2\n shape = (d0, d1, d2)\n chunks = (600, 2, 4)\n with h5py.File('foo.h5', 'w') as f:\n vf = VersionedHDF5File(f)\n with vf.stage_version('0') as sv:\n sv.create_dataset('bar', shape=shape, maxshape=(None, None, None),\n chunks=chunks, dtype=dt,\n data=np.arange(np.prod(shape), dtype=dt).reshape(shape))", + "min_run_count": 2, + "name": "many_chunks.time_many_chunks_arange", + "number": 0, + "param_names": [], + "params": [], + "repeat": 0, + "rounds": 2, + "sample_time": 0.01, + "type": "time", + "unit": "seconds", + "version": "f2c60d70cd09f267a90204fcc7f961f9ea05b68d268e4e2e56d64a6dac3296e6", + "warmup_time": -1 + }, + "many_chunks.time_many_chunks_integer_index": { + "code": "def time_many_chunks_integer_index():\n d0 = 2\n d1 = 15220\n d2 = 2\n shape = (d0, d1, d2)\n chunks = (600, 2, 4)\n with h5py.File('foo.h5', 'w') as f:\n vf = VersionedHDF5File(f)\n with vf.stage_version('0') as sv:\n sv.create_dataset('bar', shape=shape, maxshape=(None, None, None),\n chunks=chunks, dtype=dt,\n data=np.full(shape, 0, dtype=dt))\n\n i = 1\n with h5py.File('foo.h5', 'r+') as f:\n vf = VersionedHDF5File(f)\n with vf.stage_version(str(i)) as sv:\n i2 = np.random.choice(d1, 30, replace=False)\n i2 = np.sort(i2)\n sv['bar'][:, i2, :] = np.full((d0, len(i2), d2), i, dtype=dt)", + "min_run_count": 2, + "name": "many_chunks.time_many_chunks_integer_index", + "number": 0, + "param_names": [], + "params": [], + "repeat": 0, + "rounds": 2, + "sample_time": 0.01, + "type": "time", + "unit": "seconds", + "version": "8c51f263d02dccf5bd0bcab3038302a963240197a7e8d4db7a1ff450b7a51fc4", + "warmup_time": -1 + }, + "resize.time_resize": { + "code": "def time_resize():\n with h5py.File('foo.h5', 'w') as f:\n vf = VersionedHDF5File(f)\n with vf.stage_version('0') as sv:\n sv.create_dataset('bar', (2, 15220, 2),\n chunks=(300, 100, 2),\n dtype=dt, data=np.full((2, 15220, 2), 0, dtype=dt))\n\n with h5py.File('foo.h5', 'r+') as f:\n vf = VersionedHDF5File(f)\n with vf.stage_version('1') as sv:\n bar = sv['bar']\n bar.resize((3, 15222, 2))", + "min_run_count": 2, + "name": "resize.time_resize", + "number": 0, + 
"param_names": [], + "params": [], + "repeat": 0, + "rounds": 2, + "sample_time": 0.01, + "timeout": 1200, + "type": "time", + "unit": "seconds", + "version": "139acbc7aac740ddd5fc4148856dd9091cd651897ae668a8777a3192b67cdedf", + "warmup_time": -1 + }, + "resize.time_resize_and_write": { + "code": "def time_resize_and_write():\n with h5py.File('foo.h5', 'w') as f:\n vf = VersionedHDF5File(f)\n with vf.stage_version('0') as sv:\n sv.create_dataset('bar', (1, 10, 2),\n chunks=(600, 2, 4),\n dtype=dt, data=np.full((1, 10, 2), 0, dtype=dt))\n\n for i in range(1, 100):\n with h5py.File('foo.h5', 'r+') as f:\n vf = VersionedHDF5File(f)\n with vf.stage_version(str(i)) as sv:\n bar = sv['bar']\n bar.resize((1, (i+1) * 10, 2))\n bar[:, -10:, :] = np.full((1, 10, 2), i, dtype=dt)", + "min_run_count": 2, + "name": "resize.time_resize_and_write", + "number": 0, + "param_names": [], + "params": [], + "repeat": 0, + "rounds": 2, + "sample_time": 0.01, + "timeout": 1200, + "type": "time", + "unit": "seconds", + "version": "b4b4259cb46ee209f997c5832896749d06178290828afc8d7beb0874e813b44c", + "warmup_time": -1 + }, + "resize.time_resize_and_write_hdf5": { + "code": "def time_resize_and_write_hdf5():\n with h5py.File('foo.h5', 'w') as f:\n f.create_dataset('bar0', (1, 10, 2),\n chunks=(600, 2, 4),\n dtype=dt, data=np.full((1, 10, 2), 0, dtype=dt),\n maxshape=(None, None, None))\n\n for i in range(1, 100):\n with h5py.File('foo.h5', 'r+') as f:\n bar = f.create_dataset('bar%d' % i, chunks=(600, 2, 4), dtype=dt,\n data=f['bar%d' % (i-1)],\n maxshape=(None, None, None))\n bar.resize((1, (i+1) * 10, 2))\n bar[:, -10:, :] = np.full((1, 10, 2), i, dtype=dt)", + "min_run_count": 2, + "name": "resize.time_resize_and_write_hdf5", + "number": 0, + "param_names": [], + "params": [], + "repeat": 0, + "rounds": 2, + "sample_time": 0.01, + "type": "time", + "unit": "seconds", + "version": "a4386c79036bab0a0fd626cd38ad9871c13d9504cf70b4791c1b45c8ac2c2dcb", + "warmup_time": -1 + }, + "resize.time_resize_and_write_hdf5_no_copy": { + "code": "def time_resize_and_write_hdf5_no_copy():\n with h5py.File('foo.h5', 'w') as f:\n f.create_dataset('bar', (1, 10, 2),\n chunks=(600, 2, 4),\n dtype=dt, data=np.full((1, 10, 2), 0, dtype=dt),\n maxshape=(None, None, None))\n\n for i in range(1, 100):\n with h5py.File('foo.h5', 'r+') as f:\n bar = f['bar']\n bar.resize((1, (i+1) * 10, 2))\n bar[:, -10:, :] = np.full((1, 10, 2), i, dtype=dt)", + "min_run_count": 2, + "name": "resize.time_resize_and_write_hdf5_no_copy", + "number": 0, + "param_names": [], + "params": [], + "repeat": 0, + "rounds": 2, + "sample_time": 0.01, + "type": "time", + "unit": "seconds", + "version": "f94f592d285299e8c5c7a2fbfb86b129371e9d58c71d13f55f14a6176fc66808", + "warmup_time": -1 + }, + "resize.time_resize_hdf5": { + "code": "def time_resize_hdf5():\n with h5py.File('foo.h5', 'w') as f:\n f.create_dataset('bar', (2, 15220, 2),\n chunks=(300, 100, 2),\n dtype=dt, data=np.full((2, 15220, 2), 0,\n dtype=dt),\n maxshape=(None, None, None))\n\n with h5py.File('foo.h5', 'r+') as f:\n bar = f['bar']\n bar.resize((3, 15222, 2))", + "min_run_count": 2, + "name": "resize.time_resize_hdf5", + "number": 0, + "param_names": [], + "params": [], + "repeat": 0, + "rounds": 2, + "sample_time": 0.01, + "type": "time", + "unit": "seconds", + "version": "4e9ed3924a20c3eb88bff7199efce32a2f66cf82247e481040b00f67b6e21ed9", + "warmup_time": -1 + }, + "version": 2, + "versionedhdf5file.TimeDatetimeAccess.time_version_by_datetime": { + "code": "class TimeDatetimeAccess:\n def 
time_version_by_datetime(self):\n # Based on https://github.com/deshaw/versioned-hdf5/issues/170\n with h5py.File('foo.h5', 'r') as f:\n vf = VersionedHDF5File(f)\n for _ in range(100):\n _ = vf[self.dt]['bar'][:]\n\n def setup(self):\n with h5py.File('foo.h5', 'w') as f:\n vf = VersionedHDF5File(f)\n with vf.stage_version('0') as sv:\n sv.create_dataset('bar', data=np.random.rand(10))\n \n for i in range(1, 100):\n with vf.stage_version(str(i)) as sv:\n sv['bar'][:] = np.random.rand(10)\n self.dt = np.datetime64(vf[str(50)].attrs['timestamp'])", + "min_run_count": 2, + "name": "versionedhdf5file.TimeDatetimeAccess.time_version_by_datetime", + "number": 0, + "param_names": [], + "params": [], + "repeat": 0, + "rounds": 2, + "sample_time": 0.01, + "type": "time", + "unit": "seconds", + "version": "68650944530784b3fda1ba4817d8e831fda665cd7ff0a32c4e20e92aaac32816", + "warmup_time": -1 + } +} \ No newline at end of file diff --git a/.asv/results/cirun-peytondmurray--versioned-hdf5-ffd0030/52aca7e9-virtualenv-py3.11.json b/.asv/results/cirun-peytondmurray--versioned-hdf5-ffd0030/52aca7e9-virtualenv-py3.11.json new file mode 100644 index 00000000..f5969b36 --- /dev/null +++ b/.asv/results/cirun-peytondmurray--versioned-hdf5-ffd0030/52aca7e9-virtualenv-py3.11.json @@ -0,0 +1 @@ +{"commit_hash": "52aca7e99aa537a4ccfce425802b96195c8fa858", "env_name": "virtualenv-py3.11", "date": 1729193273000, "params": {"arch": "x86_64", "cpu": "Intel(R) Xeon(R) CPU @ 2.30GHz", "machine": "cirun-peytondmurray--versioned-hdf5-ffd0030", "num_cpu": "8", "os": "Linux 6.8.0-1014-gcp", "ram": "30799060", "python": "3.11"}, "python": "3.11", "requirements": {}, "env_vars": {}, "result_columns": ["result", "params", "version", "started_at", "duration", "stats_ci_99_a", "stats_ci_99_b", "stats_q_25", "stats_q_75", "stats_number", "stats_repeat", "samples", "profile"], "results": {"delete_versions.TimeDeleting.time_delete": [[0.2387862350000205, 0.7329137450000189, 1.2331673324999883], [["10", "30", "50"]], "08272cd8ae51a90b06a17e73c3b3546ed492ada2a98fdcd9e070bbb6d95303f3", 1729194470443, 38.195, [0.22559, 0.71119, 1.1444], [0.26136, 0.76667, 1.3554], [0.23255, 0.72199, 1.2216], [0.25328, 0.73721, 1.2615], [1, 1, 1], [10, 6, 4], [[0.22684615299999678, 0.23232741999999007, 0.23962396600001057, 0.23322989100000768, 0.23794850400003043, 0.22559306300001936, 0.2503261179999754, 0.26136014999997315, 0.2552152720001004, 0.25425913799995215], [0.7372879000000125, 0.7197072330000083, 0.7193895290000114, 0.7288365829999748, 0.7666680940000106, 0.7369909070000631], [1.224204732999965, 1.3195533410000166, 1.2138834910000469, 1.2421299320000116]]], "hdf5.TimePureHDF5.time_getattr": [[0.0003942591666659441], [], "2e759d308432d2570cd8b41140c6171cbdac2f79c07f8cf323e54c6be0273446", 1729194508639, 0.19662, [0.00038849], [0.0004212], [0.00039034], [0.00039783], [27], [10], [[0.00039619333333533815, 0.0003884935185177946, 0.00038929637037199427, 0.0004212011111109051, 0.0004033040370380284, 0.0003983808888876781, 0.00039311092592487075, 0.00039182618518309565, 0.0003954074074070175, 0.0003898412962952433]]], "hdf5.TimePureHDF5.time_resize_bigger": [[6.653056748469273e-05], [], "d0a22645103f056daa04af1d58f2abbcf7d4459232db9c22ddb408f1c0213792", 1729194508836, 0.19638, [6.4743e-05], [6.8769e-05], [6.5307e-05], [6.7292e-05], [163], [10], [[6.790186503090484e-05, 6.693451533734344e-05, 6.627877914114906e-05, 6.67823558282364e-05, 6.741117791406301e-05, 6.513113496887636e-05, 6.876904907978783e-05, 6.474290184048236e-05, 6.508755828227365e-05, 
6.583488957096575e-05]]], "hdf5.TimePureHDF5.time_resize_smaller": [[7.041915838502866e-05], [], "0ca62d1c3dee9f1b38e907e67525fcf5ae5e2ce489ea8f306231232ce1b83fb9", 1729194509033, 0.20141, [6.948e-05], [7.2266e-05], [6.9746e-05], [7.1571e-05], [161], [10], [[7.128045962719897e-05, 7.226633540391919e-05, 7.166718633538525e-05, 7.201267080731188e-05, 7.035378260852893e-05, 7.048453416152839e-05, 6.960445962772186e-05, 6.960028571433304e-05, 6.948034782631704e-05, 7.017188819834768e-05]]], "hdf5.TimePureHDF5.time_setattr": [[0.0024464268000087943], [], "800ca98ef22079d63aebf10eb17e9f5771436b237fbf92b5d61adc551d84993c", 1729194509234, 0.20586, [0.002299], [0.002574], [0.0023895], [0.0024998], [5], [10], [[0.0025739563999991333, 0.0025279911999973592, 0.002484335999997711, 0.0024842410000019298, 0.0025049589999980525, 0.0023900787999991736, 0.0023893260000022567, 0.002352696200000537, 0.0022989798000025985, 0.0024086126000156583]]], "inmemoryarraydataset.TimeInMemoryArrayDataset.time_getattr": [[0.11531553600002553], [], "03bc703ccc7f5109bd09fb364d994f8b407dd1c5ef0688fecbfb98bd31f401d7", 1729194509440, 0.72657, [0.11431], [0.11585], [0.11486], [0.11575], [1], [10], [[0.115849819999994, 0.11504425300000776, 0.11534959100004016, 0.11459697700001925, 0.11431455600001073, 0.11578785100005007, 0.11570030100006079, 0.11576550000006591, 0.1152814810000109, 0.11479830899997978]]], "inmemoryarraydataset.TimeInMemoryArrayDataset.time_resize_bigger": [[3.8563915309999857], [], "fdc09f867950d3ea63ce715311e8204e45a055959d732b4ac531a9afbc5054eb", 1729194510167, 12.741, [3.6233], [4.1948], [3.8353], [3.9301], [1], [4], [[3.8711238500000036, 3.816314346000013, 4.107020741000042, 3.8416592119999677]]], "inmemoryarraydataset.TimeInMemoryArrayDataset.time_resize_smaller": [[0.01763106800001424], [], "082fed10fc0ef1f09ef29258cfc6d044727ca67460b78944ced94a1b40d065ef", 1729194522908, 0.21142, [0.017002], [0.019505], [0.017398], [0.017825], [1], [10], [[0.017843479000021034, 0.017769910000026812, 0.017395925000016632, 0.017002130999969722, 0.017405025000016394, 0.01787308400002985, 0.017505333000030987, 0.01775680299999749, 0.017056120999995983, 0.01950507200001539]]], "inmemoryarraydataset.TimeInMemoryArrayDataset.time_setattr": [[0.11596620699998539], [], "a6f9aa34a9a23e90a219560b66ec33cd105e73f8004bfe43c54bb75805269541", 1729194523120, 0.76164, [0.11448], [0.12788], [0.11533], [0.11646], [1], [10], [[0.11930848899999091, 0.11617260899998882, 0.11523312799999985, 0.11637322899997571, 0.11575980499998195, 0.11648319400001128, 0.11470437700006642, 0.1156297640000048, 0.11447846099997605, 0.12788130499995987]]], "inmemorydataset.TimeInMemoryDataset.time_getitem": [[0.0021607324999954676], [], "3d9054259fd156ce88e37d3c08f3952b7ea524394ba8b0fb2e5ba9af2f618f69", 1729194523882, 0.98266, [0.0020973], [0.0022369], [0.0021156], [0.0021991], [11], [10], [[0.002194935454543735, 0.0021814082727267055, 0.0021248869090920216, 0.0022005281818192916, 0.0022081071818175506, 0.0022368669090879353, 0.0021049310000039027, 0.0021400567272642297, 0.0020972983636407994, 0.002112536909087546]]], "inmemorydataset.TimeInMemoryDataset.time_resize_bigger": [[0.2597775084999512], [], "b778cca65c85402b72c0220afe38e2598e8a2d08d09a6c4e37562b8880752851", 1729194524864, 2.3866, [0.25705], [0.29579], [0.258], [0.2695], [1], [10], [[0.26941096299998435, 0.29578817300000537, 0.27119977499995684, 0.2579850519999809, 0.2588224219999802, 0.25802955899996505, 0.2695258420000073, 0.25705457200001547, 0.2607325949999222, 0.2577398420000918]]], 
"inmemorydataset.TimeInMemoryDataset.time_resize_smaller": [[0.1684105109999905], [], "c9c051c4af010816abc469883320dc647f7ac6acd0ed78f11f314c0dacf65e1c", 1729194527251, 1.8515, [0.16091], [0.1907], [0.16556], [0.17348], [1], [10], [[0.17581308100000115, 0.17090890700001182, 0.1675973069999941, 0.1692237149999869, 0.1743343310000114, 0.1653175100000226, 0.16420011599996087, 0.1609138950000215, 0.16628647200002433, 0.190700080000056]]], "inmemorydataset.TimeInMemoryDataset.time_setitem": [[0.19841210050000768], [], "e96199385d96cb13ddb7a257ded590b70a1b7a1fd1523460f2131e94d10b9f5c", 1729194529103, 1.9607, [0.19606], [0.20393], [0.19684], [0.19985], [1], [10], [[0.19712283000001207, 0.19881405600000335, 0.20002629500004332, 0.20393443999995498, 0.20091060300001118, 0.1964500889999954, 0.1967395580001039, 0.1960613100000046, 0.19801014500001202, 0.19931393900003513]]], "many_chunks.time_many_chunks": [[1.7628785860000562], [], "cabb39a1324ee887948aaa0a43500922a2b54721821fefdec9eaff931456a27e", 1729194531064, 10.737, [1.7261], [1.8333], [1.7503], [1.7741], [1], [10], [[1.7946897709999803, 1.8332553770000004, 1.7677279100000192, 1.7569758399999955, 1.7762074980000193, 1.7318765480000593, 1.7261438230000294, 1.7608645610000622, 1.7648926110000502, 1.7480416729999888]]], "many_chunks.time_many_chunks_arange": [[4.208695124499997], [], "f2c60d70cd09f267a90204fcc7f961f9ea05b68d268e4e2e56d64a6dac3296e6", 1729194541801, 10.653, [3.9727], [4.4196], [4.1403], [4.2645], [1], [4], [[4.158122134999985, 4.259268114000008, 4.086984641999948, 4.280233924000072]]], "many_chunks.time_many_chunks_integer_index": [[1.8079228034999915], [], "8c51f263d02dccf5bd0bcab3038302a963240197a7e8d4db7a1ff450b7a51fc4", 1729194552454, 11.32, [1.7765], [1.8639], [1.798], [1.8242], [1], [10], [[1.7893192860000227, 1.7764915259999725, 1.8638841260000163, 1.82836305699999, 1.8116193789999784, 1.8292031329999645, 1.8053019149999727, 1.8028542609999931, 1.8105436920000102, 1.7963344099999858]]], "resize.time_resize": [[0.08075373849996481], [], "139acbc7aac740ddd5fc4148856dd9091cd651897ae668a8777a3192b67cdedf", 1729194563775, 0.60264, [0.079668], [0.0837], [0.08037], [0.082628], [1], [10], [[0.08320058100002825, 0.0809116050000398, 0.08325841600003514, 0.0805248959999858, 0.08090923200001043, 0.08370022099995822, 0.08059824499991919, 0.0802814449999687, 0.0803189169999996, 0.07966765700007272]]], "resize.time_resize_and_write": [[5.258660953499998], [], "b4b4259cb46ee209f997c5832896749d06178290828afc8d7beb0874e813b44c", 1729194564378, 10.548, [3.7551], [6.7622], [5.2436], [5.2737], [1], [2], [[5.228590112000006, 5.28873179499999]]], "resize.time_resize_and_write_hdf5": [[1.4467696794999654], [], "a4386c79036bab0a0fd626cd38ad9871c13d9504cf70b4791c1b45c8ac2c2dcb", 1729194574926, 8.546, [1.4028], [1.5], [1.4125], [1.4758], [1], [10], [[1.4960773739999809, 1.473862741000005, 1.4763050779999958, 1.474455675999991, 1.4999686689999976, 1.4121954890000552, 1.4122497160000194, 1.4028182949999746, 1.4196766179999258, 1.4133328520000532]]], "resize.time_resize_and_write_hdf5_no_copy": [[0.07681585650004763], [], "f94f592d285299e8c5c7a2fbfb86b129371e9d58c71d13f55f14a6176fc66808", 1729194583472, 0.5579, [0.075741], [0.080987], [0.076092], [0.077583], [1], [10], [[0.07593489499998896, 0.07597559800001363, 0.07574061299999357, 0.07728835799997569, 0.08098718000002236, 0.07768118900003174, 0.07661628300002121, 0.07801443899995775, 0.07701543000007405, 0.07643924400008473]]], "resize.time_resize_hdf5": [[0.6056949905000124], [], 
"4e9ed3924a20c3eb88bff7199efce32a2f66cf82247e481040b00f67b6e21ed9", 1729194584030, 3.6298, [0.58675], [0.61544], [0.60392], [0.61032], [1], [10], [[0.5867515289999687, 0.6070298679999837, 0.6058430210000552, 0.6121450240000286, 0.6037039770000092, 0.5888744210000141, 0.6045584729999973, 0.6154410769999004, 0.6055469599999697, 0.6114153470000474]]], "versionedhdf5file.TimeDatetimeAccess.time_version_by_datetime": [[0.031923934999980474], [], "68650944530784b3fda1ba4817d8e831fda665cd7ff0a32c4e20e92aaac32816", 1729194587660, 3.0892, [0.03106], [0.035014], [0.031699], [0.032789], [1], [10], [[0.031059697999978653, 0.031718804000036016, 0.03201599400000532, 0.03303554100000383, 0.035013658000025316, 0.03183187599995563, 0.03169215900004474, 0.03205045900006098, 0.03160499600005551, 0.03330933200004438]]]}, "durations": {"": 23.980255842208862}, "version": 2} \ No newline at end of file diff --git a/.asv/results/cirun-peytondmurray--versioned-hdf5-ffd0030/a1a3cdc7-virtualenv-py3.11.json b/.asv/results/cirun-peytondmurray--versioned-hdf5-ffd0030/a1a3cdc7-virtualenv-py3.11.json new file mode 100644 index 00000000..49e73ebf --- /dev/null +++ b/.asv/results/cirun-peytondmurray--versioned-hdf5-ffd0030/a1a3cdc7-virtualenv-py3.11.json @@ -0,0 +1 @@ +{"commit_hash": "a1a3cdc76befde88fec35879c906aade7e7b82e6", "env_name": "virtualenv-py3.11", "date": 1729193740000, "params": {"arch": "x86_64", "cpu": "Intel(R) Xeon(R) CPU @ 2.30GHz", "machine": "cirun-peytondmurray--versioned-hdf5-ffd0030", "num_cpu": "8", "os": "Linux 6.8.0-1014-gcp", "ram": "30799060", "python": "3.11"}, "python": "3.11", "requirements": {}, "env_vars": {}, "result_columns": ["result", "params", "version", "started_at", "duration", "stats_ci_99_a", "stats_ci_99_b", "stats_q_25", "stats_q_75", "stats_number", "stats_repeat", "samples", "profile"], "results": {"delete_versions.TimeDeleting.time_delete": [[0.23652891500000806, 0.7244375565000212, 1.2962952755000288], [["10", "30", "50"]], "08272cd8ae51a90b06a17e73c3b3546ed492ada2a98fdcd9e070bbb6d95303f3", 1729194349484, 37.265, [0.23122, 0.70952, 1.1606], [0.26035, 0.73734, 1.4227], [0.23327, 0.72353, 1.2759], [0.24694, 0.73123, 1.312], [1, 1, 1], [10, 6, 4], [[0.25247899599997936, 0.23121909499997173, 0.2603540229999908, 0.23275897999997142, 0.24875271500002327, 0.24152096700004222, 0.23495823799999016, 0.23127422400000341, 0.23809959200002595, 0.2348056659999429], [0.7095236699999532, 0.7248436909999896, 0.7345332269999858, 0.7333542769999895, 0.7240314220000528, 0.7233668479999551], [1.2953929210000297, 1.356576199000017, 1.2174713220000513, 1.2971976300000279]]], "hdf5.TimePureHDF5.time_getattr": [[0.00038659770370414934], [], "2e759d308432d2570cd8b41140c6171cbdac2f79c07f8cf323e54c6be0273446", 1729194386750, 0.19282, [0.00037546], [0.00039573], [0.0003837], [0.00038971], [27], [10], [[0.0003864169259254932, 0.0003957255185182644, 0.0003945797037048309, 0.00038372433333380387, 0.00038677848148280545, 0.00038917555555481495, 0.0003898870740731245, 0.00037912837037176224, 0.00037545655555748926, 0.0003836895925899255]]], "hdf5.TimePureHDF5.time_resize_bigger": [[6.516317168656183e-05], [], "d0a22645103f056daa04af1d58f2abbcf7d4459232db9c22ddb408f1c0213792", 1729194386943, 0.19713, [6.2625e-05], [6.7792e-05], [6.478e-05], [6.6197e-05], [166], [10], [[6.659083132513481e-05, 6.620910240949835e-05, 6.471024096402245e-05, 6.51063192768528e-05, 6.498919879529338e-05, 6.779210843408593e-05, 6.61597891568149e-05, 6.262545783153864e-05, 6.522002409627087e-05, 6.427995783135347e-05]]], 
"hdf5.TimePureHDF5.time_resize_smaller": [[6.939944910198142e-05], [], "0ca62d1c3dee9f1b38e907e67525fcf5ae5e2ce489ea8f306231232ce1b83fb9", 1729194387140, 0.20597, [6.8919e-05], [7.1969e-05], [6.91e-05], [6.9585e-05], [167], [10], [[6.907022754500056e-05, 6.918837125735201e-05, 6.955338922145943e-05, 6.959579041927508e-05, 6.991789221550691e-05, 6.94417964071303e-05, 6.935710179683254e-05, 6.891861676671353e-05, 6.903587425136164e-05, 7.196936526937436e-05]]], "hdf5.TimePureHDF5.time_setattr": [[0.002436319799994635], [], "800ca98ef22079d63aebf10eb17e9f5771436b237fbf92b5d61adc551d84993c", 1729194387346, 0.20884, [0.0023809], [0.002468], [0.0024055], [0.0024494], [5], [10], [[0.002451581000002534, 0.0024304177999965757, 0.0024523157999965404, 0.002442221799992694, 0.0024680231999923306, 0.0023918669999829943, 0.0024034260000007635, 0.002411830200003351, 0.0023809157999949092, 0.0024426960000027975]]], "inmemoryarraydataset.TimeInMemoryArrayDataset.time_getattr": [[0.12451379600000223], [], "03bc703ccc7f5109bd09fb364d994f8b407dd1c5ef0688fecbfb98bd31f401d7", 1729194387555, 0.7381, [0.1131], [0.13545], [0.11508], [0.13002], [1], [10], [[0.13545224599999983, 0.13097304000001486, 0.12901434299999437, 0.1280605259999561, 0.12096706600004836, 0.11455586400006723, 0.1303553349999902, 0.11665724700003466, 0.11323100099991734, 0.1131047059999446]]], "inmemoryarraydataset.TimeInMemoryArrayDataset.time_resize_bigger": [[3.902661331499985], [], "fdc09f867950d3ea63ce715311e8204e45a055959d732b4ac531a9afbc5054eb", 1729194388294, 12.581, [3.6171], [4.1987], [3.8597], [3.9509], [1], [4], [[3.912288812999975, 3.893033849999995, 4.06655455300006, 3.7597469330000877]]], "inmemoryarraydataset.TimeInMemoryArrayDataset.time_resize_smaller": [[0.017704222499958178], [], "082fed10fc0ef1f09ef29258cfc6d044727ca67460b78944ced94a1b40d065ef", 1729194400875, 0.20686, [0.017382], [0.018049], [0.017571], [0.017904], [1], [10], [[0.018048604999989948, 0.017925308999963363, 0.017928206000021873, 0.01752292999998417, 0.01757328700000471, 0.0178387160000284, 0.017715566999982002, 0.017692877999934353, 0.017382211000040115, 0.017570153000065147]]], "inmemoryarraydataset.TimeInMemoryArrayDataset.time_setattr": [[0.11460381250003593], [], "a6f9aa34a9a23e90a219560b66ec33cd105e73f8004bfe43c54bb75805269541", 1729194401082, 0.72039, [0.11335], [0.12278], [0.11422], [0.11547], [1], [10], [[0.11524975099996482, 0.11464924800003473, 0.11455837700003713, 0.1227753769999822, 0.11553917300000194, 0.11422113900005115, 0.11334965699995792, 0.11379735100001653, 0.11423560200000793, 0.11620159599999624]]], "inmemorydataset.TimeInMemoryDataset.time_getitem": [[0.0021903618181821375], [], "3d9054259fd156ce88e37d3c08f3952b7ea524394ba8b0fb2e5ba9af2f618f69", 1729194401802, 0.98287, [0.0020733], [0.0023317], [0.0021292], [0.0022576], [11], [10], [[0.0021795137272730162, 0.0022680737272728698, 0.0023169794545455343, 0.002201209909091259, 0.0023316738181775195, 0.002076146181821059, 0.002073348727280559, 0.0021772796363641396, 0.0021131270909067594, 0.0022263433636350205]]], "inmemorydataset.TimeInMemoryDataset.time_resize_bigger": [[0.2591263595000157], [], "b778cca65c85402b72c0220afe38e2598e8a2d08d09a6c4e37562b8880752851", 1729194402785, 2.3645, [0.2563], [0.26165], [0.25872], [0.25935], [1], [10], [[0.2588564240000437, 0.261649852000005, 0.25925337200004606, 0.25922608799999125, 0.2590266310000402, 0.2586733309999545, 0.26057351800000106, 0.25764261500000885, 0.2562952430000678, 0.2593770419999828]]], 
"inmemorydataset.TimeInMemoryDataset.time_resize_smaller": [[0.16647417049998126], [], "c9c051c4af010816abc469883320dc647f7ac6acd0ed78f11f314c0dacf65e1c", 1729194405150, 1.7875, [0.16047], [0.18155], [0.16326], [0.16785], [1], [10], [[0.16509137999997847, 0.16798226600002408, 0.16046661299998277, 0.166218262999962, 0.18155338499997242, 0.1626496279999401, 0.1667300780000005, 0.16177509299996018, 0.16746530000000348, 0.16988384799992673]]], "inmemorydataset.TimeInMemoryDataset.time_setitem": [[0.1982008654999845], [], "e96199385d96cb13ddb7a257ded590b70a1b7a1fd1523460f2131e94d10b9f5c", 1729194406938, 2.0052, [0.19401], [0.21251], [0.1974], [0.19981], [1], [10], [[0.19829145099998868, 0.19541686200000186, 0.19811027999998032, 0.2125059499999793, 0.2115178160000255, 0.1977646550000145, 0.1988659059999236, 0.1940111369999613, 0.19727215099999285, 0.2001309699999183]]], "many_chunks.time_many_chunks": [[1.7265181040000357], [], "cabb39a1324ee887948aaa0a43500922a2b54721821fefdec9eaff931456a27e", 1729194408943, 10.632, [1.7117], [1.8305], [1.7184], [1.7764], [1], [10], [[1.8304624360000616, 1.729792413000041, 1.770295244999943, 1.7784428349999644, 1.7174824710000394, 1.7150871750000078, 1.7232437950000303, 1.721318838000002, 1.711719482000035, 1.7957581680000203]]], "many_chunks.time_many_chunks_arange": [[4.170708000000047], [], "f2c60d70cd09f267a90204fcc7f961f9ea05b68d268e4e2e56d64a6dac3296e6", 1729194419576, 10.573, [3.985], [4.3547], [4.123], [4.2176], [1], [4], [[4.084145557000056, 4.253975462999961, 4.135934543000076, 4.205481457000019]]], "many_chunks.time_many_chunks_integer_index": [[1.8286458359999642], [], "8c51f263d02dccf5bd0bcab3038302a963240197a7e8d4db7a1ff450b7a51fc4", 1729194430149, 11.377, [1.7824], [1.8974], [1.7963], [1.8651], [1], [10], [[1.850025589999973, 1.7824052450000636, 1.7843902660000595, 1.870153324999933, 1.8974447209999425, 1.7937862600000472, 1.8038162990000046, 1.8831119579999722, 1.825452569999925, 1.8318391020000035]]], "resize.time_resize": [[0.08067197249999936], [], "139acbc7aac740ddd5fc4148856dd9091cd651897ae668a8777a3192b67cdedf", 1729194441527, 0.59676, [0.079322], [0.082863], [0.079757], [0.081199], [1], [10], [[0.08075626499999089, 0.0793860249999625, 0.08124980900004175, 0.0793215250000685, 0.08162894899999174, 0.0828626139999642, 0.08058768000000782, 0.07972161800000777, 0.07986432300003798, 0.08104565499991168]]], "resize.time_resize_and_write": [[5.268442347499956], [], "b4b4259cb46ee209f997c5832896749d06178290828afc8d7beb0874e813b44c", 1729194442124, 10.561, [3.409], [7.1279], [5.2498], [5.287], [1], [2], [[5.305630988999951, 5.231253705999961]]], "resize.time_resize_and_write_hdf5": [[1.4820658080000157], [], "a4386c79036bab0a0fd626cd38ad9871c13d9504cf70b4791c1b45c8ac2c2dcb", 1729194452685, 8.9751, [1.4558], [1.5348], [1.4758], [1.4965], [1], [10], [[1.4757084310000437, 1.4618447739999283, 1.4557735539999612, 1.4761915170000748, 1.4978000710000288, 1.5348473589999685, 1.5064883270000564, 1.4926152539999293, 1.48143965700001, 1.4826919590000216]]], "resize.time_resize_and_write_hdf5_no_copy": [[0.07469807900002934], [], "f94f592d285299e8c5c7a2fbfb86b129371e9d58c71d13f55f14a6176fc66808", 1729194461661, 0.5537, [0.07364], [0.078576], [0.073906], [0.075668], [1], [10], [[0.07383507999998074, 0.07425944400006301, 0.07395661099997142, 0.07364027800008444, 0.07388954299995021, 0.07691315700003543, 0.07857588899992152, 0.07570344800001294, 0.07556254500002524, 0.07513671399999566]]], "resize.time_resize_hdf5": [[0.6062708244999726], [], 
"4e9ed3924a20c3eb88bff7199efce32a2f66cf82247e481040b00f67b6e21ed9", 1729194462215, 3.6309, [0.5957], [0.61379], [0.60103], [0.60981], [1], [10], [[0.5956958719999648, 0.612445192999985, 0.6038025990000051, 0.6082417229999919, 0.6137857459999623, 0.6001044210000828, 0.5981074139999691, 0.6080057979999083, 0.6103281460000289, 0.6045358510000369]]], "versionedhdf5file.TimeDatetimeAccess.time_version_by_datetime": [[0.030506722499978878], [], "68650944530784b3fda1ba4817d8e831fda665cd7ff0a32c4e20e92aaac32816", 1729194465846, 3.0652, [0.029011], [0.034608], [0.030204], [0.030821], [1], [10], [[0.029010863000053178, 0.029823297999996612, 0.030511867999962305, 0.03016196399994442, 0.031752104000020154, 0.03050157699999545, 0.03033054100001209, 0.03083037199996852, 0.030791493999913655, 0.03460806499992941]]]}, "durations": {"": 1.1195125579833984}, "version": 2} \ No newline at end of file diff --git a/.asv/results/cirun-peytondmurray--versioned-hdf5-ffd0030/machine.json b/.asv/results/cirun-peytondmurray--versioned-hdf5-ffd0030/machine.json new file mode 100644 index 00000000..39872ca1 --- /dev/null +++ b/.asv/results/cirun-peytondmurray--versioned-hdf5-ffd0030/machine.json @@ -0,0 +1,9 @@ +{ + "arch": "x86_64", + "cpu": "Intel(R) Xeon(R) CPU @ 2.30GHz", + "machine": "cirun-peytondmurray--versioned-hdf5-ffd0030", + "num_cpu": "8", + "os": "Linux 6.8.0-1014-gcp", + "ram": "30799060", + "version": 1 +} \ No newline at end of file diff --git a/.cirun.yml b/.cirun.yml new file mode 100644 index 00000000..a89d015d --- /dev/null +++ b/.cirun.yml @@ -0,0 +1,7 @@ +runners: + - name: "benchmark-runner" + cloud: "gcp" + instance_type: "n1-standard-8" + machine_image: "ubuntu-minimal-2404-noble-amd64-v20240910" + labels: + - "cirun-benchmark-runner" diff --git a/.github/workflows/benchmarks.yml b/.github/workflows/benchmarks.yml new file mode 100644 index 00000000..d87d3de4 --- /dev/null +++ b/.github/workflows/benchmarks.yml @@ -0,0 +1,47 @@ +name: Benchmarks + +on: [workflow_call, workflow_dispatch] + +# Sets permissions of the GITHUB_TOKEN to allow writing back to `master` +permissions: + contents: write + +jobs: + run-benchmarks: + runs-on: "cirun-benchmark-runner--${{ github.run_id }}" + steps: + # Install git first; otherwise actions/checkout silently falls back + # to github REST API for downloading the repo + - name: Install dependencies + run: | + sudo apt update -y + sudo apt install git zlib1g-dev build-essential pkg-config -y + + - uses: actions/checkout@v4 + with: + ref: master + fetch-depth: 0 + + - name: Setup python with miniconda + uses: conda-incubator/setup-miniconda@v3 + with: + python-version: 3.11 + channels: conda-forge + + - name: Install versioned-hdf5 + run: | + conda install -n test pip hdf5 openmpi -c conda-forge -y + conda run -n test pip install '.[bench]' + + # Compare the most recent commit with the previous one + - name: Run benchmarks + run: | + conda run -n test asv machine --yes + # Don't return exit code 1 if results are slower + conda run -n test asv continuous HEAD^ HEAD || true; + + - name: Add and commit benchmarks + uses: EndBug/add-and-commit@v9 + with: + add: .asv/ + message: "Update benchmarks for commit ${{ github.sha }}" diff --git a/.github/workflows/publish_docs.yml b/.github/workflows/publish_docs.yml index cf4c8269..7f6e94a9 100644 --- a/.github/workflows/publish_docs.yml +++ b/.github/workflows/publish_docs.yml @@ -9,22 +9,26 @@ on: # Sets permissions of the GITHUB_TOKEN to allow deployment to GitHub Pages permissions: - contents: read + contents: write 
   pages: write
   id-token: write
 
 # Allow only one concurrent deployment, skipping runs queued between the run in-progress and latest queued.
-# However, do NOT cancel in-progress runs as we want to allow these production deployments to complete.
+# Cancel in-progress deployment jobs so only the latest one succeeds.
 concurrency:
   group: "pages"
-  cancel-in-progress: false
+  cancel-in-progress: true
 
 jobs:
+  benchmarks:
+    uses: ./.github/workflows/benchmarks.yml
+
   build-and-deploy:
     environment:
       name: github-pages
       url: ${{ steps.deployment.outputs.page_url }}
     runs-on: ubuntu-latest
+    needs: [benchmarks]
     steps:
       # Always checkout master branch, even if triggered by a tag.
       # Otherwise sphinx-multiversion does not pull the master branch.
@@ -43,7 +47,7 @@ jobs:
 
       - name: Install versioned-hdf5
         run: |
-          pip install .[doc]
+          pip install '.[doc,bench]'
 
       - name: Update git tags for sphinx-multiversion
         run: |
@@ -72,6 +76,14 @@ jobs:
           " >> docs/_build/html/index.html
 
+      - name: Publish results
+        run: |
+          asv publish
+
+      - name: Move benchmarks to static pages
+        run: |
+          mv .asv/html docs/_build/html/benchmarks
+
       - name: Setup Pages
         uses: actions/configure-pages@v3
 
diff --git a/.gitignore b/.gitignore
index e16bf440..6fb864ef 100644
--- a/.gitignore
+++ b/.gitignore
@@ -2,7 +2,7 @@
 *.hdf5
 
 # airspeed velocity (benchmarks)
-.asv/
+commits.txt
 
 *.lprof
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 0c622d38..4f2fc271 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -7,7 +7,8 @@ exclude: |
     github_deploy_key_deshaw_versioned_hdf5.enc|
     _versioneer.py|
    benchmarks_install.py|
-    versioned_hdf5/_version.py
+    versioned_hdf5/_version.py|
+    .asv/
   )
 repos:
   - repo: https://github.com/pre-commit/pre-commit-hooks
diff --git a/asv.conf.json b/asv.conf.json
index a93735e5..ef5ae261 100644
--- a/asv.conf.json
+++ b/asv.conf.json
@@ -21,15 +21,18 @@
     // Customizable commands for building, installing, and
     // uninstalling the project. See asv.conf.json documentation.
     //
-    "install_command": ["in-dir={env_dir} python {conf_dir}/benchmarks_install.py {commit} {env_dir} {build_dir}"],
+    // "install_command": [
+    //     "in-dir={env_dir} python {conf_dir}/benchmarks_install.py {commit} {env_dir} {build_dir}"
+    // ],
     // "uninstall_command": ["return-code=any python -mpip uninstall -y {project}"],
     "build_command": [
+        "python -m pip install build",
+        "python -m build --wheel -o {build_cache_dir} {build_dir}",
     ],
 
     // List of branches to benchmark. If not provided, defaults to "master"
     // (for git) or "default" (for mercurial).
-    "branches": ["master", "performance2", "performance3", "new-delete"], // for git
-    // "branches": ["default"], // for mercurial
+    "branches": ["master"], // for git
 
     // The DVCS being used. If not set, it will be automatically
     // determined from "repo" by looking at the protocol in the URL
@@ -42,7 +45,7 @@
     // If missing or the empty string, the tool will be automatically
     // determined by looking for tools on the PATH environment
     // variable.
-    "environment_type": "conda",
+    "environment_type": "virtualenv",
 
     // timeout in seconds for installing any dependencies in environment
     // defaults to 10 min
@@ -57,7 +60,7 @@
 
     // The list of conda channel names to be searched for benchmark
     // dependency packages in the specified order
-    // "conda_channels": ["conda-forge"],
+    // "conda_channels": ["conda-forge"],
 
     // The matrix of dependencies to test. Each key is the name of a
     // package (in PyPI) and the values are version numbers. An empty
diff --git a/benchmarks/delete_versions.py b/benchmarks/delete_versions.py
index 8fa6b8ec..ea766003 100644
--- a/benchmarks/delete_versions.py
+++ b/benchmarks/delete_versions.py
@@ -54,7 +54,7 @@ def setup(self, n):
         v = 1
         with h5py.File(filename, 'r+') as f:
             vf = VersionedHDF5File(f)
-            for d in range(22):
+            for d in range(3):
                 with vf.stage_version(str(v)) as sv:
                     values_ds = sv['values']
                     values_ds.resize((values_ds.shape[0] + 1, values_ds.shape[1] + 5000))
diff --git a/benchmarks_install.py b/benchmarks_install.py
deleted file mode 100644
index 131f1876..00000000
--- a/benchmarks_install.py
+++ /dev/null
@@ -1,83 +0,0 @@
-"""
-This is the script used by asv run to install dependencies. It should not be
-called directly.
-
-This is needed because we have to install specific versions of ndindex
-depending on what commit we are on, because some backwards incompatible
-changes in ndindex were made in tandem with corresponding commits in
-versioned-hdf5.
-"""
-
-import builtins
-import sys
-import os
-import subprocess
-
-# The first commit in versioned-hdf5 that is not compatible with ndindex 1.5
-ndindex_16_commit = 'af9ba2313c73cf00c10f490407956ed3c0e6467e'
-
-def print(*args):
-    # If we don't flush stdout, print output is out of order with run()
-    # output in the asv run -v log.
-    builtins.print(*args)
-    sys.stdout.flush()
-
-def run(command, *args, **kwargs):
-    print(' '.join(command))
-    kwargs.setdefault('check', True)
-    return subprocess.run(command, *args, **kwargs)
-
-def main():
-    commit, env_dir, build_dir = sys.argv[1:]
-
-    copy_env_dir(env_dir, commit)
-    install_dependencies(commit, env_dir)
-
-    install_versioned_hdf5(build_dir)
-
-def copy_env_dir(env_dir, commit):
-    # asv reuses the env dir between runs. But it's simpler for us if we just
-    # restart from scratch, rather than trying to build an uninstall script.
-    # So what we do is copy the raw env dir into a template directory, then
-    # each time we install, we replace the env dir with that template
-    # directory.
-    template_dir = env_dir + '-template'
-    if not os.path.exists(template_dir):
-        # This is the first time we've run
-        print("Creating template env directory", template_dir)
-        run(['cp', '-R', env_dir, template_dir])
-    run(['rm', '-rf', env_dir])
-    run(['cp', '-R', template_dir, env_dir])
-    # asv checks out the project in the env directory, which we just reset. So
-    # checkout it out to the correct commit.
-    os.chdir(os.path.join(env_dir, 'project'))
-    run(['git', 'checkout', commit])
-    os.chdir(env_dir)
-
-def install_dependencies(commit, env_dir):
-    # Check if HEAD is after the ndindex_16_commit.
-    # See https://stackoverflow.com/questions/3005392/how-can-i-tell-if-one-commit-is-a-descendant-of-another-commit
-    p = run(['git', 'merge-base', '--is-ancestor', ndindex_16_commit, commit],
-            check=False)
-    if p.returncode == 1:
-        print("Installing ndindex 1.5")
-        install(env_dir, ndindex_version='==1.5')
-    elif p.returncode == 0:
-        print("Installing ndindex >=1.5.1")
-        install(env_dir, ndindex_version='>=1.5.1')
-    else:
-        raise RuntimeError(f"Error checking commit history for benchmarks install (git gave return code {p.returncode})")
-
-def install_versioned_hdf5(build_dir):
-    print("Installing versioned HDF5")
-    run(['python', '-m', 'pip', 'install', build_dir])
-
-def install(env_dir, ndindex_version='>=1.5', h5py_version='<3'):
-    deps = [
-        'h5py' + h5py_version,
-        'ndindex' + ndindex_version,
-    ]
-    run(['conda', 'install', '-c', 'conda-forge', '-p', env_dir, *deps], check=True)
-
-if __name__ == '__main__':
-    main()
diff --git a/docs/index.rst b/docs/index.rst
index e53b0fcf..46bfd19f 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -18,8 +18,7 @@ Versioned HDF5 provides a versioned abstraction on top of `h5py
 `_.
+Benchmarks for Versioned HDF5 are published `here <../benchmarks/index.html>`_.
 
 Source
 ------
diff --git a/pyproject.toml b/pyproject.toml
index 0ddd495b..7bd17079 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -99,6 +99,7 @@ ignore = [
 dev = ["pre-commit>=3.6.0", 'cython', 'meson-python', 'setuptools-scm']
 test = ["pytest", "pytest-env", "hypothesis", "packaging"]
 doc = ["sphinx", "sphinx-multiversion", "myst-parser"]
+bench = ["asv"]
 
 [tool.setuptools_scm]
diff --git a/update_benchmarks.sh b/update_benchmarks.sh
deleted file mode 100755
index 935e62d8..00000000
--- a/update_benchmarks.sh
+++ /dev/null
@@ -1,17 +0,0 @@
-#!/bin/bash
-# Run and update the benchmarks in the gh-pages branch. The resulting
-# benchmarks are served at
-# https://deshaw.github.io/versioned-hdf5/benchmarks/index.html
-set -e
-set -x
-
-asv run -k -e ALL
-asv publish
-git checkout gh-pages
-git pull
-rm -rf benchmarks
-cp -R .asv/html benchmarks
-git add benchmarks
-git commit -m "Update benchmarks"
-git checkout -
-git push origin gh-pages
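
The new CI flow above reduces to four asv invocations. As a rough local
equivalent (a minimal sketch, assuming the commands run from the repo root
and that asv comes from the new `bench` extra; CI additionally wraps each
command in `conda run -n test`, omitted here):

    pip install '.[bench]'       # asv is provided by the new optional dependency group
    asv machine --yes            # record machine info non-interactively (CI runners are ephemeral)
    asv continuous HEAD^ HEAD    # benchmark HEAD against its parent; CI appends
                                 # `|| true` so slower results don't fail the job
    asv publish                  # render static HTML into .asv/html, which
                                 # publish_docs.yml moves to docs/_build/html/benchmarks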