Skip to content

Commit

Permalink
deprecate `asdf.open(copy_arrays=True)` in favor of `asdf.open(memmap=False)` (#157)
Browse files Browse the repository at this point in the history

* deprecate `asdf.open(copy_arrays=True)` in favor of `asdf.open(memmap=False)`

* fix incorrect boolean flip

* pin asdf>=3.1.0

* Drop Python 3.8 to support asdf 3.1. Add changelog.

* hod: remove unneeded flag

* [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci

---------

Co-authored-by: Lehman Garrison <[email protected]>
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
  • Loading branch information
3 people authored Nov 1, 2024
1 parent 6e18f79 commit 65ab028
Show file tree
Hide file tree
Showing 10 changed files with 23 additions and 18 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/tests.yml
Original file line number Diff line number Diff line change
Expand Up @@ -66,7 +66,7 @@ jobs:
}}
strategy:
matrix:
python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"]
python-version: ["3.9", "3.10", "3.11", "3.12"]

steps:
- uses: actions/checkout@v4
Expand Down
5 changes: 4 additions & 1 deletion CHANGES.rst
Original file line number Diff line number Diff line change
@@ -1,14 +1,17 @@
Changelog
=========

2.0.2 (upcoming)
2.1.0 (upcoming)
----------------

Supported Python versions are 3.9-3.12.

Enhancements
~~~~~~~~~~~~
- ``hod.prepare_sim``: detect and report when a ``prepare_slab`` subprocess fails [#151]
- ci: add python 3.12 and enhance ``NUMBA_DISABLE_JIT`` support [#153]
- Improve CompaSO subsample loading [#154]
- deprecate ``asdf.open(copy_arrays=True)`` in favor of ``asdf.open(memmap=False)`` [#157]

2.0.1 (2024-03-01)
------------------
Expand Down
10 changes: 5 additions & 5 deletions abacusnbody/data/compaso_halo_catalog.py
Original file line number Diff line number Diff line change
Expand Up @@ -566,9 +566,9 @@ def _read_halo_info(

# Open all the files, validate them, and count the halos
# Lazy load, but don't use mmap
afs = [asdf.open(hfn, lazy_load=True, copy_arrays=True) for hfn in halo_fns]
afs = [asdf.open(hfn, lazy_load=True, memmap=False) for hfn in halo_fns]
cleaned_afs = [
asdf.open(hfn, lazy_load=True, copy_arrays=True) for hfn in cleaned_fns
asdf.open(hfn, lazy_load=True, memmap=False) for hfn in cleaned_fns
]

N_halo_per_file = np.array(
Expand Down Expand Up @@ -1038,7 +1038,7 @@ def _load_subsamples(
/ 'cleaned_rvpid'
/ f'cleaned_rvpid_{i:03d}.asdf',
lazy_load=True,
copy_arrays=True,
memmap=False,
)
for i in self.superslab_inds
]
Expand All @@ -1052,7 +1052,7 @@ def _load_subsamples(
/ f'halo_{rvpid}_{AB}'
/ f'halo_{rvpid}_{AB}_{self.superslab_inds[i]:03d}.asdf'
)
with asdf.open(fn, lazy_load=True, copy_arrays=True) as af:
with asdf.open(fn, lazy_load=True, memmap=False) as af:
slab_particles = af[self.data_key][colname][:]
if cleaned:
clean_af = clean_afs[i]
Expand Down Expand Up @@ -1282,7 +1282,7 @@ def _load_halo_lc_subsamples(self, which=['pos', 'vel', 'pid'], unpack_bits=Fals

fn = Path(self.groupdir) / 'lc_pid_rv.asdf'

with asdf.open(fn, lazy_load=True, copy_arrays=True) as af:
with asdf.open(fn, lazy_load=True, memmap=False) as af:
for w in which:
self.subsamples.add_column(af[self.data_key][w][:], name=w, copy=False)

Expand Down
2 changes: 1 addition & 1 deletion abacusnbody/data/pipe_asdf.py
Original file line number Diff line number Diff line change
Expand Up @@ -125,7 +125,7 @@ def unpack_to_pipe(
raise FileNotFoundError(fn)
afs = []
for fn in asdf_fns:
afs += [asdf.open(fn, mode='r', copy_arrays=True, lazy_load=True)]
afs += [asdf.open(fn, mode='r', memmap=False, lazy_load=True)]
for af in afs:
for field in fields:
if field not in af.tree[data_key]:
Expand Down
2 changes: 1 addition & 1 deletion abacusnbody/data/read_abacus.py
Original file line number Diff line number Diff line change
Expand Up @@ -90,7 +90,7 @@ def read_asdf(fn, load=None, colname=None, dtype=np.float32, verbose=True, **kwa
data_key = kwargs.get('data_key', ASDF_DATA_KEY)
header_key = kwargs.get('header_key', ASDF_HEADER_KEY)

with asdf.open(fn, lazy_load=True, copy_arrays=True) as af:
with asdf.open(fn, lazy_load=True, memmap=False) as af:
if colname is None:
_colnames = ['rvint', 'pack9', 'packedpid', 'pid']
for cn in _colnames:
Expand Down
2 changes: 1 addition & 1 deletion abacusnbody/hod/abacus_hod.py
Original file line number Diff line number Diff line change
Expand Up @@ -273,7 +273,7 @@ def staging(self):
sim_dir / simname / 'halos' / ('z%4.3f' % self.z_mock) / 'halo_info'
).glob('*.asdf')
)
f = asdf.open(halo_info_fns[0], lazy_load=True, copy_arrays=False)
f = asdf.open(halo_info_fns[0], lazy_load=True)
header = f['header']

# constants
Expand Down
7 changes: 4 additions & 3 deletions abacusnbody/hod/zcv/ic_fields.py
Original file line number Diff line number Diff line change
Expand Up @@ -44,9 +44,10 @@ def compress_asdf(asdf_fn, table, header):
blosc_block_size=3 * 1024**2,
nthreads=4,
)
with asdf.AsdfFile(data_tree) as af, open(
asdf_fn, 'wb'
) as fp: # where data_tree is the ASDF dict tree structure
with (
asdf.AsdfFile(data_tree) as af,
open(asdf_fn, 'wb') as fp,
): # where data_tree is the ASDF dict tree structure
af.write_to(
fp, all_array_compression='blsc', compression_kwargs=compression_kwargs
)
Expand Down
5 changes: 3 additions & 2 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ dependencies = [
'astropy>=4.0.0',
'scipy>=1.5.0',
'numba>=0.56',
'asdf>=2.8',
'asdf>=3.1.0',
'h5py',
'pyyaml',
'msgpack>=1',
Expand All @@ -31,7 +31,7 @@ dependencies = [
"importlib_resources; python_version<'3.9'",
]
readme = "README.md"
requires-python = ">=3.7"
requires-python = ">=3.9"
urls = {Repository = "https://github.com/abacusorg/abacusutils"}
classifiers = [
"Programming Language :: Python :: 3",
Expand All @@ -49,6 +49,7 @@ abacusutils = "abacusnbody.data.asdf:AbacusExtension"
all = [
'zenbu-fftw>=1',
'classy',
'numpy<2', # for classy
'Corrfunc>=2',
'emcee',
'schwimmbad',
Expand Down
2 changes: 1 addition & 1 deletion scripts/metadata/compress.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@ def compress(fn, rmstate, rmpk, dopickle, domsgpack, dojson):
"""Compress metadata file FN"""
fn = Path(fn)

with asdf.open(fn, copy_arrays=True, lazy_load=False) as af:
with asdf.open(fn, memmap=False, lazy_load=False) as af:
meta = dict(af.tree)

del meta['history'], meta['asdf_library']
Expand Down
4 changes: 2 additions & 2 deletions scripts/metadata/gather_metadata.py
Original file line number Diff line number Diff line change
Expand Up @@ -48,7 +48,7 @@ def main(small=False):
for zdir in (sim / 'halos').glob('z*'):
try:
afn = next(zdir.glob('*/*.asdf')) # any asdf file
with asdf.open(afn, lazy_load=True, copy_arrays=True) as af:
with asdf.open(afn, lazy_load=True, memmap=False) as af:
zheader = af['header'].copy()
except StopIteration:
# maybe a header?
Expand All @@ -65,7 +65,7 @@ def main(small=False):
_icdir = icdir

with asdf.open(
_icdir / sim.name / 'ic_dens_N576.asdf', lazy_load=True, copy_arrays=True
_icdir / sim.name / 'ic_dens_N576.asdf', lazy_load=True, memmap=False
) as af:
icparam = af['header'].copy()
class_pk = af['CLASS_power_spectrum'].copy()
Expand Down

0 comments on commit 65ab028

Please sign in to comment.