From 2519f0f7ef13273169299ca8e213cc1241ca62ad Mon Sep 17 00:00:00 2001
From: Matthew Turk
Date: Sun, 19 Jul 2020 14:13:35 -0500
Subject: [PATCH] Changing to a property

---
 yt/frontends/flash/io.py                 |  9 ++++++---
 yt/frontends/sdf/io.py                   |  4 ++--
 yt/frontends/tipsy/io.py                 | 10 +++++-----
 yt/geometry/particle_geometry_handler.py | 18 ++++++++++--------
 4 files changed, 23 insertions(+), 18 deletions(-)

diff --git a/yt/frontends/flash/io.py b/yt/frontends/flash/io.py
index e72ca882750..fdcc66cf11c 100644
--- a/yt/frontends/flash/io.py
+++ b/yt/frontends/flash/io.py
@@ -164,7 +164,10 @@ def __init__(self, ds):
         self._position_fields = [
             self._particle_fields["particle_pos%s" % ax] for ax in "xyz"
         ]
-        self._chunksize = 32 ** 3
+
+    @property
+    def chunksize(self):
+        return 32 ** 3
 
     def _read_fluid_selection(self, chunks, selector, fields, size):
         raise NotImplementedError
@@ -224,7 +227,7 @@ def _initialize_index(self, data_file, regions):
         morton = np.empty(pcount, dtype="uint64")
         ind = 0
         while ind < pcount:
-            npart = min(self._chunksize, pcount - ind)
+            npart = min(self.chunksize, pcount - ind)
             pos = np.empty((npart, 3), dtype="=f8")
             pos[:, 0] = p_fields[ind : ind + npart, px]
             pos[:, 1] = p_fields[ind : ind + npart, py]
@@ -237,7 +240,7 @@ def _initialize_index(self, data_file, regions):
                 data_file.ds.domain_left_edge,
                 data_file.ds.domain_right_edge,
             )
-            ind += self._chunksize
+            ind += self.chunksize
         return morton
 
     _pcount = None
diff --git a/yt/frontends/sdf/io.py b/yt/frontends/sdf/io.py
index b2ef9306333..f1e5795365f 100644
--- a/yt/frontends/sdf/io.py
+++ b/yt/frontends/sdf/io.py
@@ -65,7 +65,7 @@ def _initialize_index(self, data_file, regions):
         morton = np.empty(pcount, dtype="uint64")
         ind = 0
         while ind < pcount:
-            npart = min(self.ds.index._chunksize, pcount - ind)
+            npart = min(self.ds.index.chunksize, pcount - ind)
             pos = np.empty((npart, 3), dtype=x.dtype)
             pos[:, 0] = x[ind : ind + npart]
             pos[:, 1] = y[ind : ind + npart]
@@ -78,7 +78,7 @@ def _initialize_index(self, data_file, regions):
                 data_file.ds.domain_left_edge,
                 data_file.ds.domain_right_edge,
             )
-            ind += self.ds.index._chunksize
+            ind += self.ds.index.chunksize
         return morton
 
     def _identify_fields(self, data_file):
diff --git a/yt/frontends/tipsy/io.py b/yt/frontends/tipsy/io.py
index bc369251157..a6be6b9e407 100644
--- a/yt/frontends/tipsy/io.py
+++ b/yt/frontends/tipsy/io.py
@@ -90,7 +90,7 @@ def _read_particle_coords(self, chunks, ptf):
         for chunk in chunks:
             for obj in chunk.objs:
                 data_files.update(obj.data_files)
-        chunksize = self.ds.index._chunksize
+        chunksize = self.ds.index.chunksize
         for data_file in sorted(data_files, key=lambda x: (x.filename, x.start)):
             poff = data_file.field_offsets
             tp = data_file.total_particles
@@ -176,7 +176,7 @@ def _read_particle_fields(self, chunks, ptf, selector):
                     continue
                 f.seek(poff[ptype])
                 afields = list(set(field_list).intersection(self._aux_fields))
-                count = min(self.ds.index._chunksize, tp[ptype])
+                count = min(self.ds.index.chunksize, tp[ptype])
                 p = np.fromfile(f, self._pdtypes[ptype], count=count)
                 auxdata = []
                 for afield in afields:
@@ -252,7 +252,7 @@ def _update_domain(self, data_file):
                     continue
                 stop = ind + count
                 while ind < stop:
-                    c = min(self.ds.index._chunksize, stop - ind)
+                    c = min(self.ds.index.chunksize, stop - ind)
                     pp = np.fromfile(f, dtype=self._pdtypes[ptype], count=c)
                     np.minimum(
                         mi,
@@ -467,10 +467,10 @@ def _calculate_particle_offsets_aux(self, data_file):
             for i, ptype in enumerate(self._ptypes):
                 if data_file.total_particles[ptype] == 0:
                     continue
-                elif params[npart_mapping[ptype]] > self.ds.index._chunksize:
+                elif params[npart_mapping[ptype]] > self.ds.index.chunksize:
                     for j in range(i):
                         npart = params[npart_mapping[self._ptypes[j]]]
-                        if npart > self.ds.index._chunksize:
+                        if npart > self.ds.index.chunksize:
                             pos += npart * size
                 pos += data_file.start * size
             aux_fields_offsets[afield][ptype] = pos
diff --git a/yt/geometry/particle_geometry_handler.py b/yt/geometry/particle_geometry_handler.py
index c6729f392f9..4a0769c0007 100644
--- a/yt/geometry/particle_geometry_handler.py
+++ b/yt/geometry/particle_geometry_handler.py
@@ -13,12 +13,9 @@
 from yt.utilities.lib.fnv_hash import fnv_hash
 from yt.utilities.logger import ytLogger as mylog
 
-CHUNKSIZE = 64 ** 3
-
 
 class ParticleIndex(Index):
     """The Index subclass for particle datasets"""
-    _chunksize = 64**3
 
     def __init__(self, ds, dataset_type):
         self.dataset_type = dataset_type
@@ -46,6 +43,11 @@ def _get_particle_type_counts(self):
     def convert(self, unit):
         return self.dataset.conversion_factors[unit]
 
+    @property
+    def chunksize(self):
+        # This can be overridden in subclasses
+        return 64 ** 3
+
     def _setup_filenames(self):
         template = self.dataset.filename_template
         ndoms = self.dataset.file_count
@@ -54,20 +56,20 @@ def _setup_filenames(self):
         fi = 0
         for i in range(int(ndoms)):
             start = 0
-            if self._chunksize > 0:
-                end = start + self._chunksize
+            if self.chunksize > 0:
+                end = start + self.chunksize
             else:
                 end = None
             while True:
-                df = cls(self.dataset, self.io, template % {'num':i}, fi, (start, end))
+                df = cls(self.dataset, self.io, template % {"num": i}, fi, (start, end))
                 if max(df.total_particles.values()) == 0:
                     break
                 fi += 1
                 self.data_files.append(df)
-                if self._chunksize <= 0:
+                if self.chunksize <= 0:
                     break
                 start = end
-                end += self._chunksize
+                end += self.chunksize
         self.total_particles = sum(
             sum(d.total_particles.values()) for d in self.data_files
        )
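
For context, the refactor above is the standard "private class attribute -> overridable read-only property" pattern, consumed by a chunked iteration loop. Below is a minimal standalone sketch of both pieces under that reading; BaseIndex, SmallChunkIndex, and iter_chunks are hypothetical names for illustration, not yt's actual classes or API.

class BaseIndex:
    @property
    def chunksize(self):
        # Default chunk size; subclasses override the property rather
        # than shadowing a private attribute such as _chunksize.
        return 64 ** 3


class SmallChunkIndex(BaseIndex):
    @property
    def chunksize(self):
        # Analogous to the FLASH IO handler above, which returns 32 ** 3.
        return 32 ** 3


def iter_chunks(total, chunksize):
    # Walk a particle count in fixed-size chunks, mirroring the
    # while-loops in the _initialize_index methods touched by the patch.
    ind = 0
    while ind < total:
        npart = min(chunksize, total - ind)
        yield ind, ind + npart
        ind += npart


index = SmallChunkIndex()
assert index.chunksize == 32 ** 3
assert list(iter_chunks(70_000, index.chunksize)) == [
    (0, 32768),
    (32768, 65536),
    (65536, 70000),
]

Making chunksize public also accounts for the call-site churn in the tipsy and sdf frontends: they stop reaching for the private self.ds.index._chunksize and use self.ds.index.chunksize instead, so a frontend that needs a different chunk size only has to override the property.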