[yt-svn] commit/yt: ngoldbaum: Merged in jzuhone/yt (pull request #2157)

commits-noreply at bitbucket.org
Wed May 18 11:11:22 PDT 2016


1 new commit in yt:

https://bitbucket.org/yt_analysis/yt/commits/506d890f8554/
Changeset:   506d890f8554
Branch:      yt
User:        ngoldbaum
Date:        2016-05-18 18:11:15+00:00
Summary:     Merged in jzuhone/yt (pull request #2157)

FLASHParticleDataset
Affected #:  6 files

diff -r f562b7d66268e013dbd227ce912cba0137f9ef2f -r 506d890f8554f9d7c95480410f8add32981b7e7f doc/source/examining/loading_data.rst
--- a/doc/source/examining/loading_data.rst
+++ b/doc/source/examining/loading_data.rst
@@ -775,32 +775,38 @@
 ----------
 
 FLASH HDF5 data is *mostly* supported and cared for by John ZuHone.  To load a
-FLASH dataset, you can use the ``yt.load`` command and provide it the file name of a plot file or checkpoint file, but particle
-files are not currently directly loadable by themselves, due to the fact that
-they typically lack grid information. For instance, if you were in a directory
-with the following files:
+FLASH dataset, you can use the ``yt.load`` command and provide it the file name of
+a plot file, checkpoint file, or particle file. Particle files require special
+handling depending on the situation, the main issue being that they typically
+lack grid information. The first case is when you have a plotfile and a particle
+file that you would like to load together. In the simplest case, this happens
+automatically. For instance, if you were in a directory with the following files:
 
 .. code-block:: none
 
-   cosmoSim_coolhdf5_chk_0026
+   radio_halo_1kpc_hdf5_plt_cnt_0100 # plotfile
+   radio_halo_1kpc_hdf5_part_0100 # particle file
 
-You would feed it the filename ``cosmoSim_coolhdf5_chk_0026``:
+where the plotfile and the particle file were created at the same time (so the
+particle data are consistent with the grid structure of the plotfile). Notice
+also that the prefix ``"radio_halo_1kpc_"`` and the file number ``0100`` are the
+same. In this special case, the particle file will be loaded automatically when
+``yt.load`` is called on the plotfile. This also works when loading a number of
+files in a time series.
 
-.. code-block:: python
-
-   import yt
-   ds = yt.load("cosmoSim_coolhdf5_chk_0026")
-
-If you have a FLASH particle file that was created at the same time as
-a plotfile or checkpoint file (therefore having particle data
-consistent with the grid structure of the latter), its data may be loaded with the
-``particle_filename`` optional argument:
+If the two files do not have the same prefix and number, but they nevertheless have the same
+grid structure and are at the same simulation time, the particle data may be loaded with the
+``particle_filename`` optional argument to ``yt.load``:
 
 .. code-block:: python
 
     import yt
     ds = yt.load("radio_halo_1kpc_hdf5_plt_cnt_0100", particle_filename="radio_halo_1kpc_hdf5_part_0100")
 
+If you don't have a corresponding plotfile for a particle file but would still
+like to load the particle data, you can call ``yt.load`` on the file directly.
+In this case, the grid information will not be available, and the particle data
+will be loaded in a fashion similar to SPH data.
+
 .. rubric:: Caveats
 
 * Please be careful that the units are correctly utilized; yt assumes cgs by default, but conversion to

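A minimal sketch of the loading behavior documented above (filenames as in the
docs example; ``n_ref`` is the octree refinement threshold accepted by the new
``FLASHParticleDataset``):

    import yt

    # Plotfile and particle file share a prefix and number, so the
    # particle file is picked up automatically alongside the grid data.
    ds = yt.load("radio_halo_1kpc_hdf5_plt_cnt_0100")

    # Standalone particle file: no grid information is available, so the
    # data are indexed like SPH particles (octree controlled by n_ref).
    ds = yt.load("radio_halo_1kpc_hdf5_part_0100", n_ref=64)
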
diff -r f562b7d66268e013dbd227ce912cba0137f9ef2f -r 506d890f8554f9d7c95480410f8add32981b7e7f tests/tests.yaml
--- a/tests/tests.yaml
+++ b/tests/tests.yaml
@@ -14,7 +14,7 @@
   local_fits_000:
     - yt/frontends/fits/tests/test_outputs.py
 
-  local_flash_000:
+  local_flash_001:
     - yt/frontends/flash/tests/test_outputs.py
 
   local_gadget_000:

diff -r f562b7d66268e013dbd227ce912cba0137f9ef2f -r 506d890f8554f9d7c95480410f8add32981b7e7f yt/frontends/flash/api.py
--- a/yt/frontends/flash/api.py
+++ b/yt/frontends/flash/api.py
@@ -16,12 +16,14 @@
 from .data_structures import \
       FLASHGrid, \
       FLASHHierarchy, \
-      FLASHDataset
+      FLASHDataset, \
+      FLASHParticleDataset
 
 from .fields import \
       FLASHFieldInfo
 
 from .io import \
-      IOHandlerFLASH
+      IOHandlerFLASH, \
+      IOHandlerFLASHParticle
 
 from . import tests

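With these re-exports, the new classes are importable directly from the
frontend's api module:

    from yt.frontends.flash.api import FLASHParticleDataset, IOHandlerFLASHParticle
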
diff -r f562b7d66268e013dbd227ce912cba0137f9ef2f -r 506d890f8554f9d7c95480410f8add32981b7e7f yt/frontends/flash/data_structures.py
--- a/yt/frontends/flash/data_structures.py
+++ b/yt/frontends/flash/data_structures.py
@@ -18,13 +18,15 @@
 import numpy as np
 import weakref
 
-from yt.funcs import mylog
 from yt.data_objects.grid_patch import \
     AMRGridPatch
+from yt.data_objects.static_output import \
+    Dataset, ParticleFile
+from yt.funcs import mylog
 from yt.geometry.grid_geometry_handler import \
     GridIndex
-from yt.data_objects.static_output import \
-    Dataset
+from yt.geometry.particle_geometry_handler import \
+    ParticleIndex
 from yt.utilities.file_handler import \
     HDF5FileHandler
 from yt.utilities.physical_ratios import cm_per_mpc
@@ -251,8 +253,6 @@
         self.time_unit = self.quan(1.0, "s")
         self.velocity_unit = self.quan(1.0, "cm/s")
         self.temperature_unit = self.quan(temperature_factor, "K")
-        # Still need to deal with:
-        #self.conversion_factors['temp'] = (1.0 + self.current_redshift)**-2.0
         self.unit_registry.modify("code_magnetic", self.magnetic_unit)
         
     def set_code_units(self):
@@ -437,3 +437,52 @@
 
     def close(self):
         self._handle.close()
+
+class FLASHParticleFile(ParticleFile):
+    pass
+
+class FLASHParticleDataset(FLASHDataset):
+    _index_class = ParticleIndex
+    over_refine_factor = 1
+    filter_bbox = False
+    _file_class = FLASHParticleFile
+
+    def __init__(self, filename, dataset_type='flash_particle_hdf5',
+                 storage_filename = None,
+                 units_override = None,
+                 n_ref = 64, unit_system = "cgs"):
+
+        if self._handle is not None: return
+        self._handle = HDF5FileHandler(filename)
+        self.n_ref = n_ref
+        self.refine_by = 2
+        Dataset.__init__(self, filename, dataset_type, units_override=units_override,
+                         unit_system=unit_system)
+        self.storage_filename = storage_filename
+
+    def _parse_parameter_file(self):
+        # Let the superclass do all the work but then
+        # fix the domain dimensions
+        super(FLASHParticleDataset, self)._parse_parameter_file()
+        nz = 1 << self.over_refine_factor
+        self.domain_dimensions = np.zeros(3, "int32")
+        self.domain_dimensions[:self.dimensionality] = nz
+        self.filename_template = self.parameter_filename
+        self.file_count = 1
+
+    @classmethod
+    def _is_valid(self, *args, **kwargs):
+        try:
+            fileh = HDF5FileHandler(args[0])
+            if "bounding box" not in fileh["/"].keys() \
+                and "localnp" in fileh["/"].keys():
+                return True
+        except IOError:
+            pass
+        return False
+
+    @classmethod
+    def _guess_candidates(cls, base, directories, files):
+        candidates = [_ for _ in files if "_hdf5_part_" in _]
+        # Typically, Flash won't have nested outputs.
+        return candidates, (len(candidates) == 0)

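The ``_is_valid`` check above identifies pure particle files by the absence of
the "bounding box" dataset (grid metadata) combined with the presence of
"localnp" (per-processor particle counts) at the HDF5 root. A standalone
sketch of the same heuristic, using h5py directly (the helper name is
hypothetical):

    import h5py

    def looks_like_flash_particle_file(filename):
        # Hypothetical helper mirroring FLASHParticleDataset._is_valid:
        # particle-only files carry "localnp" but no "bounding box".
        try:
            with h5py.File(filename, "r") as f:
                return "bounding box" not in f and "localnp" in f
        except IOError:
            return False
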
diff -r f562b7d66268e013dbd227ce912cba0137f9ef2f -r 506d890f8554f9d7c95480410f8add32981b7e7f yt/frontends/flash/io.py
--- a/yt/frontends/flash/io.py
+++ b/yt/frontends/flash/io.py
@@ -20,6 +20,8 @@
     BaseIOHandler
 from yt.utilities.logger import ytLogger as mylog
 from yt.geometry.selection_routines import AlwaysSelector
+from yt.utilities.lib.geometry_utils import \
+    compute_morton
 
 # http://stackoverflow.com/questions/2361945/detecting-consecutive-integers-in-a-list
 def particle_sequences(grids):
@@ -34,6 +36,16 @@
         seq = list(v[1] for v in g)
         yield seq
 
+def determine_particle_fields(handle):
+    try:
+        particle_fields = [s[0].decode("ascii","ignore").strip()
+                           for s in handle["/particle names"][:]]
+        _particle_fields = dict([("particle_" + s, i) for i, s in
+                                 enumerate(particle_fields)])
+    except KeyError:
+        _particle_fields = {}
+    return _particle_fields
+
 class IOHandlerFLASH(BaseIOHandler):
     _particle_reader = False
     _dataset_type = "flash_hdf5"
@@ -43,15 +55,7 @@
         # Now we cache the particle fields
         self._handle = ds._handle
         self._particle_handle = ds._particle_handle
-        
-        try :
-            particle_fields = [s[0].decode("ascii","ignore").strip()
-                               for s in
-                               self._particle_handle["/particle names"][:]]
-            self._particle_fields = dict([("particle_" + s, i) for i, s in
-                                          enumerate(particle_fields)])
-        except KeyError:
-            self._particle_fields = {}
+        self._particle_fields = determine_particle_fields(self._particle_handle)
 
     def _read_particles(self, fields_to_read, type, args, grid_list,
             count_list, conv_factors):
@@ -154,3 +158,96 @@
                     rv[g.id][field] = np.asarray(data[...,i], "=f8")
         return rv
 
+class IOHandlerFLASHParticle(BaseIOHandler):
+    _particle_reader = True
+    _dataset_type = "flash_particle_hdf5"
+
+    def __init__(self, ds):
+        super(IOHandlerFLASHParticle, self).__init__(ds)
+        # Now we cache the particle fields
+        self._handle = ds._handle
+        self._particle_fields = determine_particle_fields(self._handle)
+        self._position_fields = [self._particle_fields["particle_pos%s" % ax]
+                                 for ax in 'xyz']
+        self._chunksize = 32**3
+
+    def _read_fluid_selection(self, chunks, selector, fields, size):
+        raise NotImplementedError
+
+    def _read_particle_coords(self, chunks, ptf):
+        chunks = list(chunks)
+        data_files = set([])
+        assert(len(ptf) == 1)
+        for chunk in chunks:
+            for obj in chunk.objs:
+                data_files.update(obj.data_files)
+        px, py, pz = self._position_fields
+        p_fields = self._handle["/tracer particles"]
+        assert(len(data_files) == 1)
+        for data_file in sorted(data_files):
+            pcount = self._count_particles(data_file)["io"]
+            for ptype, field_list in sorted(ptf.items()):
+                total = 0
+                while total < pcount:
+                    count = min(self._chunksize, pcount - total)
+                    x = np.asarray(p_fields[total:total+count, px], dtype="=f8")
+                    y = np.asarray(p_fields[total:total+count, py], dtype="=f8")
+                    z = np.asarray(p_fields[total:total+count, pz], dtype="=f8")
+                    total += count
+                    yield ptype, (x, y, z)
+
+    def _read_particle_fields(self, chunks, ptf, selector):
+        chunks = list(chunks)
+        data_files = set([])
+        assert(len(ptf) == 1)
+        for chunk in chunks:
+            for obj in chunk.objs:
+                data_files.update(obj.data_files)
+        px, py, pz = self._position_fields
+        p_fields = self._handle["/tracer particles"]
+        assert(len(data_files) == 1)
+        for data_file in sorted(data_files):
+            pcount = self._count_particles(data_file)["io"]
+            for ptype, field_list in sorted(ptf.items()):
+                total = 0
+                while total < pcount:
+                    count = min(self._chunksize, pcount - total)
+                    x = np.asarray(p_fields[total:total+count, px], dtype="=f8")
+                    y = np.asarray(p_fields[total:total+count, py], dtype="=f8")
+                    z = np.asarray(p_fields[total:total+count, pz], dtype="=f8")
+                    total += count
+                    mask = selector.select_points(x, y, z, 0.0)
+                    del x, y, z
+                    if mask is None: continue
+                    for field in field_list:
+                        fi = self._particle_fields[field]
+                        data = p_fields[total-count:total, fi]
+                        yield (ptype, field), data[mask]
+
+    def _initialize_index(self, data_file, regions):
+        p_fields = self._handle["/tracer particles"]
+        px, py, pz = self._position_fields
+        pcount = self._count_particles(data_file)["io"]
+        morton = np.empty(pcount, dtype='uint64')
+        ind = 0
+        while ind < pcount:
+            npart = min(self._chunksize, pcount - ind)
+            pos = np.empty((npart, 3), dtype="=f8")
+            pos[:,0] = p_fields[ind:ind+npart, px]
+            pos[:,1] = p_fields[ind:ind+npart, py]
+            pos[:,2] = p_fields[ind:ind+npart, pz]
+            regions.add_data_file(pos, data_file.file_id)
+            morton[ind:ind+npart] = \
+                compute_morton(pos[:,0], pos[:,1], pos[:,2],
+                               data_file.ds.domain_left_edge,
+                               data_file.ds.domain_right_edge)
+            ind += self._chunksize
+        return morton
+
+    def _count_particles(self, data_file):
+        pcount = {"io": self._handle["/localnp"][:].sum()}
+        return pcount
+
+    def _identify_fields(self, data_file):
+        fields = [("io", field) for field in self._particle_fields]
+        return fields, {}

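The new reader streams the "/tracer particles" array in fixed-size chunks of
32**3 rows rather than reading it wholesale, which bounds memory use for large
particle counts. A self-contained sketch of that access pattern (h5py; the
px/py/pz column indices of the position fields are assumed inputs):

    import h5py
    import numpy as np

    CHUNKSIZE = 32**3  # matches IOHandlerFLASHParticle._chunksize

    def iter_positions(filename, px, py, pz):
        # Yield (x, y, z) arrays at most CHUNKSIZE particles at a time.
        with h5py.File(filename, "r") as f:
            p_fields = f["/tracer particles"]
            pcount = p_fields.shape[0]
            total = 0
            while total < pcount:
                count = min(CHUNKSIZE, pcount - total)
                x = np.asarray(p_fields[total:total+count, px], dtype="=f8")
                y = np.asarray(p_fields[total:total+count, py], dtype="=f8")
                z = np.asarray(p_fields[total:total+count, pz], dtype="=f8")
                total += count
                yield x, y, z
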
diff -r f562b7d66268e013dbd227ce912cba0137f9ef2f -r 506d890f8554f9d7c95480410f8add32981b7e7f yt/frontends/flash/tests/test_outputs.py
--- a/yt/frontends/flash/tests/test_outputs.py
+++ b/yt/frontends/flash/tests/test_outputs.py
@@ -20,8 +20,11 @@
 from yt.utilities.answer_testing.framework import \
     requires_ds, \
     small_patch_amr, \
-    data_dir_load
-from yt.frontends.flash.api import FLASHDataset
+    data_dir_load, \
+    sph_answer
+from yt.frontends.flash.api import FLASHDataset, \
+    FLASHParticleDataset
+from collections import OrderedDict
 
 _fields = ("temperature", "density", "velocity_magnitude")
 
@@ -45,7 +48,6 @@
         test_wind_tunnel.__name__ = test.description
         yield test
 
-
 @requires_file(wt)
 def test_FLASHDataset():
     assert isinstance(data_dir_load(wt), FLASHDataset)
@@ -54,3 +56,28 @@
 def test_units_override():
     for test in units_override_check(sloshing):
         yield test
+
+fid_1to3_b1 = "fiducial_1to3_b1/fiducial_1to3_b1_hdf5_part_0080"
+
+fid_1to3_b1_fields = OrderedDict(
+    [
+        (("deposit", "all_density"), None),
+        (("deposit", "all_count"), None),
+        (("deposit", "all_cic"), None),
+        (("deposit", "all_cic_velocity_x"), ("deposit", "all_cic")),
+        (("deposit", "all_cic_velocity_y"), ("deposit", "all_cic")),
+        (("deposit", "all_cic_velocity_z"), ("deposit", "all_cic")),
+    ]
+)
+
+
+@requires_file(fid_1to3_b1)
+def test_FLASHParticleDataset():
+    assert isinstance(data_dir_load(fid_1to3_b1), FLASHParticleDataset)
+
+@requires_ds(fid_1to3_b1, big_data=True)
+def test_fid_1to3_b1():
+    ds = data_dir_load(fid_1to3_b1)
+    for test in sph_answer(ds, 'fiducial_1to3_b1_hdf5_part_0080', 6684119, fid_1to3_b1_fields):
+        test_fid_1to3_b1.__name__ = test.description
+        yield test

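The ``fid_1to3_b1_fields`` mapping pairs each answer-tested field with the
weight field used when projecting it (``None`` meaning unweighted). A hedged
sketch of the equivalent interactive usage, assuming the ``fiducial_1to3_b1``
test dataset is available locally:

    import yt

    ds = yt.load("fiducial_1to3_b1/fiducial_1to3_b1_hdf5_part_0080")
    # Unweighted projection of the deposited particle density.
    yt.ProjectionPlot(ds, "z", ("deposit", "all_density")).save()
    # CIC-deposited velocity weighted by the CIC mass deposition,
    # matching the (field, weight_field) pairs above.
    yt.ProjectionPlot(ds, "z", ("deposit", "all_cic_velocity_x"),
                      weight_field=("deposit", "all_cic")).save()
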
Repository URL: https://bitbucket.org/yt_analysis/yt/
