[Yt-svn] yt: Some minor fixes to the various frontends, and preliminary support for particles in FLASH

hg at spacepope.org
Wed Sep 1 23:13:40 PDT 2010


hg Repository: yt
details:   yt/rev/48274493af4e
changeset: 3366:48274493af4e
user:      Matthew Turk <matthewturk at gmail.com>
date:      Wed Sep 01 23:13:35 2010 -0700
description:
Some minor fixes to the various frontends, and preliminary support for
particles in FLASH.

The particle support in FLASH is incredibly primitive in this first pass, and I
don't recommend using it yet.
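
For reference, here is a minimal standalone sketch of how the particle fields
now get picked up alongside the grid fields.  The filename is just a
placeholder for a FLASH HDF5 plotfile:

    import h5py

    f = h5py.File("flash_hdf5_plt_cnt_0000", "r")  # hypothetical plotfile
    # Grid fields are listed under "/unknown names"; particle fields under
    # "/particle names".  Particle fields get a "particle_" prefix so they
    # do not collide with the grid fields.
    grid_fields = [s.strip() for s in f["/unknown names"][:].flat]
    particle_fields = ["particle_" + s[0].strip()
                       for s in f["/particle names"][:]]
    field_list = grid_fields + particle_fields
    f.close()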

diffstat:

 yt/frontends/flash/data_structures.py  |  34 +++++++++++++---
 yt/frontends/flash/io.py               |  42 +++++++++++++++++++--
 yt/frontends/ramses/data_structures.py |   2 +
 3 files changed, 67 insertions(+), 11 deletions(-)

diffs (156 lines):

diff -r 08c621951f4d -r 48274493af4e yt/frontends/flash/data_structures.py
--- a/yt/frontends/flash/data_structures.py	Wed Sep 01 23:22:39 2010 -0400
+++ b/yt/frontends/flash/data_structures.py	Wed Sep 01 23:13:35 2010 -0700
@@ -35,7 +35,10 @@
     AMRHierarchy
 from yt.data_objects.static_output import \
     StaticOutput
-from yt.utilities.definitions import mpc_conversion
+from yt.utilities.definitions import \
+    mpc_conversion
+from yt.utilities.io_handler import \
+    io_registry
 
 from .fields import \
     FLASHFieldContainer, \
@@ -81,6 +84,8 @@
     def _detect_fields(self):
         ncomp = self._handle["/unknown names"].shape[0]
         self.field_list = [s.strip() for s in self._handle["/unknown names"][:].flat]
+        self.field_list += ["particle_" + s[0].strip() for s
+                            in self._handle["/particle names"][:]]
     
     def _setup_classes(self):
         dd = self._get_data_reader_dict()
@@ -109,11 +114,21 @@
             nxb, nyb, nzb = [int(f["/simulation parameters"]['n%sb' % ax])
                               for ax in 'xyz']
         self.grid_dimensions[:] *= (nxb, nyb, nzb)
-        # particle count will need to be fixed somehow:
-        #   by getting access to the particle file we can get the number of
-        #   particles in each brick.  but how do we handle accessing the
-        #   particle file?
-
+        # particle_count is harder.  We stride over the particle file and count
+        # up per grid.
+        npart = f["/tracer particles"].shape[0]
+        blki = [s[0].strip() for s in f["/particle names"][:]].index("blk")
+        start = 0
+        stride = 1e6
+        while start < npart:
+            end = min(start + stride - 1, npart)
+            blks = f["/tracer particles"][start:end,blki].astype("int64")
+            ta = na.bincount(blks)
+            old_size = ta.size
+            ta = na.resize(ta, self.num_grids)
+            ta[old_size:] = 0
+            self.grid_particle_count += ta[:,None]
+            start = end
         # This will become redundant, as _prepare_grid will reset it to its
         # current value.  Note that FLASH uses 1-based indexing for refinement
         # levels, but we do not, so we reduce the level by 1.
@@ -141,6 +156,7 @@
     def _setup_unknown_fields(self):
         for field in self.field_list:
             if field in self.parameter_file.field_info: continue
+            pfield = field.startswith("particle_")
             mylog.info("Adding %s to list of fields", field)
             cf = None
             if self.parameter_file.has_key(field):
@@ -150,11 +166,15 @@
                     return _convert_function
                 cf = external_wrapper(field)
             add_field(field, lambda a, b: None,
-                      convert_function=cf, take_log=False)
+                      convert_function=cf, take_log=False,
+                      particle_type=pfield)
 
     def _setup_derived_fields(self):
         self.derived_field_list = []
 
+    def _setup_data_io(self):
+        self.io = io_registry[self.data_style](self.parameter_file)
+
 class FLASHStaticOutput(StaticOutput):
     _hierarchy_class = FLASHHierarchy
     _fieldinfo_class = FLASHFieldContainer
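
Pulled out as a standalone sketch, the per-grid particle counting above
strides over "/tracer particles" and bins the "blk" column; the filename,
grid count, and stride below are placeholders:

    import numpy as na
    import h5py

    f = h5py.File("flash_hdf5_plt_cnt_0000", "r")  # hypothetical plotfile
    num_grids = 512                                # known from the hierarchy
    npart = f["/tracer particles"].shape[0]
    # The "blk" attribute records which block each particle belongs to.
    names = [s[0].strip() for s in f["/particle names"][:]]
    blki = names.index("blk")
    counts = na.zeros(num_grids, dtype="int64")
    start, stride = 0, 1000000
    while start < npart:
        end = min(start + stride, npart)
        blks = f["/tracer particles"][start:end, blki].astype("int64")
        ta = na.bincount(blks)
        old_size = ta.size
        ta = na.resize(ta, num_grids)  # na.resize repeats data to pad,
        ta[old_size:] = 0              # so zero out everything it repeated
        counts += ta
        start = end
    f.close()
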
diff -r 08c621951f4d -r 48274493af4e yt/frontends/flash/io.py
--- a/yt/frontends/flash/io.py	Wed Sep 01 23:22:39 2010 -0400
+++ b/yt/frontends/flash/io.py	Wed Sep 01 23:13:35 2010 -0700
@@ -23,26 +23,60 @@
   along with this program.  If not, see <http://www.gnu.org/licenses/>.
 """
 
+import numpy as na
 import h5py
 
 from yt.utilities.io_handler import \
     BaseIOHandler
 
 class IOHandlerFLASH(BaseIOHandler):
+    _particle_reader = False
     _data_style = "flash_hdf5"
 
-    def __init__(self, *args, **kwargs):
+    def __init__(self, pf, *args, **kwargs):
+        self._num_per_stride = kwargs.pop("num_per_stride", 1000000)
         BaseIOHandler.__init__(self, *args, **kwargs)
+        # Now we cache the particle fields
+        self.pf = pf
+        self._handle = h5py.File(self.pf.parameter_filename, "r")
+        particle_fields = [s[0].strip() for s in
+                           self._handle["/particle names"][:]]
+        self._particle_fields = dict([("particle_" + s, i) for i, s in
+                                     enumerate(particle_fields)])
+
+    def _read_particles(self, fields_to_read, type, args, grid_list,
+            count_list, conv_factors):
+        pass
+
+    def _select_particles(self, grid, field):
+        f = self._handle
+        npart = f["/tracer particles"].shape[0]
+        total_selected = 0
+        start = 0
+        stride = 1e6
+        blki = self._particle_fields["particle_blk"]
+        bi = grid.id - grid._id_offset
+        fi = self._particle_fields[field]
+        tr = []
+        while start < npart:
+            end = min(start + stride - 1, npart)
+            gi = f["/tracer particles"][start:end,blki] == bi
+            tr.append(f["/tracer particles"][gi,fi])
+            start = end
+        return na.concatenate(tr)
 
     def _read_data_set(self, grid, field):
-        f = h5py.File(grid.pf.parameter_filename, "r")
-        tr = f["/%s" % field][grid.id - grid._id_offset,:,:,:].transpose()
+        f = self._handle
+        if field in self._particle_fields:
+            tr = self._select_particles(grid, field)
+        else:
+            tr = f["/%s" % field][grid.id - grid._id_offset,:,:,:].transpose()
         return tr.astype("float64")
 
     def _read_data_slice(self, grid, field, axis, coord):
         sl = [slice(None), slice(None), slice(None)]
         sl[axis] = slice(coord, coord + 1)
-        f = h5py.File(grid.pf.parameter_filename, "r")
+        f = self._handle
         tr = f["/%s" % field][grid.id - grid._id_offset].transpose()[sl]
         return tr.astype("float64")
 
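A matching standalone sketch of the particle selection in _select_particles:
pick out the particles whose "blk" entry matches a given block, again reading
the tracer particle table in strides.  The filename, field name, and block id
are placeholders:

    import numpy as na
    import h5py

    f = h5py.File("flash_hdf5_plt_cnt_0000", "r")  # hypothetical plotfile
    names = [s[0].strip() for s in f["/particle names"][:]]
    columns = dict(("particle_" + s, i) for i, s in enumerate(names))
    blki = columns["particle_blk"]
    fi = columns["particle_posx"]  # illustrative field; any particle column works
    target_blk = 12                # block id to select, as stored in "blk"
    npart = f["/tracer particles"].shape[0]
    tr = []
    start, stride = 0, 1000000
    while start < npart:
        end = min(start + stride, npart)
        chunk = f["/tracer particles"][start:end, :]
        keep = chunk[:, blki] == target_blk
        tr.append(chunk[keep, fi])
        start = end
    selected = na.concatenate(tr)
    f.close()
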
diff -r 08c621951f4d -r 48274493af4e yt/frontends/ramses/data_structures.py
--- a/yt/frontends/ramses/data_structures.py	Wed Sep 01 23:22:39 2010 -0400
+++ b/yt/frontends/ramses/data_structures.py	Wed Sep 01 23:13:35 2010 -0700
@@ -32,6 +32,8 @@
       StaticOutput
 import _ramses_reader
 from .fields import RAMSESFieldContainer
+from yt.utilities.io_handler import \
+    io_registry
 
 def num_deep_inc(f):
     def wrap(self, *args, **kwargs):
