[yt-svn] commit/yt: 2 new changesets

Bitbucket commits-noreply at bitbucket.org
Wed Nov 14 13:18:46 PST 2012


2 new commits in yt:


https://bitbucket.org/yt_analysis/yt/changeset/547be7c09542/
changeset:   547be7c09542
branch:      yt
user:        jzuhone
date:        2012-11-14 21:59:48
summary:     Adding support for standalone particle files in FLASH, provided that each particle file has a corresponding plotfile or checkpoint file with a matching grid structure (in other words, both files were written at the same simulation time).

One would access particle data in this fashion:

{{{
#!python

pf = load("radio_halo_1kpc_hdf5_plt_cnt_0120", particle_filename="radio_halo_1kpc_hdf5_part_0120")

}}}

The particle data may then be accessed as if it were part of the main file.
affected #:  2 files

diff -r 917e70c630b36db1f9a821023a3a26b4c9454ac9 -r 547be7c0954206d1e7f969eba4547517d7156180 yt/frontends/flash/data_structures.py
--- a/yt/frontends/flash/data_structures.py
+++ b/yt/frontends/flash/data_structures.py
@@ -69,7 +69,8 @@
         self.hierarchy_filename = self.parameter_file.parameter_filename
         self.directory = os.path.dirname(self.hierarchy_filename)
         self._handle = pf._handle
-
+        self._particle_handle = pf._particle_handle
+        
         self.float_type = np.float64
         AMRHierarchy.__init__(self,pf,data_style)
 
@@ -79,9 +80,9 @@
     def _detect_fields(self):
         ncomp = self._handle["/unknown names"].shape[0]
         self.field_list = [s for s in self._handle["/unknown names"][:].flat]
-        if ("/particle names" in self._handle) :
+        if ("/particle names" in self._particle_handle) :
             self.field_list += ["particle_" + s[0].strip() for s
-                                in self._handle["/particle names"][:]]
+                                in self._particle_handle["/particle names"][:]]
     
     def _setup_classes(self):
         dd = self._get_data_reader_dict()
@@ -98,6 +99,7 @@
     def _parse_hierarchy(self):
         f = self._handle # shortcut
         pf = self.parameter_file # shortcut
+        f_part = self._particle_handle # shortcut
         
         # Initialize to the domain left / domain right
         ND = self.parameter_file.dimensionality
@@ -120,7 +122,7 @@
                               for ax in 'xyz']
         self.grid_dimensions[:] *= (nxb, nyb, nzb)
         try:
-            self.grid_particle_count[:] = f["/localnp"][:][:,None]
+            self.grid_particle_count[:] = f_part["/localnp"][:][:,None]
         except KeyError:
             self.grid_particle_count[:] = 0.0
         self._particle_indices = np.zeros(self.num_grids + 1, dtype='int64')
@@ -209,6 +211,7 @@
     
     def __init__(self, filename, data_style='flash_hdf5',
                  storage_filename = None,
+                 particle_filename = None, 
                  conversion_override = None):
 
         if self._handle is not None: return
@@ -216,6 +219,16 @@
         if conversion_override is None: conversion_override = {}
         self._conversion_override = conversion_override
 
+        self.particle_filename = particle_filename
+
+        if self.particle_filename is None :
+            self._particle_handle = self._handle
+        else :
+            try :
+                self._particle_handle = h5py.File(self.particle_filename, "r")
+            except :
+                raise IOError(self.particle_filename)
+                                                                
         StaticOutput.__init__(self, filename, data_style)
         self.storage_filename = storage_filename
 


diff -r 917e70c630b36db1f9a821023a3a26b4c9454ac9 -r 547be7c0954206d1e7f969eba4547517d7156180 yt/frontends/flash/io.py
--- a/yt/frontends/flash/io.py
+++ b/yt/frontends/flash/io.py
@@ -39,9 +39,11 @@
         # Now we cache the particle fields
         self.pf = pf
         self._handle = pf._handle
+        self._particle_handle = pf._particle_handle
+        
         try :
             particle_fields = [s[0].strip() for s in
-                               self._handle["/particle names"][:]]
+                               self._particle_handle["/particle names"][:]]
             self._particle_fields = dict([("particle_" + s, i) for i, s in
                                           enumerate(particle_fields)])
         except KeyError:
@@ -53,12 +55,13 @@
 
     def _read_data_set(self, grid, field):
         f = self._handle
+        f_part = self._particle_handle
         if field in self._particle_fields:
             if grid.NumberOfParticles == 0: return np.array([], dtype='float64')
             start = self.pf.h._particle_indices[grid.id - grid._id_offset]
             end = self.pf.h._particle_indices[grid.id - grid._id_offset + 1]
             fi = self._particle_fields[field]
-            tr = f["/tracer particles"][start:end, fi]
+            tr = f_part["/tracer particles"][start:end, fi]
         else:
             tr = f["/%s" % field][grid.id - grid._id_offset,:,:,:].transpose()
         return tr.astype("float64")



https://bitbucket.org/yt_analysis/yt/changeset/64ab22a8de33/
changeset:   64ab22a8de33
branch:      yt
user:        MatthewTurk
date:        2012-11-14 22:18:44
summary:     Merged in jzuhone/yt (pull request #342)
affected #:  2 files

diff -r 9fafae943a699a5e2740699f610048c57eea7b83 -r 64ab22a8de3385fbee60afd6927b5b25ad7541e4 yt/frontends/flash/data_structures.py
--- a/yt/frontends/flash/data_structures.py
+++ b/yt/frontends/flash/data_structures.py
@@ -69,7 +69,8 @@
         self.hierarchy_filename = self.parameter_file.parameter_filename
         self.directory = os.path.dirname(self.hierarchy_filename)
         self._handle = pf._handle
-
+        self._particle_handle = pf._particle_handle
+        
         self.float_type = np.float64
         AMRHierarchy.__init__(self,pf,data_style)
 
@@ -79,9 +80,9 @@
     def _detect_fields(self):
         ncomp = self._handle["/unknown names"].shape[0]
         self.field_list = [s for s in self._handle["/unknown names"][:].flat]
-        if ("/particle names" in self._handle) :
+        if ("/particle names" in self._particle_handle) :
             self.field_list += ["particle_" + s[0].strip() for s
-                                in self._handle["/particle names"][:]]
+                                in self._particle_handle["/particle names"][:]]
     
     def _setup_classes(self):
         dd = self._get_data_reader_dict()
@@ -98,6 +99,7 @@
     def _parse_hierarchy(self):
         f = self._handle # shortcut
         pf = self.parameter_file # shortcut
+        f_part = self._particle_handle # shortcut
         
         # Initialize to the domain left / domain right
         ND = self.parameter_file.dimensionality
@@ -120,7 +122,7 @@
                               for ax in 'xyz']
         self.grid_dimensions[:] *= (nxb, nyb, nzb)
         try:
-            self.grid_particle_count[:] = f["/localnp"][:][:,None]
+            self.grid_particle_count[:] = f_part["/localnp"][:][:,None]
         except KeyError:
             self.grid_particle_count[:] = 0.0
         self._particle_indices = np.zeros(self.num_grids + 1, dtype='int64')
@@ -209,6 +211,7 @@
     
     def __init__(self, filename, data_style='flash_hdf5',
                  storage_filename = None,
+                 particle_filename = None, 
                  conversion_override = None):
 
         if self._handle is not None: return
@@ -216,6 +219,16 @@
         if conversion_override is None: conversion_override = {}
         self._conversion_override = conversion_override
 
+        self.particle_filename = particle_filename
+
+        if self.particle_filename is None :
+            self._particle_handle = self._handle
+        else :
+            try :
+                self._particle_handle = h5py.File(self.particle_filename, "r")
+            except :
+                raise IOError(self.particle_filename)
+                                                                
         StaticOutput.__init__(self, filename, data_style)
         self.storage_filename = storage_filename
 


diff -r 9fafae943a699a5e2740699f610048c57eea7b83 -r 64ab22a8de3385fbee60afd6927b5b25ad7541e4 yt/frontends/flash/io.py
--- a/yt/frontends/flash/io.py
+++ b/yt/frontends/flash/io.py
@@ -39,9 +39,11 @@
         # Now we cache the particle fields
         self.pf = pf
         self._handle = pf._handle
+        self._particle_handle = pf._particle_handle
+        
         try :
             particle_fields = [s[0].strip() for s in
-                               self._handle["/particle names"][:]]
+                               self._particle_handle["/particle names"][:]]
             self._particle_fields = dict([("particle_" + s, i) for i, s in
                                           enumerate(particle_fields)])
         except KeyError:
@@ -53,12 +55,13 @@
 
     def _read_data_set(self, grid, field):
         f = self._handle
+        f_part = self._particle_handle
         if field in self._particle_fields:
             if grid.NumberOfParticles == 0: return np.array([], dtype='float64')
             start = self.pf.h._particle_indices[grid.id - grid._id_offset]
             end = self.pf.h._particle_indices[grid.id - grid._id_offset + 1]
             fi = self._particle_fields[field]
-            tr = f["/tracer particles"][start:end, fi]
+            tr = f_part["/tracer particles"][start:end, fi]
         else:
             tr = f["/%s" % field][grid.id - grid._id_offset,:,:,:].transpose()
         return tr.astype("float64")

Repository URL: https://bitbucket.org/yt_analysis/yt/



