[yt-svn] commit/yt: ngoldbaum: Merged in MatthewTurk/yt/yt-3.0 (pull request #1025)

commits-noreply at bitbucket.org
Sat Aug 2 12:52:40 PDT 2014


1 new commit in yt:

https://bitbucket.org/yt_analysis/yt/commits/ef1183617671/
Changeset:   ef1183617671
Branch:      yt-3.0
User:        ngoldbaum
Date:        2014-08-02 21:52:31
Summary:     Merged in MatthewTurk/yt/yt-3.0 (pull request #1025)

NMSU-ART: Attempt to fix dark matter masses
Affected #:  2 files
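
The change registers the per-species particle types when the particle header
is read and then exposes "darkmatter" and "stars" as particle unions built on
top of the raw "specieN" types, instead of treating those two names as raw
types themselves. A minimal sketch of the grouping logic, assuming four
illustrative species (the names and count are placeholders, not taken from a
real run):

    from yt.data_objects.particle_unions import ParticleUnion

    # Raw types recorded from the particle header, one per species;
    # in this frontend the last species holds the stars.
    particle_types_raw = ("specie0", "specie1", "specie2", "specie3")

    # Every species except the last is grouped into "darkmatter";
    # the last species becomes "stars", mirroring create_field_info below.
    darkmatter = ParticleUnion("darkmatter", list(particle_types_raw[:-1]))
    stars = ParticleUnion("stars", list(particle_types_raw[-1:]))
    # In the frontend these are then registered via
    # ds.add_particle_union(darkmatter) and ds.add_particle_union(stars).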

diff -r 4a8c93735cdf58c42d1f60dbd3fb323e227ab982 -r ef1183617671b95978d736cbbc4402e07683aa1a yt/frontends/art/data_structures.py
--- a/yt/frontends/art/data_structures.py
+++ b/yt/frontends/art/data_structures.py
@@ -39,6 +39,8 @@
     io_registry
 from yt.utilities.lib.misc_utilities import \
     get_box_grids_level
+from yt.data_objects.particle_unions import \
+    ParticleUnion
 
 from yt.frontends.art.definitions import *
 import yt.utilities.fortran_utils as fpu
@@ -104,17 +106,7 @@
         self.particle_field_list = [f for f in particle_fields]
         self.field_list = [("art", f) for f in fluid_fields]
         # now generate all of the possible particle fields
-        if "wspecies" in self.dataset.parameters.keys():
-            wspecies = self.dataset.parameters['wspecies']
-            nspecies = len(wspecies)
-            self.dataset.particle_types = ["darkmatter", "stars"]
-            for specie in range(nspecies):
-                self.dataset.particle_types.append("specie%i" % specie)
-            self.dataset.particle_types_raw = tuple(
-                self.dataset.particle_types)
-        else:
-            self.dataset.particle_types = []
-        for ptype in self.dataset.particle_types:
+        for ptype in self.dataset.particle_types_raw:
             for pfield in self.particle_field_list:
                 pfn = (ptype, pfield)
                 self.field_list.append(pfn)
@@ -313,6 +305,8 @@
             self.root_level = root_level
             mylog.info("Using root level of %02i", self.root_level)
         # read the particle header
+        self.particle_types = []
+        self.particle_types_raw = ()
         if not self.skip_particles and self._file_particle_header:
             with open(self._file_particle_header, "rb") as fh:
                 particle_header_vals = fpu.read_attrs(
@@ -323,6 +317,10 @@
                 lspecies = np.fromfile(fh, dtype='>i', count=10)
             self.parameters['wspecies'] = wspecies[:n]
             self.parameters['lspecies'] = lspecies[:n]
+            for specie in range(n):
+                self.particle_types.append("specie%i" % specie)
+            self.particle_types_raw = tuple(
+                self.particle_types)
             ls_nonzero = np.diff(lspecies)[:n-1]
             ls_nonzero = np.append(lspecies[0], ls_nonzero)
             self.star_type = len(ls_nonzero)
@@ -360,6 +358,16 @@
         self.gamma = self.parameters["gamma"]
         mylog.info("Max level is %02i", self.max_level)
 
+    def create_field_info(self):
+        super(ARTDataset, self).create_field_info()
+        if "wspecies" in self.parameters:
+            # We create dark_matter and stars unions.
+            ptr = self.particle_types_raw
+            pu = ParticleUnion("darkmatter", list(ptr[:-1]))
+            self.add_particle_union(pu)
+            pu = ParticleUnion("stars", list(ptr[-1:]))
+            self.add_particle_union(pu)
+
     @classmethod
     def _is_valid(self, *args, **kwargs):
         """

diff -r 4a8c93735cdf58c42d1f60dbd3fb323e227ab982 -r ef1183617671b95978d736cbbc4402e07683aa1a yt/frontends/art/io.py
--- a/yt/frontends/art/io.py
+++ b/yt/frontends/art/io.py
@@ -39,6 +39,11 @@
         self.cache = {}
         self.masks = {}
         super(IOHandlerART, self).__init__(*args, **kwargs)
+        self.ws = self.ds.parameters["wspecies"]
+        self.ls = self.ds.parameters["lspecies"]
+        self.file_particle = self.ds._file_particle_data
+        self.file_stars = self.ds._file_particle_stars
+        self.Nrow = self.ds.parameters["Nrow"]
 
     def _read_fluid_selection(self, chunks, selector, fields, size):
         # Chunks in this case will have affiliated domain subset objects
@@ -70,8 +75,6 @@
         if key in self.masks.keys() and self.caching:
             return self.masks[key]
         ds = self.ds
-        ptmax = self.ws[-1]
-        pbool, idxa, idxb = _determine_field_size(ds, ftype, self.ls, ptmax)
         pstr = 'particle_position_%s'
         x,y,z = [self._get_field((ftype, pstr % ax)) for ax in 'xyz']
         mask = selector.select_points(x, y, z, 0.0)
@@ -81,6 +84,26 @@
         else:
             return mask
 
+    def _read_particle_coords(self, chunks, ptf):
+        for chunk in chunks:
+            for ptype, field_list in sorted(ptf.items()):
+                x = self._get_field((ptype, "particle_position_x"))
+                y = self._get_field((ptype, "particle_position_y"))
+                z = self._get_field((ptype, "particle_position_z"))
+                yield ptype, (x, y, z)
+
+    def _read_particle_fields(self, chunks, ptf, selector):
+        for chunk in chunks:
+            for ptype, field_list in sorted(ptf.items()):
+                x = self._get_field((ptype, "particle_position_x"))
+                y = self._get_field((ptype, "particle_position_y"))
+                z = self._get_field((ptype, "particle_position_z"))
+                mask = selector.select_points(x, y, z, 0.0)
+                if mask is None: continue
+                for field in field_list:
+                    data = self._get_field((ptype, field))
+                    yield (ptype, field), data[mask]
+
     def _get_field(self,  field):
         if field in self.cache.keys() and self.caching:
             mylog.debug("Cached %s", str(field))
@@ -139,6 +162,13 @@
             temp[-nstars:] = data
             tr[field] = temp
             del data
+        # We check again, after it's been filled
+        if fname == "particle_mass":
+            # We now divide by NGrid in order to make this match up.  Note that
+            # this means that even when requested in *code units*, we are
+            # giving them as modified by the ng value.  This only works for
+            # dark_matter -- stars are regular matter.
+            tr[field] /= self.ds.domain_dimensions.prod()
         if tr == {}:
             tr = dict((f, np.array([])) for f in fields)
         if self.caching:
@@ -147,35 +177,15 @@
         else:
             return tr[field]
 
-    def _read_particle_selection(self, chunks, selector, fields):
-        chunk = chunks.next()
-        self.ds = chunk.objs[0].domain.ds
-        self.ws = self.ds.parameters["wspecies"]
-        self.ls = self.ds.parameters["lspecies"]
-        self.file_particle = self.ds._file_particle_data
-        self.file_stars = self.ds._file_particle_stars
-        self.Nrow = self.ds.parameters["Nrow"]
-        data = {f:np.array([]) for f in fields}
-        for f in fields:
-            ftype, fname = f
-            mask = self._get_mask(selector, ftype)
-            arr = self._get_field(f)[mask].astype('f8')
-            data[f] = np.concatenate((arr, data[f]))
-        return data
-
-def _determine_field_size(ds, field, lspecies, ptmax):
+def _determine_field_size(pf, field, lspecies, ptmax):
     pbool = np.zeros(len(lspecies), dtype="bool")
     idxas = np.concatenate(([0, ], lspecies[:-1]))
     idxbs = lspecies
     if "specie" in field:
         index = int(field.replace("specie", ""))
         pbool[index] = True
-    elif field == "stars":
-        pbool[-1] = True
-    elif field == "darkmatter":
-        pbool[0:-1] = True
     else:
-        pbool[:] = True
+        raise RuntimeError
     idxa, idxb = idxas[pbool][0], idxbs[pbool][-1]
     return pbool, idxa, idxb
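
The io.py hunk above also divides dark matter "particle_mass" by the number of
root grid cells (domain_dimensions.prod()); as the committed comment notes,
this rescaling applies even when the field is requested in code units. A small
numerical sketch of the division (grid size and raw weights are illustrative
only):

    import numpy as np

    domain_dimensions = np.array([128, 128, 128])  # illustrative root grid
    ngrid = domain_dimensions.prod()               # 128**3 = 2097152

    raw_weights = np.array([1.0, 1.0, 8.0])        # per-particle weights (made up)
    code_mass = raw_weights / ngrid                # what _get_field now returns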

Repository URL: https://bitbucket.org/yt_analysis/yt/
