[yt-svn] commit/yt-3.0: 2 new changesets

Bitbucket <commits-noreply@bitbucket.org>
Tue Aug 14 08:54:58 PDT 2012


2 new commits in yt-3.0:


https://bitbucket.org/yt_analysis/yt-3.0/changeset/b269f79b7a6f/
changeset:   b269f79b7a6f
branch:      yt-3.0
user:        MatthewTurk
date:        2012-08-14 17:18:39
summary:     Bringing the RAMSES field infos up to date.
affected #:  1 file

diff -r e0b36f6a8da207a2c179fa541a9193ec0cba5649 -r b269f79b7a6f23bc11aed7c37d7fa22fd73bbbe6 yt/frontends/ramses/fields.py
--- a/yt/frontends/ramses/fields.py
+++ b/yt/frontends/ramses/fields.py
@@ -52,20 +52,20 @@
 ]
 
 for f in known_ramses_fields:
-    if f not in RAMSESFieldInfo:
-        add_field(f, function=lambda a,b: None, take_log=True,
+    if f not in KnownRAMSESFields:
+        add_ramses_field(f, function=NullFunc, take_log=True,
                   validators = [ValidateDataField(f)])
 
 def _convertDensity(data):
     return data.convert("Density")
-RAMSESFieldInfo["Density"]._units = r"\rm{g}/\rm{cm}^3"
-RAMSESFieldInfo["Density"]._projected_units = r"\rm{g}/\rm{cm}^2"
-RAMSESFieldInfo["Density"]._convert_function=_convertDensity
+KnownRAMSESFields["Density"]._units = r"\rm{g}/\rm{cm}^3"
+KnownRAMSESFields["Density"]._projected_units = r"\rm{g}/\rm{cm}^2"
+KnownRAMSESFields["Density"]._convert_function=_convertDensity
 
 def _convertVelocity(data):
     return data.convert("x-velocity")
 for ax in ['x','y','z']:
-    f = RAMSESFieldInfo["%s-velocity" % ax]
+    f = KnownRAMSESFields["%s-velocity" % ax]
     f._units = r"\rm{cm}/\rm{s}"
     f._convert_function = _convertVelocity
     f.take_log = False
@@ -83,8 +83,7 @@
 ]
 
 for f in known_ramses_particle_fields:
-    if f not in RAMSESFieldInfo:
-        add_field(f, function=NullFunc, take_log=True,
+    if f not in KnownRAMSESFields:
+        add_ramses_field(f, function=NullFunc, take_log=True,
                   validators = [ValidateDataField(f)],
                   particle_type = True)
-
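
For context, the renames in this commit follow the two-container convention used
by yt frontends of this era: fields yt derives itself live in a fallback container
(RAMSESFieldInfo / add_field), while fields read directly from disk are registered
in a Known*Fields container through a frontend-specific helper
(KnownRAMSESFields / add_ramses_field) with a NullFunc placeholder. Below is a
minimal sketch of that module preamble, assuming the yt 2.x-era
field_info_container layout; anything not visible in the diff above is an
assumption, not a verbatim excerpt.

    # Sketch of the container setup the diff relies on; the import path and the
    # create_with_fallback helper are assumptions about the yt 2.x-era layout.
    from yt.data_objects.field_info_container import \
        FieldInfoContainer, FieldInfo, NullFunc, ValidateDataField

    # Derived fields: fall back to the universal FieldInfo registry.
    RAMSESFieldInfo = FieldInfoContainer.create_with_fallback(FieldInfo)
    add_field = RAMSESFieldInfo.add_field

    # On-disk fields: registered per frontend, filled in by the IO handler.
    KnownRAMSESFields = FieldInfoContainer()
    add_ramses_field = KnownRAMSESFields.add_field

    known_ramses_fields = ["Density", "x-velocity", "y-velocity", "z-velocity"]

    # NullFunc marks a field as read from disk rather than computed;
    # ValidateDataField checks that the on-disk data is actually present.
    for f in known_ramses_fields:
        if f not in KnownRAMSESFields:
            add_ramses_field(f, function=NullFunc, take_log=True,
                             validators=[ValidateDataField(f)])

The diff's change from add_field to add_ramses_field therefore keeps the on-disk
RAMSES fields out of the derived-field registry and in the known-fields container
where the conversion functions and unit strings are attached.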



https://bitbucket.org/yt_analysis/yt-3.0/changeset/1d476c1af29f/
changeset:   1d476c1af29f
branch:      yt-3.0
user:        MatthewTurk
date:        2012-08-14 17:54:27
summary:     Particle reading for RAMSES frontend now functional in its first stages.
affected #:  1 file

diff -r b269f79b7a6f23bc11aed7c37d7fa22fd73bbbe6 -r 1d476c1af29f294821ace48d0488425fdfce7e59 yt/frontends/ramses/io.py
--- a/yt/frontends/ramses/io.py
+++ b/yt/frontends/ramses/io.py
@@ -29,6 +29,7 @@
 from yt.utilities.io_handler import \
     BaseIOHandler
 from yt.utilities.logger import ytLogger as mylog
+import yt.utilities.fortran_utils as fpu
 import cStringIO
 
 class IOHandlerRAMSES(BaseIOHandler):
@@ -38,7 +39,6 @@
         # Chunks in this case will have affiliated domain subset objects
         # Each domain subset will contain a hydro_offset array, which gives
         # pointers to level-by-level hydro information
-        n = 0
         tr = dict((f, na.empty(size, dtype='float64')) for f in fields)
         cp = 0
         for chunk in chunks:
@@ -57,44 +57,36 @@
                 cp += subset.cell_count
         return tr
 
-    def _read_data_set(self, grid, field):
-        tr = na.zeros(grid.ActiveDimensions, dtype='float64')
-        filled = na.zeros(grid.ActiveDimensions, dtype='int32')
-        to_fill = grid.ActiveDimensions.prod()
-        grids = [grid]
-        l_delta = 0
-        varindex = self.ramses_tree.field_ind[field]
-        while to_fill > 0 and len(grids) > 0:
-            next_grids = []
-            for g in grids:
-                to_fill -= self.ramses_tree.read_grid(varindex, field,
-                        grid.get_global_startindex(), grid.ActiveDimensions,
-                        tr, filled, g.Level, 2**l_delta, g.locations)
-                next_grids += g.Parent
-            grids = next_grids
-            l_delta += 1
+    def _read_particle_selection(self, chunks, selector, fields):
+        size = 0
+        masks = {}
+        for chunk in chunks:
+            for subset in chunk.objs:
+                # We read the whole thing, then feed it back to the selector
+                offsets = []
+                f = open(subset.domain.part_fn, "rb")
+                foffsets = subset.domain.particle_field_offsets
+                selection = {}
+                for ax in 'xyz':
+                    field = "particle_position_%s" % ax
+                    f.seek(foffsets[field])
+                    selection[ax] = fpu.read_vector(f, 'd')
+                mask = selector.select_points(selection['x'],
+                            selection['y'], selection['z'])
+                if mask is None: continue
+                size += mask.sum()
+                masks[id(subset)] = mask
+        # Now our second pass
+        tr = dict((f, na.empty(size, dtype="float64")) for f in fields)
+        for chunk in chunks:
+            for subset in chunk.objs:
+                f = open(subset.domain.part_fn, "rb")
+                mask = masks.pop(id(subset), None)
+                if mask is None: continue
+                for ftype, fname in fields:
+                    offsets.append((foffsets[fname], (ftype,fname)))
+                for offset, field in sorted(offsets):
+                    f.seek(offset)
+                    tr[field] = fpu.read_vector(f, 'd')[mask]
         return tr
 
-    def _read_data_slice(self, grid, field, axis, coord):
-        sl = [slice(None), slice(None), slice(None)]
-        sl[axis] = slice(coord, coord + 1)
-        return self._read_data_set(grid, field)[sl]
-
-    def preload(self, grids, sets):
-        if len(grids) == 0: return
-        domain_keys = defaultdict(list)
-        pf_field_list = grids[0].pf.h.field_list
-        sets = [dset for dset in list(sets) if dset in pf_field_list]
-        exc = self._read_exception
-        for g in grids:
-            domain_keys[g.domain].append(g)
-        for domain, grids in domain_keys.items():
-            mylog.debug("Starting read of domain %s (%s)", domain, sets)
-            for field in sets:
-                for g in grids:
-                    self.queue[g.id][field] = self._read_data_set(g, field)
-                print "Clearing", field, domain
-                self.ramses_tree.clear_tree(field, domain - 1)
-        mylog.debug("Finished read of %s", sets)
-
-    def modify(self, data): return data
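
In outline, the new _read_particle_selection works in two passes: the first pass
opens each domain's particle file, reads only the three particle_position_* vectors
at their recorded offsets, asks the selector for a boolean mask, and accumulates the
total number of selected particles; the second pass allocates the output arrays at
that size, then seeks to each requested field's offset and copies the masked values
in. Below is a minimal, self-contained sketch of the same two-pass pattern with
plain-NumPy stand-ins for fpu.read_vector and the selector; every name in it is
illustrative, not yt API, and the real code reads Fortran records and uses the
geometry selectors.

    import numpy as np

    def read_vector(f, count):
        # Stand-in for fpu.read_vector: the real helper parses Fortran record
        # markers; here we just read `count` float64 values.
        return np.frombuffer(f.read(8 * count), dtype="float64")

    def select_points(x, y, z, center=0.5, radius=0.25):
        # Stand-in for selector.select_points: keep points inside a sphere.
        mask = (x - center)**2 + (y - center)**2 + (z - center)**2 <= radius**2
        return mask if mask.any() else None

    def read_particle_selection(subsets, fields, count):
        # Pass 1: positions only -> one boolean mask per subset, total size.
        masks, size = {}, 0
        for subset in subsets:
            pos = {}
            with open(subset["part_fn"], "rb") as f:
                for ax in "xyz":
                    f.seek(subset["offsets"]["particle_position_%s" % ax])
                    pos[ax] = read_vector(f, count)
            mask = select_points(pos["x"], pos["y"], pos["z"])
            if mask is None:
                continue
            size += mask.sum()
            masks[id(subset)] = mask
        # Pass 2: allocate once, seek to each field, copy through the mask.
        tr = dict((field, np.empty(size, dtype="float64")) for field in fields)
        cp = 0
        for subset in subsets:
            mask = masks.pop(id(subset), None)
            if mask is None:
                continue
            n = int(mask.sum())
            with open(subset["part_fn"], "rb") as f:
                for field in sorted(fields, key=lambda fn: subset["offsets"][fn]):
                    f.seek(subset["offsets"][field])
                    tr[field][cp:cp + n] = read_vector(f, count)[mask]
            cp += n
        return tr

Sizing the output in the first pass keeps the second pass to a single allocation
per field, and sorting each subset's reads by file offset keeps the seeks moving
forward through the particle file.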

Repository URL: https://bitbucket.org/yt_analysis/yt-3.0/

--

This is a commit notification from bitbucket.org. You are receiving
it because you have the commit notification service enabled and are
the addressed recipient of this email.


