[yt-svn] commit/yt: 6 new changesets

commits-noreply at bitbucket.org
Wed Jun 11 14:33:51 PDT 2014


6 new commits in yt:

https://bitbucket.org/yt_analysis/yt/commits/d8b7e24485cf/
Changeset:   d8b7e24485cf
Branch:      yt-3.0
User:        MatthewTurk
Date:        2014-06-10 23:11:19
Summary:     Clip our particle positions to avoid overflow.
Affected #:  1 file

diff -r a83501174991ae2fae2b7a4aa4f9234296976cf3 -r d8b7e24485cf17259f569f06959650b1835c828b yt/geometry/particle_oct_container.pyx
--- a/yt/geometry/particle_oct_container.pyx
+++ b/yt/geometry/particle_oct_container.pyx
@@ -300,6 +300,7 @@
             # Now we locate the particle
             for i in range(3):
                 ind[i] = <int> ((pos[p, i] - self.left_edge[i])*self.idds[i])
+                ind[i] = iclip(ind[i], 0, self.dims[i])
             mask[ind[0],ind[1],ind[2]] |= val
 
     def identify_data_files(self, SelectorObject selector):

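The clip guards against floating-point round-off: a particle sitting exactly on the domain's right edge can map to an index one past the last cell before the mask is indexed. A minimal pure-Python analogue of the fix (the function and argument names are illustrative, not the Cython routine above):

import numpy as np

def locate_particles(pos, left_edge, idds, dims):
    # Convert particle positions to integer cell indices along each axis.
    ind = ((pos - left_edge) * idds).astype(int)
    # Round-off can push an edge particle to index dims[i]; clamp into the grid.
    return np.clip(ind, 0, dims - 1)

# A particle exactly on the right edge of a unit domain with 8 cells per side:
pos = np.array([[1.0, 0.5, 0.25]])
ind = locate_particles(pos, np.zeros(3), np.full(3, 8.0), np.array([8, 8, 8]))
print(ind)   # [[7 4 2]] rather than an out-of-bounds 8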

https://bitbucket.org/yt_analysis/yt/commits/2dc75b72d271/
Changeset:   2dc75b72d271
Branch:      yt-3.0
User:        MatthewTurk
Date:        2014-06-10 23:21:49
Summary:     Reduce a few copies.
Affected #:  1 file

diff -r d8b7e24485cf17259f569f06959650b1835c828b -r 2dc75b72d271dcbb1e974d0e5f8449c184f83014 yt/data_objects/octree_subset.py
--- a/yt/data_objects/octree_subset.py
+++ b/yt/data_objects/octree_subset.py
@@ -52,12 +52,6 @@
     _type_name = 'octree_subset'
     _skip_add = True
     _con_args = ('base_region', 'domain', 'pf')
-    _container_fields = (("index", "dx"),
-                         ("index", "dy"),
-                         ("index", "dz"),
-                         ("index", "x"),
-                         ("index", "y"),
-                         ("index", "z"))
     _domain_offset = 0
     _cell_count = -1
 
@@ -78,19 +72,6 @@
         self.base_region = base_region
         self.base_selector = base_region.selector
 
-    def _generate_container_field(self, field):
-        if self._current_chunk is None:
-            self.index._identify_base_chunk(self)
-        if isinstance(field, tuple): field = field[1]
-        if field == "dx":
-            return self._current_chunk.fwidth[:,0]
-        elif field == "dy":
-            return self._current_chunk.fwidth[:,1]
-        elif field == "dz":
-            return self._current_chunk.fwidth[:,2]
-        else:
-            raise RuntimeError
-
     def __getitem__(self, key):
         tr = super(OctreeSubset, self).__getitem__(key)
         try:
@@ -111,16 +92,20 @@
         return self._num_zones + 2*self._num_ghost_zones
 
     def _reshape_vals(self, arr):
-        if len(arr.shape) == 4: return arr
+        if len(arr.shape) == 4 and arr.flags["F_CONTIGUOUS"]:
+            return arr
         nz = self.nz
         n_oct = arr.shape[0] / (nz**3.0)
         if arr.size == nz*nz*nz*n_oct:
-            arr = arr.reshape((nz, nz, nz, n_oct), order="F")
+            new_shape = (nz, nz, nz, n_oct)
         elif arr.size == nz*nz*nz*n_oct * 3:
-            arr = arr.reshape((nz, nz, nz, n_oct, 3), order="F")
+            new_shape = (nz, nz, nz, n_oct, 3)
         else:
             raise RuntimeError
-        arr = np.asfortranarray(arr)
+        # This will retain units now.
+        arr.shape = new_shape
+        if not arr.flags["F_CONTIGUOUS"]:
+            arr = arr.reshape(new_shape, order="F")
         return arr
 
     _domain_ind = None

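Two things change in _reshape_vals: the early return now also requires Fortran ordering, and the reshape is done by assigning to arr.shape, which never copies (it raises if a copy would be required) and keeps the array's subclass intact, whereas np.asfortranarray hands back a plain ndarray and so dropped yt's unit metadata. A small sketch of that difference, using a bare ndarray subclass as a stand-in for a unit-carrying array:

import numpy as np

class TaggedArray(np.ndarray):
    # Stand-in for a subclass that carries metadata such as units.
    pass

arr = np.arange(16.0).view(TaggedArray)

# In-place shape assignment: no copy, subclass (and its metadata) preserved.
arr.shape = (2, 2, 2, 2)
print(type(arr).__name__)    # TaggedArray

# np.asfortranarray returns a base-class ndarray, so the metadata is gone.
f = np.asfortranarray(arr)
print(type(f).__name__)      # ndarray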

https://bitbucket.org/yt_analysis/yt/commits/1a78c3c71b7f/
Changeset:   1a78c3c71b7f
Branch:      yt-3.0
User:        MatthewTurk
Date:        2014-06-10 23:42:09
Summary:     Pop out the dependencies when we no longer need them.
Affected #:  1 file

diff -r 2dc75b72d271dcbb1e974d0e5f8449c184f83014 -r 1a78c3c71b7fdf413c36efbcb5adb7c3b64590f9 yt/data_objects/data_containers.py
--- a/yt/data_objects/data_containers.py
+++ b/yt/data_objects/data_containers.py
@@ -599,6 +599,10 @@
             return
         elif self._locked == True:
             raise GenerationInProgress(fields)
+        # Track which ones we want in the end
+        ofields = set(self.field_data.keys()
+                    + fields_to_get
+                    + fields_to_generate)
         # At this point, we want to figure out *all* our dependencies.
         fields_to_get = self._identify_dependencies(fields_to_get,
             self._spatial)
@@ -627,6 +631,9 @@
 
         fields_to_generate += gen_fluids + gen_particles
         self._generate_fields(fields_to_generate)
+        for field in self.field_data.keys():
+            if field not in ofields:
+                self.field_data.pop(field)
 
     def _generate_fields(self, fields_to_generate):
         index = 0

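The idea here is that dependency resolution can pull extra fields into field_data that the caller never asked for; by recording the requested set up front and popping everything else once generation is done, those intermediate arrays are released instead of lingering in the container. A toy sketch of the bookkeeping pattern (the dictionaries and field names are illustrative):

# Fields the container already holds, plus what the caller requested.
field_data = {"density": None}
fields_to_get = ["temperature"]
fields_to_generate = ["entropy"]

# Remember everything the caller actually wants to keep around.
ofields = set(list(field_data) + fields_to_get + fields_to_generate)

# Dependency resolution may read extra fields, e.g. "pressure", into field_data.
for name in ("pressure", "temperature", "entropy"):
    field_data[name] = None

# Drop anything that was only needed as an intermediate dependency.
for field in list(field_data):
    if field not in ofields:
        field_data.pop(field)

print(sorted(field_data))   # ['density', 'entropy', 'temperature']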

https://bitbucket.org/yt_analysis/yt/commits/cdcbdefc6f06/
Changeset:   cdcbdefc6f06
Branch:      yt-3.0
User:        MatthewTurk
Date:        2014-06-11 16:51:36
Summary:     Counter-intuitively, this reduces memory since we release references.
Affected #:  1 file

diff -r 1a78c3c71b7fdf413c36efbcb5adb7c3b64590f9 -r cdcbdefc6f062c300205015e49268abfbf1f79e0 yt/geometry/coordinate_handler.py
--- a/yt/geometry/coordinate_handler.py
+++ b/yt/geometry/coordinate_handler.py
@@ -34,10 +34,10 @@
 
 def _get_coord_fields(axi, units = "code_length"):
     def _dds(field, data):
-        rv = data.pf.arr(data.fwidth[...,axi], units)
+        rv = data.pf.arr(data.fwidth[...,axi].copy(), units)
         return data._reshape_vals(rv)
     def _coords(field, data):
-        rv = data.pf.arr(data.fcoords[...,axi], units)
+        rv = data.pf.arr(data.fcoords[...,axi].copy(), units)
         return data._reshape_vals(rv)
     return _dds, _coords
 

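The reason the copies help is that data.fwidth[..., axi] is a view into the full (N, 3) array: caching the wrapped view as a field keeps the entire parent buffer alive for as long as the field does. Copying the single column up front retains only a third of the data and lets the parent be garbage-collected. A short NumPy sketch of the effect:

import numpy as np

fwidth = np.random.random((1000000, 3))   # large parent array

view = fwidth[..., 0]          # a view: holding it pins all of fwidth in memory
col = fwidth[..., 0].copy()    # an independent buffer one third the size

print(view.base is fwidth)     # True
print(col.base is None)        # True -> fwidth can be freed once it goes away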

https://bitbucket.org/yt_analysis/yt/commits/2ce2b78ca299/
Changeset:   2ce2b78ca299
Branch:      yt-3.0
User:        MatthewTurk
Date:        2014-06-11 16:52:17
Summary:     Merging
Affected #:  2 files

diff -r cdcbdefc6f062c300205015e49268abfbf1f79e0 -r 2ce2b78ca299cbf3155b53e54229c9f2fb1843c9 yt/geometry/oct_container.pyx
--- a/yt/geometry/oct_container.pyx
+++ b/yt/geometry/oct_container.pyx
@@ -122,6 +122,8 @@
         cdef int i, j, k, n
         data.global_index = -1
         data.level = 0
+        data.oref = 0
+        data.nz = 1
         assert(ref_mask.shape[0] / float(data.nz) ==
             <int>(ref_mask.shape[0]/float(data.nz)))
         obj.allocate_domains([ref_mask.shape[0] / data.nz])
@@ -496,7 +498,7 @@
             coords[:,i] += self.DLE[i]
         return coords
 
-    def save_octree(self, always_descend = False):
+    def save_octree(self):
         # Get the header
         header = dict(dims = (self.nn[0], self.nn[1], self.nn[2]),
                       left_edge = (self.DLE[0], self.DLE[1], self.DLE[2]),
@@ -507,13 +509,12 @@
         # domain_id = -1 here, because we want *every* oct
         cdef OctVisitorData data
         self.setup_data(&data, -1)
-        data.oref = 1
+        data.oref = 0
+        data.nz = 1
         cdef np.ndarray[np.uint8_t, ndim=1] ref_mask
         ref_mask = np.zeros(self.nocts * data.nz, dtype="uint8") - 1
-        cdef void *p[2]
-        cdef np.uint8_t ad = int(always_descend)
-        p[0] = <void *> &ad
-        p[1] = ref_mask.data
+        cdef void *p[1]
+        p[0] = ref_mask.data
         data.array = p
         # Enforce partial_coverage here
         self.visit_all_octs(selector, oct_visitors.store_octree, &data, 1)

diff -r cdcbdefc6f062c300205015e49268abfbf1f79e0 -r 2ce2b78ca299cbf3155b53e54229c9f2fb1843c9 yt/geometry/oct_visitors.pyx
--- a/yt/geometry/oct_visitors.pyx
+++ b/yt/geometry/oct_visitors.pyx
@@ -179,12 +179,8 @@
     cdef np.uint8_t *arr, res, ii, *always_descend
     ii = cind(data.ind[0], data.ind[1], data.ind[2])
     cdef void **p = <void **> data.array
-    always_descend = <np.uint8_t *> p[0]
-    arr = <np.uint8_t *> p[1]
-    if always_descend[0] == 1 and data.last == o.domain_ind:
-        return
-    data.last = o.domain_ind
-    if o.children == NULL or o.children[ii] == NULL:
+    arr = <np.uint8_t *> p[0]
+    if o.children == NULL:
         # Not refined.
         res = 0
     else:
@@ -216,7 +212,7 @@
             o.children = <Oct **> malloc(sizeof(Oct *) * 8)
             for i in range(8):
                 o.children[i] = NULL
-        for i in range(arr[data.index]):
+        for i in range(8):
             o.children[ii + i] = &octs[nocts[0]]
             o.children[ii + i].domain_ind = nocts[0]
             o.children[ii + i].file_ind = -1


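With oref = 0 and nz = 1, save_octree appears to emit a single uint8 per oct in traversal order: 0 for a leaf, nonzero for an oct whose eight children are all stored, which is why the per-child indexing and the always_descend switch could be dropped. A toy pure-Python sketch of that flat encoding, illustrating the format only, not yt's visitor machinery:

def store(oct, mask):
    # Depth-first walk: record one flag per oct, then descend into children.
    refined = oct.get("children") is not None
    mask.append(1 if refined else 0)
    if refined:
        for child in oct["children"]:   # a refined oct carries all 8 children
            store(child, mask)
    return mask

leaf = {}
root = {"children": [{"children": [leaf] * 8}] + [leaf] * 7}
print(store(root, []))   # [1, 1] followed by fifteen 0s: one entry per oct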
https://bitbucket.org/yt_analysis/yt/commits/af18fce402d7/
Changeset:   af18fce402d7
Branch:      yt-3.0
User:        MatthewTurk
Date:        2014-06-11 23:33:44
Summary:     Merged in MatthewTurk/yt/yt-3.0 (pull request #945)

Reduce memory usage for particle octrees
Affected #:  4 files

diff -r 122c45b92abbc439fa04107c58079932a19462d4 -r af18fce402d7e68f3620b4a0d6b322b50815f573 yt/data_objects/data_containers.py
--- a/yt/data_objects/data_containers.py
+++ b/yt/data_objects/data_containers.py
@@ -599,6 +599,10 @@
             return
         elif self._locked == True:
             raise GenerationInProgress(fields)
+        # Track which ones we want in the end
+        ofields = set(self.field_data.keys()
+                    + fields_to_get
+                    + fields_to_generate)
         # At this point, we want to figure out *all* our dependencies.
         fields_to_get = self._identify_dependencies(fields_to_get,
             self._spatial)
@@ -627,6 +631,9 @@
 
         fields_to_generate += gen_fluids + gen_particles
         self._generate_fields(fields_to_generate)
+        for field in self.field_data.keys():
+            if field not in ofields:
+                self.field_data.pop(field)
 
     def _generate_fields(self, fields_to_generate):
         index = 0

diff -r 122c45b92abbc439fa04107c58079932a19462d4 -r af18fce402d7e68f3620b4a0d6b322b50815f573 yt/data_objects/octree_subset.py
--- a/yt/data_objects/octree_subset.py
+++ b/yt/data_objects/octree_subset.py
@@ -52,12 +52,6 @@
     _type_name = 'octree_subset'
     _skip_add = True
     _con_args = ('base_region', 'domain', 'pf')
-    _container_fields = (("index", "dx"),
-                         ("index", "dy"),
-                         ("index", "dz"),
-                         ("index", "x"),
-                         ("index", "y"),
-                         ("index", "z"))
     _domain_offset = 0
     _cell_count = -1
 
@@ -78,19 +72,6 @@
         self.base_region = base_region
         self.base_selector = base_region.selector
 
-    def _generate_container_field(self, field):
-        if self._current_chunk is None:
-            self.index._identify_base_chunk(self)
-        if isinstance(field, tuple): field = field[1]
-        if field == "dx":
-            return self._current_chunk.fwidth[:,0]
-        elif field == "dy":
-            return self._current_chunk.fwidth[:,1]
-        elif field == "dz":
-            return self._current_chunk.fwidth[:,2]
-        else:
-            raise RuntimeError
-
     def __getitem__(self, key):
         tr = super(OctreeSubset, self).__getitem__(key)
         try:
@@ -111,16 +92,20 @@
         return self._num_zones + 2*self._num_ghost_zones
 
     def _reshape_vals(self, arr):
-        if len(arr.shape) == 4: return arr
+        if len(arr.shape) == 4 and arr.flags["F_CONTIGUOUS"]:
+            return arr
         nz = self.nz
         n_oct = arr.shape[0] / (nz**3.0)
         if arr.size == nz*nz*nz*n_oct:
-            arr = arr.reshape((nz, nz, nz, n_oct), order="F")
+            new_shape = (nz, nz, nz, n_oct)
         elif arr.size == nz*nz*nz*n_oct * 3:
-            arr = arr.reshape((nz, nz, nz, n_oct, 3), order="F")
+            new_shape = (nz, nz, nz, n_oct, 3)
         else:
             raise RuntimeError
-        arr = np.asfortranarray(arr)
+        # This will retain units now.
+        arr.shape = new_shape
+        if not arr.flags["F_CONTIGUOUS"]:
+            arr = arr.reshape(new_shape, order="F")
         return arr
 
     _domain_ind = None

diff -r 122c45b92abbc439fa04107c58079932a19462d4 -r af18fce402d7e68f3620b4a0d6b322b50815f573 yt/geometry/coordinate_handler.py
--- a/yt/geometry/coordinate_handler.py
+++ b/yt/geometry/coordinate_handler.py
@@ -34,10 +34,10 @@
 
 def _get_coord_fields(axi, units = "code_length"):
     def _dds(field, data):
-        rv = data.pf.arr(data.fwidth[...,axi], units)
+        rv = data.pf.arr(data.fwidth[...,axi].copy(), units)
         return data._reshape_vals(rv)
     def _coords(field, data):
-        rv = data.pf.arr(data.fcoords[...,axi], units)
+        rv = data.pf.arr(data.fcoords[...,axi].copy(), units)
         return data._reshape_vals(rv)
     return _dds, _coords
 

diff -r 122c45b92abbc439fa04107c58079932a19462d4 -r af18fce402d7e68f3620b4a0d6b322b50815f573 yt/geometry/particle_oct_container.pyx
--- a/yt/geometry/particle_oct_container.pyx
+++ b/yt/geometry/particle_oct_container.pyx
@@ -300,6 +300,7 @@
             # Now we locate the particle
             for i in range(3):
                 ind[i] = <int> ((pos[p, i] - self.left_edge[i])*self.idds[i])
+                ind[i] = iclip(ind[i], 0, self.dims[i])
             mask[ind[0],ind[1],ind[2]] |= val
 
     def identify_data_files(self, SelectorObject selector):

Repository URL: https://bitbucket.org/yt_analysis/yt/

--

This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.


