[Yt-svn] commit/yt: MatthewTurk: Numpy 1.6 has issues with dtype='object' calls to numpy.array swallowing

Bitbucket commits-noreply at bitbucket.org
Fri Oct 7 07:54:49 PDT 2011


1 new changeset in yt:

http://bitbucket.org/yt_analysis/yt/changeset/c137ebe9bf98/
changeset:   c137ebe9bf98
branch:      yt
user:        MatthewTurk
date:        2011-10-05 23:00:00
summary:     NumPy 1.6 has an issue where dtype='object' calls to numpy.array can
swallow exceptions raised while checking the elements for sub-sequences, which
garbles the resulting stack trace.  This should address the crazy errors that
have been showing up.  Thanks to Thomas Robitaille for finding the bug and
helping me track it down.
affected #:  11 files (-1 bytes)
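
Every hunk below applies the same workaround: rather than passing a list of
grid (or brick) objects directly to numpy.array with dtype='object', which
under NumPy 1.6 may probe each element for sub-sequences and swallow any
exception raised during that probing, an object array of the right length is
pre-allocated with na.empty and filled element by element.  A minimal sketch
of the pattern, assuming only a plain Python list of grid-like objects and
yt's usual "import numpy as na" alias (FussyGrid and fill_object_array are
hypothetical names used for illustration, not part of the changeset):

    import numpy as na  # yt's conventional alias for numpy

    class FussyGrid(object):
        # Stand-in for a yt grid: item access can raise, which is the
        # kind of exception NumPy 1.6 may swallow while checking for
        # sub-sequences inside na.array(..., dtype='object').
        def __getitem__(self, key):
            raise KeyError(key)

    def fill_object_array(items):
        # Pre-allocate an object array and fill it one element at a
        # time, mirroring the replacement pattern used in the diffs below.
        obj_arr = na.empty(len(items), dtype='object')
        for i, item in enumerate(items):
            obj_arr[i] = item
        return obj_arr

    grids = [FussyGrid(), FussyGrid()]
    # Old pattern, replaced throughout this changeset:
    #     grids_arr = na.array(grids, dtype='object')
    grids_arr = fill_object_array(grids)
    assert grids_arr.dtype == na.dtype('object')
    assert grids_arr[0] is grids[0]

Because the elements are stored by direct assignment, an exception a grid
raises later should surface at the line that actually caused it instead of
being masked inside the array constructor.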

--- a/yt/data_objects/data_containers.py	Thu Sep 15 23:17:17 2011 +0200
+++ b/yt/data_objects/data_containers.py	Wed Oct 05 17:00:00 2011 -0400
@@ -2725,7 +2725,8 @@
                                       grid.RightEdge, grid.dds,
                                       grid.child_mask, 1)
             if v: grids.append(grid)
-        self._grids = na.array(grids, dtype='object')
+        self._grids = na.empty(len(grids), dtype='object')
+        for gi, g in enumerate(grids): self._grids[gi] = g
             
 
     def _is_fully_enclosed(self, grid):
@@ -2938,7 +2939,8 @@
         # Now we sort by level
         grids = grids.tolist()
         grids.sort(key=lambda x: (x.Level, x.LeftEdge[0], x.LeftEdge[1], x.LeftEdge[2]))
-        self._grids = na.array(grids, dtype='object')
+        self._grids = na.empty(len(grids), dtype='object')
+        for gi, g in enumerate(grids): self._grids[gi] = g
 
     def _is_fully_enclosed(self, grid):
         r = na.abs(grid._corners - self.center)


--- a/yt/frontends/art/data_structures.py	Thu Sep 15 23:17:17 2011 +0200
+++ b/yt/frontends/art/data_structures.py	Wed Oct 05 17:00:00 2011 -0400
@@ -324,7 +324,8 @@
                 self.grid_levels[gi,:] = level
                 grids.append(self.grid(gi, self, level, fl, props[0,:]))
                 gi += 1
-        self.grids = na.array(grids, dtype='object')
+        self.grids = na.empty(len(grids), dtype='object')
+        for gi, g in enumerate(grids): self.grids[gi] = g
 
     def _get_grid_parents(self, grid, LE, RE):
         mask = na.zeros(self.num_grids, dtype='bool')


--- a/yt/frontends/chombo/data_structures.py	Thu Sep 15 23:17:17 2011 +0200
+++ b/yt/frontends/chombo/data_structures.py	Wed Oct 05 17:00:00 2011 -0400
@@ -148,7 +148,9 @@
                 self.grid_particle_count[i] = 0
                 self.grid_dimensions[i] = ei - si + 1
                 i += 1
-        self.grids = na.array(self.grids, dtype='object')
+        temp_grids = na.empty(len(self.grids), dtype='object')
+        for gi, g in enumerate(self.grids): temp_grids[gi] = g
+        self.grids = temp_grids
 
     def _populate_grid_objects(self):
         for g in self.grids:


--- a/yt/frontends/enzo/data_structures.py	Thu Sep 15 23:17:17 2011 +0200
+++ b/yt/frontends/enzo/data_structures.py	Wed Oct 05 17:00:00 2011 -0400
@@ -544,12 +544,14 @@
                 self.grids[pid-1]._children_ids.append(self.grids[-1].id)
         self.max_level = self.grid_levels.max()
         mylog.debug("Preparing grids")
+        temp_grids = na.empty(self.num_grids, dtype='object')
         for i, grid in enumerate(self.grids):
             if (i%1e4) == 0: mylog.debug("Prepared % 7i / % 7i grids", i, self.num_grids)
             grid.filename = None
             grid._prepare_grid()
             grid.proc_num = self.grid_procs[i,0]
-        self.grids = na.array(self.grids, dtype='object')
+            temp_grids[i] = grid
+        self.grids = temp_grids
         mylog.debug("Prepared")
 
     def _initialize_grid_arrays(self):


--- a/yt/frontends/flash/data_structures.py	Thu Sep 15 23:17:17 2011 +0200
+++ b/yt/frontends/flash/data_structures.py	Wed Oct 05 17:00:00 2011 -0400
@@ -131,9 +131,9 @@
         # current value.  Note that FLASH uses 1-based indexing for refinement
         # levels, but we do not, so we reduce the level by 1.
         self.grid_levels.flat[:] = f["/refine level"][:][:] - 1
-        g = [self.grid(i+1, self, self.grid_levels[i,0])
-                for i in xrange(self.num_grids)]
-        self.grids = na.array(g, dtype='object')
+        self.grids = na.empty(self.num_grids, dtype='object')
+        for i in xrange(self.num_grids):
+            self.grids[i] = self.grid(i+1, self, self.grid_levels[i,0])
 
     def _populate_grid_objects(self):
         # We only handle 3D data, so offset is 7 (nfaces+1)


--- a/yt/frontends/gadget/data_structures.py	Thu Sep 15 23:17:17 2011 +0200
+++ b/yt/frontends/gadget/data_structures.py	Wed Oct 05 17:00:00 2011 -0400
@@ -121,9 +121,9 @@
         args = izip(xrange(self.num_grids), self.grid_levels.flat,
                     grid_parent_id, LI,
                     self.grid_dimensions, self.grid_particle_count.flat)
-        self.grids = na.array([self.grid(self,j,d,le,lvl,p,n)
-                               for j,lvl,p, le, d, n in args],
-                           dtype='object')
+        self.grids = na.empty(self.num_grids, dtype='object')
+        for gi, (j,lvl,p, le, d, n) in enumerate(args):
+            self.grids[gi] = self.grid(self,j,d,le,lvl,p,n)
         
     def _populate_grid_objects(self):    
         for g in self.grids:


--- a/yt/frontends/gdf/data_structures.py	Thu Sep 15 23:17:17 2011 +0200
+++ b/yt/frontends/gdf/data_structures.py	Wed Oct 05 17:00:00 2011 -0400
@@ -115,7 +115,9 @@
                 self.grid_particle_count[i] = 0
                 self.grid_dimensions[i] = ei - si + 1
                 i += 1
-        self.grids = na.array(self.grids, dtype='object')
+        temp_grids = na.empty(len(self.grids), dtype='object')
+        for gi, g in enumerate(self.grids): temp_grids[gi] = g
+        self.grids = temp_grids
 
     def _populate_grid_objects(self):
         for g in self.grids:


--- a/yt/frontends/ramses/data_structures.py	Thu Sep 15 23:17:17 2011 +0200
+++ b/yt/frontends/ramses/data_structures.py	Wed Oct 05 17:00:00 2011 -0400
@@ -233,7 +233,8 @@
                 grids.append(self.grid(gi, self, level, fl, props[0,:]))
                 gi += 1
         self.proto_grids = []
-        self.grids = na.array(grids, dtype='object')
+        self.grids = na.empty(len(grids), dtype='object')
+        for gi, g in enumerate(grids): self.grids[gi] = g
 
     def _populate_grid_objects(self):
         mask = na.empty(self.grids.size, dtype='int32')


--- a/yt/frontends/stream/data_structures.py	Thu Sep 15 23:17:17 2011 +0200
+++ b/yt/frontends/stream/data_structures.py	Wed Oct 05 17:00:00 2011 -0400
@@ -175,12 +175,14 @@
             self._reconstruct_parent_child()
         self.max_level = self.grid_levels.max()
         mylog.debug("Preparing grids")
+        temp_grids = na.empty(self.num_grids, dtype='object')
         for i, grid in enumerate(self.grids):
             if (i%1e4) == 0: mylog.debug("Prepared % 7i / % 7i grids", i, self.num_grids)
             grid.filename = None
             grid._prepare_grid()
             grid.proc_num = self.grid_procs[i]
-        self.grids = na.array(self.grids, dtype='object')
+            temp_grids[i] = grid
+        self.grids = temp_grids
         mylog.debug("Prepared")
 
     def _reconstruct_parent_child(self):


--- a/yt/frontends/tiger/data_structures.py	Thu Sep 15 23:17:17 2011 +0200
+++ b/yt/frontends/tiger/data_structures.py	Wed Oct 05 17:00:00 2011 -0400
@@ -106,7 +106,8 @@
             levels.append(g.Level)
             counts.append(g.NumberOfParticles)
             i += 1
-        self.grids = na.array(grids, dtype='object')
+        self.grids = na.empty(len(grids), dtype='object')
+        for gi, g in enumerate(grids): self.grids[gi] = g
         self.grid_dimensions[:] = na.array(dims, dtype='int64')
         self.grid_left_edge[:] = na.array(LE, dtype='float64')
         self.grid_right_edge[:] = na.array(RE, dtype='float64')


--- a/yt/visualization/volume_rendering/grid_partitioner.py	Thu Sep 15 23:17:17 2011 +0200
+++ b/yt/visualization/volume_rendering/grid_partitioner.py	Wed Oct 05 17:00:00 2011 -0400
@@ -107,7 +107,6 @@
             pbar.update(i)
             bricks += self._partition_grid(g)
         pbar.finish()
-        bricks = na.array(bricks, dtype='object')
         self.initialize_bricks(bricks)
 
     def initialize_bricks(self, bricks):
@@ -120,14 +119,15 @@
         self.brick_right_edges = na.zeros( (NB, 3), dtype='float64')
         self.brick_parents = na.zeros( NB, dtype='int64')
         self.brick_dimensions = na.zeros( (NB, 3), dtype='int64')
+        self.bricks = na.empty(len(bricks), dtype='object')
         for i,b in enumerate(bricks):
             self.brick_left_edges[i,:] = b.LeftEdge
             self.brick_right_edges[i,:] = b.RightEdge
             self.brick_parents[i] = b.parent_grid_id
             self.brick_dimensions[i,:] = b.my_data[0].shape
+            self.bricks[i] = b
         # Vertex-centered means we subtract one from the shape
         self.brick_dimensions -= 1
-        self.bricks = na.array(bricks, dtype='object')
 
     def reflect_across_boundaries(self):
         mylog.warning("Note that this doesn't fix ghost zones, so there may be artifacts at domain boundaries!")

Repository URL: https://bitbucket.org/yt_analysis/yt/



