[Yt-svn] yt-commit r1323 - in trunk/yt: lagos raven
mturk at wrangler.dreamhost.com
Fri Jun 5 13:12:08 PDT 2009
Author: mturk
Date: Fri Jun 5 13:12:07 2009
New Revision: 1323
URL: http://yt.spacepope.org/changeset/1323
Log:
If a slice was taken, and some processors had no grids, the process would fail.
This should now be fixed. (Cutting planes, too.) This fixes some cases for
embedded usage, since in non-embedded, the grids will be distributed evenly as
long as N_grids >= N_proc.
Also, fixed potential issues where center is not an array in the plot
collection and book saving is now parallel aware.
Modified:
trunk/yt/lagos/BaseDataTypes.py
trunk/yt/lagos/ParallelTools.py
trunk/yt/raven/PlotCollection.py
Modified: trunk/yt/lagos/BaseDataTypes.py
==============================================================================
--- trunk/yt/lagos/BaseDataTypes.py (original)
+++ trunk/yt/lagos/BaseDataTypes.py Fri Jun 5 13:12:07 2009
@@ -528,9 +528,11 @@
continue # A "True" return means we did it
# To ensure that we use data from this object as much as possible,
# we're going to have to set the same thing several times
- temp_data[field] = na.concatenate(
- [self._get_data_from_grid(grid, field)
- for grid in self._get_grids()])
+ data = [self._get_data_from_grid(grid, field)
+ for grid in self._get_grids()]
+ if len(data) == 0: data = None
+ else: data = na.concatenate(data)
+ temp_data[field] = data
# Now the next field can use this field
self[field] = temp_data[field]
# We finalize
@@ -675,7 +677,9 @@
points = []
for grid in self._get_grids():
points.append(self._generate_grid_coords(grid))
- t = self._mpi_catarray(na.concatenate(points))
+ if len(points) == 0: points = None
+ else: points = na.concatenate(points)
+ t = self._mpi_catarray(points)
self['px'] = t[:,0]
self['py'] = t[:,1]
self['pz'] = t[:,2]
@@ -847,7 +851,9 @@
points = []
for grid in self._get_grids():
points.append(self._generate_grid_coords(grid))
- t = self._mpi_catarray(na.concatenate(points))
+ if len(points) == 0: points = None
+ else: points = na.concatenate(points)
+ t = self._mpi_catarray(points)
pos = (t[:,0:3] - self.center)
self['px'] = na.dot(pos, self._x_vec)
self['py'] = na.dot(pos, self._y_vec)
Modified: trunk/yt/lagos/ParallelTools.py
==============================================================================
--- trunk/yt/lagos/ParallelTools.py (original)
+++ trunk/yt/lagos/ParallelTools.py Fri Jun 5 13:12:07 2009
@@ -277,7 +277,9 @@
if MPI.COMM_WORLD.rank == 0:
temp_data = []
for i in range(1,np):
- temp_data.append(_recv_array(source=i, tag=0))
+ buf = _recv_array(source=i, tag=0)
+ if buf is not None: temp_data.append(buf)
+ del buf
data[key] = na.concatenate([data[key]] + temp_data, axis=-1)
else:
_send_array(data[key], dest=0, tag=0)
@@ -324,7 +326,7 @@
# First we receive, then we make a new list.
for i in range(1,MPI.COMM_WORLD.size):
buf = _recv_array(source=i, tag=0)
- data = na.concatenate([data, buf])
+ if buf is not None: data = na.concatenate([data, buf])
return data
@parallel_passthrough
Modified: trunk/yt/raven/PlotCollection.py
==============================================================================
--- trunk/yt/raven/PlotCollection.py (original)
+++ trunk/yt/raven/PlotCollection.py Fri Jun 5 13:12:07 2009
@@ -50,7 +50,7 @@
if center == None:
v,self.c = pf.h.find_max("Density") # @todo: ensure no caching
else:
- self.c = center
+ self.c = na.array(center, dtype='float64')
if deliverator_id > 0:
self.submit = True
self._run_id = deliverator_id
@@ -412,6 +412,7 @@
del self.plots[-1].data
del self.plots[-1]
+ @rootonly
def save_book(self, filename):
from pyPdf import PdfFileWriter, PdfFileReader
outfile = PdfFileWriter()
More information about the yt-svn
mailing list