[Yt-svn] yt-commit r1047 - in trunk/yt/lagos: . hop

mturk at wrangler.dreamhost.com
Sat Dec 27 07:25:39 PST 2008


Author: mturk
Date: Sat Dec 27 07:25:38 2008
New Revision: 1047
URL: http://yt.spacepope.org/changeset/1047

Log:

Sub-calls to methods were getting handled as proxy calls, so I added a state
variable ("_processing") to check whether that is the case and, if so, to
avoid passing the results back and forth.
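
To illustrate the problem being fixed: when a proxied method calls another
proxied method on the same object, the inner call used to go back through the
proxy machinery (and get broadcast a second time).  Below is a minimal,
MPI-free sketch of the guard; the Halo/masses/total_mass names are
illustrative only, not the actual yt classes, and the real ParallelTools
decorator (see the diff below) also broadcasts the result from the owning
processor.

    from functools import wraps

    def proxy(func):
        @wraps(func)
        def wrapper(self, *args, **kwargs):
            if self._processing or not self._distributed:
                # Serial run, or already inside a proxied call: call straight
                # through with no extra broadcast/barrier.
                return func(self, *args, **kwargs)
            self._processing = True
            try:
                retval = func(self, *args, **kwargs)
            finally:
                self._processing = False
            # ... the real code Bcasts retval from self._owner here ...
            return retval
        return wrapper

    class Halo(object):
        _distributed = True
        _processing = False

        @proxy
        def masses(self):
            return [1.0, 2.0, 3.0]

        @proxy
        def total_mass(self):
            # Nested proxied call; with _processing set it short-circuits
            # instead of being treated as a fresh proxy call.
            return sum(self.masses())

    print(Halo().total_mass())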

The current solution for "write_out" is somewhat sub-par, because it relies on
trickery and nonsense, but I may change it in the future.  It might work
better to simply construct a string that gets written out in one go.
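
For reference, that "one big string" approach might look roughly like the
sketch below.  This is only an illustration of the idea, not the HopList code;
it assumes each group exposes the id, total_mass() and get_size() members used
in the diff below.

    def write_out_string(groups):
        # Build every line in memory first, then return a single string that
        # the root processor can write with one f.write() call.
        lines = ["\t".join(["# Group", "Mass", "# part", "max dens"])]
        for group in groups:
            lines.append("%10i\t%0.9e\t%10i"
                         % (group.id, group.total_mass(), group.get_size()))
        return "\n".join(lines) + "\n"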

Next up is sorting, and caching of derived quantities across processors to
avoid too much communication.  (Although at this point, the communication -- I
think -- should be such a minor part of the process that it won't be too big an
issue.)
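
Purely as a sketch of where that caching might go (this is speculation about
future work, not anything in this changeset): a derived quantity could be
memoized on the instance, so repeated calls do not trigger another round of
computation or communication.

    from functools import wraps

    def cached_quantity(func):
        # Store the result on the instance the first time the quantity is
        # computed; later calls return the cached value instead of
        # recomputing (and re-communicating) it.
        @wraps(func)
        def wrapper(self):
            cache = self.__dict__.setdefault("_quantity_cache", {})
            if func.__name__ not in cache:
                cache[func.__name__] = func(self)
            return cache[func.__name__]
        return wrapper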



Modified:
   trunk/yt/lagos/ParallelTools.py
   trunk/yt/lagos/hop/SS_HopOutput.py

Modified: trunk/yt/lagos/ParallelTools.py
==============================================================================
--- trunk/yt/lagos/ParallelTools.py	(original)
+++ trunk/yt/lagos/ParallelTools.py	Sat Dec 27 07:25:38 2008
@@ -26,7 +26,7 @@
 from yt.lagos import *
 from yt.funcs import *
 import yt.logger
-import itertools, sys
+import itertools, sys, cStringIO
 
 if os.path.basename(sys.executable) in ["mpi4py", "embed_enzo"] \
     or "--parallel" in sys.argv or '_parallel' in dir(sys):
@@ -114,10 +114,12 @@
     @wraps(func)
     def single_proc_results(self, *args, **kwargs):
         retval = None
-        if not self._distributed:
+        if self._processing or not self._distributed:
             return func(self, *args, **kwargs)
         if self._owner == MPI.COMM_WORLD.rank:
+            self._processing = True
             retval = func(self, *args, **kwargs)
+            self._processing = False
         retval = MPI.COMM_WORLD.Bcast(retval, root=self._owner)
         MPI.COMM_WORLD.Barrier()
         return retval
@@ -319,3 +321,10 @@
         if not parallel_capable: return
         obj._owner = MPI.COMM_WORLD.rank
         obj._distributed = True
+
+    def _write_on_root(self, fn):
+        if not parallel_capable: return open(fn, "w")
+        if MPI.COMM_WORLD.rank == 0:
+            return open(fn, "w")
+        else:
+            return cStringIO.StringIO()

Modified: trunk/yt/lagos/hop/SS_HopOutput.py
==============================================================================
--- trunk/yt/lagos/hop/SS_HopOutput.py	(original)
+++ trunk/yt/lagos/hop/SS_HopOutput.py	Sat Dec 27 07:25:38 2008
@@ -112,7 +112,10 @@
         """
         Write out standard HOP information to *filename*.
         """
-        f = open(filename,"w")
+        if hasattr(filename, 'write'):
+            f = filename
+        else:
+            f = open(filename,"w")
         f.write("\t".join(["# Group","Mass","# part","max dens"
                            "x","y","z", "center-of-mass",
                            "x","y","z",
@@ -120,7 +123,7 @@
         for group in self:
             f.write("%10i\t" % group.id)
             f.write("%0.9e\t" % group.total_mass())
-            f.write("%10i\t" % group.indices.size)
+            f.write("%10i\t" % group.get_size())
             f.write("%0.9e\t" % group.maximum_density())
             f.write("\t".join(["%0.9e" % v for v in group.maximum_density_location()]))
             f.write("\t")
@@ -130,6 +133,7 @@
             f.write("\t")
             f.write("%0.9e\t" % group.maximum_radius())
             f.write("\n")
+            f.flush()
         f.close()
 
 class HopIterator(object):
@@ -149,6 +153,7 @@
     """
     __metaclass__ = ParallelDummy # This will proxy up our methods
     _distributed = False
+    _processing = False
     _owner = 0
     indices = None
     dont_wrap = ["get_sphere"]
@@ -160,6 +165,7 @@
         if indices is not None: self.indices = hop_output._base_indices[indices]
         # We assume that if indices = None, the instantiator has OTHER plans
         # for us -- i.e., setting it somehow else
+
     def center_of_mass(self):
         """
         Calculate and return the center of mass.
@@ -235,6 +241,9 @@
                         center, radius=radius)
         return sphere
 
+    def get_size(self):
+        return self.indices.size
+
 class HaloFinder(HopList, ParallelAnalysisInterface):
     def __init__(self, pf, threshold=160.0, dm_only=True):
         self.pf = pf
@@ -263,7 +272,6 @@
     def _parse_hoplist(self):
         groups, max_dens, hi  = [], {}, 0
         LE, RE = self.bounds
-        print LE, RE
         for halo in self._groups:
             this_max_dens = halo.maximum_density_location()
             # if the most dense particle is in the box, keep it
@@ -283,7 +291,6 @@
         self._max_dens = max_dens
 
     def _join_hoplists(self):
-        from mpi4py import MPI
         # First we get the total number of halos the entire collection
         # has identified
         # Note I have added a new method here to help us get information
@@ -327,3 +334,7 @@
             arr = self.data_source["particle_position_%s" % ax]
             arr[arr < LE[i]-self.padding] += dw[i]
             arr[arr > RE[i]+self.padding] -= dw[i]
+
+    def write_out(self, filename):
+        f = self._write_on_root(filename)
+        HopList.write_out(self, f)


