[Yt-svn] yt-commit r1471 - branches/yt-1.5/yt branches/yt-1.5/yt/lagos trunk/yt trunk/yt/lagos

mturk at wrangler.dreamhost.com mturk at wrangler.dreamhost.com
Mon Oct 5 10:58:31 PDT 2009


Author: mturk
Date: Mon Oct  5 10:58:27 2009
New Revision: 1471
URL: http://yt.enzotools.org/changeset/1471

Log:
Added docstrings.  Synced some top-level modules between 1.5 and trunk.



Modified:
   branches/yt-1.5/yt/config.py
   branches/yt-1.5/yt/convenience.py
   branches/yt-1.5/yt/funcs.py
   branches/yt-1.5/yt/lagos/ParallelTools.py
   trunk/yt/config.py
   trunk/yt/convenience.py
   trunk/yt/funcs.py
   trunk/yt/lagos/BaseGridType.py
   trunk/yt/lagos/ParallelTools.py

Modified: branches/yt-1.5/yt/config.py
==============================================================================
--- branches/yt-1.5/yt/config.py	(original)
+++ branches/yt-1.5/yt/config.py	Mon Oct  5 10:58:27 2009
@@ -47,8 +47,8 @@
         'ReconstructHierarchy': 'True',
         'serialize' : 'True',
         'onlydeserialize' : 'False',
-        'loadfieldplugins':'False',
-        'pluginfilename':'yt_plugins.py',
+        'loadfieldplugins':'True',
+        'pluginfilename':'my_plugins.py',
         },
     "yt":{
         'LogFile': 'False',
@@ -88,6 +88,10 @@
                 if not self.has_option(section, opt):
                     self.set(section, opt, val)
     def set(self, section, opt, val):
+        """
+        This sets an option named *opt* to *val* inside *section*, creating
+        *section* if necessary.
+        """
         if not self.has_section(section):
             self.add_section(section)
         ConfigParser.ConfigParser.set(self, section, opt, val)

Modified: branches/yt-1.5/yt/convenience.py
==============================================================================
--- branches/yt-1.5/yt/convenience.py	(original)
+++ branches/yt-1.5/yt/convenience.py	Mon Oct  5 10:58:27 2009
@@ -36,6 +36,12 @@
 from yt.fido import output_type_registry
 
 def all_pfs(max_depth=1, name_spec="*.hierarchy", **kwargs):
+    """
+    This function searches a directory and its sub-directories, up to a depth of
+    *max_depth*, for parameter files.  It looks for the *name_spec* and then
+    instantiates an EnzoStaticOutput from each.  All subsequent *kwargs* are
+    passed on to the EnzoStaticOutput constructor.
+    """
     list_of_names = []
     for i in range(max_depth):
         bb = list('*' * i) + [name_spec]
@@ -45,11 +51,24 @@
         yield lagos.EnzoStaticOutput(fn[:-10], **kwargs)
 
 def max_spheres(width, unit, **kwargs):
+    """
+    This calls :func:`~yt.convenience.all_pfs` and then for each parameter file
+    creates a :class:`~yt.lagos.AMRSphereBase` for each one,
+    centered on the point of highest density, with radius *width* in units of
+    *unit*.
+    """
     for pf in all_pfs(**kwargs):
         v, c = pf.h.find_max("Density")
         yield pf.h.sphere(c, width/pf[unit])
 
 def load(*args ,**kwargs):
+    """
+    This function attempts to determine the base data type of a filename or
+    other set of arguments by calling
+    :meth:`yt.lagos.StaticOutput._is_valid` until it finds a
+    match, at which point it returns an instance of the appropriate
+    :class:`yt.lagos.StaticOutput` subclass.
+    """
     candidates = []
     for n, c in output_type_registry.items():
         if n is None: continue

Modified: branches/yt-1.5/yt/funcs.py
==============================================================================
--- branches/yt-1.5/yt/funcs.py	(original)
+++ branches/yt-1.5/yt/funcs.py	Mon Oct  5 10:58:27 2009
@@ -23,46 +23,17 @@
   along with this program.  If not, see <http://www.gnu.org/licenses/>.
 """
 
-import time, types, signal, inspect, traceback, sys, pdb, rpdb
-import warnings, os
+import time, types, signal, inspect, traceback, sys, pdb, rpdb, os
+import warnings
 import progressbar as pb
 from math import floor, ceil
 from yt.logger import ytLogger as mylog
 
-def signal_print_traceback(signo, frame):
-    print traceback.print_stack(frame)
-
-def signal_problem(signo, frame):
-    raise RuntimeError()
-
-try:
-    signal.signal(signal.SIGUSR1, signal_print_traceback)
-    mylog.debug("SIGUSR1 registered for traceback printing")
-    signal.signal(signal.SIGUSR2, signal_problem)
-    mylog.debug("SIGUSR2 registered for RuntimeError")
-except ValueError:  # Not in main thread
-    pass
-
-def paste_traceback(exc_type, exc, tb):
-    sys.__excepthook__(exc_type, exc, tb)
-    import xmlrpclib, cStringIO
-    p = xmlrpclib.ServerProxy(
-            "http://paste.enzotools.org/xmlrpc/",
-            allow_none=True)
-    s = cStringIO.StringIO()
-    traceback.print_exception(exc_type, exc, tb, file=s)
-    s = s.getvalue()
-    ret = p.pastes.newPaste('pytb', s, None, '', '', True)
-    print
-    print "Traceback pasted to http://paste.enzotools.org/show/%s" % (ret)
-    print
-
-if "--paste" in sys.argv:
-    sys.excepthook = paste_traceback
-if "--rpdb" in sys.argv:
-    sys.excepthook = rpdb.rpdb_excepthook
-    del sys.argv[sys.argv.index("--rpdb")]
+# Some compatibility functions.  In the long run, these *should* disappear as
+# we move toward newer python versions.  Most were implemented to get things
+# running on DataStar.
 
+# If we're running on python2.4, we need a 'wraps' function
 def blank_wrapper(f):
     return lambda a: a
 
@@ -71,25 +42,79 @@
 except ImportError:
     wraps = blank_wrapper
 
+# We need to ensure that we have a defaultdict implementation
+
+class __defaultdict(dict):
+    def __init__(self, func):
+        self.__func = func
+        dict.__init__(self)
+    def __getitem__(self, key):
+        if not self.has_key(key):
+            self.__setitem__(key, self.__func())
+        return dict.__getitem__(self, key)
+
+try:
+    from collections import defaultdict
+except ImportError:
+    defaultdict = __defaultdict
+
+# Some functions for handling sequences and other types
+
 def iterable(obj):
     """
-    Grabbed from Python Cookbook / matploblib.cbook
+    Grabbed from Python Cookbook / matplotlib.cbook.  Returns True or False
+    depending on whether *obj* is iterable.
     """
     try: len(obj)
     except: return False
     return True
 
 def ensure_list(obj):
+    """
+    This function ensures that *obj* is a list.  Typically used to convert a
+    string to a list, for instance ensuring the *fields* as an argument is a
+    list.
+    """
     if obj == None:
         return [obj]
     if not isinstance(obj, types.ListType):
         return [obj]
     return obj
 
+def just_one(obj):
+    # If we have an iterable, sometimes we only want one item
+    if hasattr(obj,'flat'):
+        return obj.flat[0]
+    elif iterable(obj):
+        return obj[0]
+    return obj
+
+# Taken from
+# http://www.goldb.org/goldblog/2008/02/06/PythonConvertSecsIntoHumanReadableTimeStringHHMMSS.aspx
+def humanize_time(secs):
+    """
+    Takes *secs* and returns a nicely formatted string
+    """
+    mins, secs = divmod(secs, 60)
+    hours, mins = divmod(mins, 60)
+    return '%02d:%02d:%02d' % (hours, mins, secs)
+
+#
+# Some function wrappers that come in handy once in a while
+#
+
 def time_execution(func):
     """
     Decorator for seeing how long a given function takes, depending on whether
     or not the global 'yt.timefunctions' config parameter is set.
+
+    This can be used like so:
+
+    .. code-block:: python
+
+       @time_execution
+       def some_longrunning_function(...):
+
     """
     @wraps(func)
     def wrapper(*arg, **kw):
@@ -104,7 +129,78 @@
     else:
         return func
 
+def print_tb(func):
+    """
+    This function is used as a decorator on a function to have the calling stack
+    printed whenever that function is entered.
+
+    This can be used like so:
+
+    .. code-block:: python
+
+       @print_tb
+       def some_deeply_nested_function(...):
+
+    """
+    @wraps(func)
+    def run_func(*args, **kwargs):
+        traceback.print_stack()
+        return func(*args, **kwargs)
+    return run_func
+
+def rootonly(func):
+    """
+    This is a decorator that, when used, will only call the function on the
+    root processor and then broadcast the results of the function to all other
+    processors.
+
+    This can be used like so:
+
+    .. code-block:: python
+
+       @rootonly
+       def some_root_only_function(...):
+
+    """
+    @wraps(func)
+    def donothing(*args, **kwargs):
+        return
+    from yt.config import ytcfg
+    if ytcfg.getint("yt","__parallel_rank") > 0: return donothing
+    return func
+
+def deprecate(func):
+    """
+    This decorator issues a deprecation warning.
+
+    This can be used like so:
+
+    .. code-block:: python
+
+       @deprecate
+       def some_really_old_function(...):
+
+    """
+    @wraps(func)
+    def run_func(*args, **kwargs):
+        warnings.warn("%s has been deprecated and may be removed without notice!" \
+                % func.func_name, DeprecationWarning, stacklevel=2)
+        func(*args, **kwargs)
+    return run_func
+
 def pdb_run(func):
+    """
+    This decorator inserts a pdb session on top of the call-stack into a
+    function.
+
+    This can be used like so:
+
+    .. code-block:: python
+
+       @pdb_run
+       def some_function_to_debug(...):
+
+    """
     @wraps(func)
     def wrapper(*args, **kw):
         pdb.runcall(func, *args, **kw)
@@ -121,6 +217,13 @@
 """
 
 def insert_ipython(num_up=1):
+    """
+    Placed inside a function, this will insert an IPython interpreter at that
+    current location.  This will enabled detailed inspection of the current
+    exeuction environment, as well as (optional) modification of that environment.
+    *num_up* refers to how many frames of the stack get stripped off, and
+    defaults to 1 so that this function itself is stripped off.
+    """
     from IPython.Shell import IPShellEmbed
     stack = inspect.stack()
     frame = inspect.stack()[num_up]
@@ -133,15 +236,33 @@
             local_ns = loc, global_ns = glo)
     del ipshell
 
-class DummyProgressBar:
+
+#
+# Our progress bar types and how to get one
+#
+
+class DummyProgressBar(object):
+    # This progressbar gets handed if we don't
+    # want ANY output
     def __init__(self, *args, **kwargs):
         return
     def update(self, *args, **kwargs):
         return
-    def finish(sefl, *args, **kwargs):
+    def finish(self, *args, **kwargs):
+        return
+
+class ParallelProgressBar(object):
+    # This is just a simple progress bar
+    # that prints on start/stop
+    def __init__(self, title, maxval):
+        self.title = title
+        mylog.info("Starting '%s'", title)
+    def update(self, *args, **kwargs):
         return
+    def finish(self):
+        mylog.info("Finishing '%s'", self.title)
 
-class GUIProgressBar:
+class GUIProgressBar(object):
     def __init__(self, title, maxval):
         import wx
         self.maxval = maxval
@@ -158,14 +279,11 @@
     def finish(self):
         self._pbar.Destroy()
 
-def just_one(obj):
-    if hasattr(obj,'flat'):
-        return obj.flat[0]
-    elif iterable(obj):
-        return obj[0]
-    return obj
-
 def get_pbar(title, maxval):
+    """
+    This returns a progressbar of the most appropriate type, given a *title*
+    and a *maxval*.
+    """
     from yt.config import ytcfg
     if ytcfg.getboolean("yt","inGui"):
         if maxval > ytcfg.getint("reason","minpbar"): # Arbitrary number
@@ -174,6 +292,8 @@
             return DummyProgressBar()
     elif ytcfg.getboolean("yt","suppressStreamLogging"):
         return DummyProgressBar()
+    elif ytcfg.getboolean("yt", "__parallel"):
+        return ParallelProgressBar(title, maxval)
     elif "SAGE_ROOT" in os.environ:
         try:
             from sage.server.support import EMBEDDED_MODE
@@ -188,54 +308,65 @@
                           maxval=maxval).start()
     return pbar
 
-# Taken from
-# http://www.goldb.org/goldblog/2008/02/06/PythonConvertSecsIntoHumanReadableTimeStringHHMMSS.aspx
-def humanize_time(secs):
-    mins, secs = divmod(secs, 60)
-    hours, mins = divmod(mins, 60)
-    return '%02d:%02d:%02d' % (hours, mins, secs)
-
-class __defaultdict(dict):
-    def __init__(self, func):
-        self.__func = func
-        dict.__init__(self)
-    def __getitem__(self, key):
-        if not self.has_key(key):
-            self.__setitem__(key, self.__func())
-        return dict.__getitem__(self, key)
-
-import traceback
-def print_tb(func):
-    @wraps(func)
-    def run_func(*args, **kwargs):
-        traceback.print_stack()
-        return func(*args, **kwargs)
-    return run_func
-
-try:
-    from collections import defaultdict
-except ImportError:
-    defaultdict = __defaultdict
-
-def rootonly(func):
-    @wraps(func)
-    def donothing(*args, **kwargs):
-        return
-    from yt.config import ytcfg
-    if ytcfg.getint("yt","__parallel_rank") > 0: return donothing
-    return func
-
 def only_on_root(func, *args, **kwargs):
+    """
+    This function accepts a *func*, a set of *args* and *kwargs* and then only
+    on the root processor calls the function.  All other processors get "None"
+    handed back.
+    """
     from yt.config import ytcfg
     if not ytcfg.getboolean("yt","__parallel"):
         return func(*args,**kwargs)
     if ytcfg.getint("yt","__parallel_rank") > 0: return
     return func(*args, **kwargs)
 
-def deprecate(func):
-    @wraps(func)
-    def run_func(*args, **kwargs):
-        warnings.warn("%s has been deprecated and may be removed without notice!" \
-                % func.func_name, DeprecationWarning, stacklevel=2)
-        func(*args, **kwargs)
-    return run_func
+#
+# Our signal and traceback handling functions
+#
+
+def signal_print_traceback(signo, frame):
+    print traceback.print_stack(frame)
+
+def signal_problem(signo, frame):
+    raise RuntimeError()
+
+# We use two signals, SIGUSR1 and SIGUSR2.  In a non-threaded environment,
+# we set up handlers to process these by printing the current stack and to
+# raise a RuntimeError.  The latter can be used, inside pdb, to catch an error
+# and then examine the current stack.
+try:
+    signal.signal(signal.SIGUSR1, signal_print_traceback)
+    mylog.debug("SIGUSR1 registered for traceback printing")
+    signal.signal(signal.SIGUSR2, signal_problem)
+    mylog.debug("SIGUSR2 registered for RuntimeError")
+except ValueError:  # Not in main thread
+    pass
+
+def paste_traceback(exc_type, exc, tb):
+    """
+    This is a traceback handler that knows how to paste to the pastebin.
+    Should only be used in sys.excepthook.
+    """
+    sys.__excepthook__(exc_type, exc, tb)
+    import xmlrpclib, cStringIO
+    p = xmlrpclib.ServerProxy(
+            "http://paste.enzotools.org/xmlrpc/",
+            allow_none=True)
+    s = cStringIO.StringIO()
+    traceback.print_exception(exc_type, exc, tb, file=s)
+    s = s.getvalue()
+    ret = p.pastes.newPaste('pytb', s, None, '', '', True)
+    print
+    print "Traceback pasted to http://paste.enzotools.org/show/%s" % (ret)
+    print
+
+# If we recognize one of the arguments on the command line as indicating a
+# different mechanism for handling tracebacks, we attach one of those handlers
+# and remove the argument from sys.argv.
+if "--paste" in sys.argv:
+    sys.excepthook = paste_traceback
+    del sys.argv[sys.argv.index("--paste")]
+if "--rpdb" in sys.argv:
+    sys.excepthook = rpdb.rpdb_excepthook
+    del sys.argv[sys.argv.index("--rpdb")]
+

Modified: branches/yt-1.5/yt/lagos/ParallelTools.py
==============================================================================
--- branches/yt-1.5/yt/lagos/ParallelTools.py	(original)
+++ branches/yt-1.5/yt/lagos/ParallelTools.py	Mon Oct  5 10:58:27 2009
@@ -28,6 +28,7 @@
 import yt.logger, logging
 import itertools, sys, cStringIO
 
+# At import time, we determined whether or not we're being run in parallel.
 if os.path.basename(sys.executable) in \
         ["mpi4py", "embed_enzo",
          "python"+sys.version[:3]+"-mpi"] \
@@ -53,10 +54,17 @@
         f = logging.Formatter("P%03i %s" % (MPI.COMM_WORLD.rank,
                                             yt.logger.fstring))
         yt.logger.rootLogger.handlers[0].setFormatter(f)
+    if ytcfg.getint("yt","LogLevel") < 20:
+        yt.logger.ytLogger.warning(
+          "Log Level is set low -- this could affect parallel performance!")
 else:
     parallel_capable = False
 
 class ObjectIterator(object):
+    """
+    This is a generalized class that accepts a list of objects and then
+    attempts to intelligently iterate over them.
+    """
     def __init__(self, pobj, just_list = False, attr='_grids'):
         self.pobj = pobj
         if hasattr(pobj, attr) and getattr(pobj, attr) is not None:
@@ -81,8 +89,8 @@
         
 class ParallelObjectIterator(ObjectIterator):
     """
-    This takes an object, pobj, that implements ParallelAnalysisInterface,
-    and then does its thing.
+    This takes an object, *pobj*, that implements ParallelAnalysisInterface,
+    and then does its thing, calling initialize and finalize on the object.
     """
     def __init__(self, pobj, just_list = False, attr='_grids',
                  round_robin=False):
@@ -106,6 +114,12 @@
         if not self.just_list: self.pobj._finalize_parallel()
 
 def parallel_simple_proxy(func):
+    """
+    This is a decorator that broadcasts the result of computation on a single
+    processor to all other processors.  To do so, it uses the _processing and
+    _distributed flags in the object to check for blocks.  Meant only to be
+    used on objects that subclass :class:`~yt.lagos.ParallelAnalysisInterface`.
+    """
     if not parallel_capable: return func
     @wraps(func)
     def single_proc_results(self, *args, **kwargs):
@@ -122,8 +136,11 @@
     return single_proc_results
 
 class ParallelDummy(type):
-    # All attributes that don't start with _ get replaced with
-    # parallel_simple_proxy attributes.
+    """
+    This is a base class that, on instantiation, replaces all attributes that
+    don't start with ``_`` with :func:`~yt.lagos.parallel_simple_proxy`-wrapped
+    attributes.  Used as a metaclass.
+    """
     def __init__(cls, name, bases, d):
         super(ParallelDummy, cls).__init__(name, bases, d)
         skip = d.pop("dont_wrap", [])
@@ -136,6 +153,10 @@
                 setattr(cls, attrname, parallel_simple_proxy(attr))
 
 def parallel_passthrough(func):
+    """
+    If we are not run in parallel, this function passes the input back as
+    output; otherwise, the function gets called.  Used as a decorator.
+    """
     @wraps(func)
     def passage(self, data):
         if not self._distributed: return data
@@ -143,6 +164,9 @@
     return passage
 
 def parallel_blocking_call(func):
+    """
+    This decorator blocks on entry and exit of a function.
+    """
     @wraps(func)
     def barrierize(*args, **kwargs):
         mylog.debug("Entering barrier before %s", func.func_name)
@@ -157,6 +181,10 @@
         return func
 
 def parallel_splitter(f1, f2):
+    """
+    This function returns either the function *f1* or *f2* depending on whether
+    or not we're the root processor.  Mainly used in class definitions.
+    """
     @wraps(f1)
     def in_order(*args, **kwargs):
         MPI.COMM_WORLD.Barrier()
@@ -170,6 +198,10 @@
     return in_order
 
 def parallel_root_only(func):
+    """
+    This decorator blocks and calls the function on the root processor,
+    but does not broadcast results to the other processors.
+    """
     @wraps(func)
     def root_only(*args, **kwargs):
         if MPI.COMM_WORLD.rank == 0:
@@ -188,6 +220,10 @@
     return func
 
 class ParallelAnalysisInterface(object):
+    """
+    This is an interface specification providing several useful utility
+    functions for analyzing something in parallel.
+    """
     _grids = None
     _distributed = parallel_capable
 
@@ -326,7 +362,9 @@
         # First we receive, then we make a new list.
         for i in range(1,MPI.COMM_WORLD.size):
             buf = _recv_array(source=i, tag=0)
-            if buf is not None: data = na.concatenate([data, buf])
+            if buf is not None:
+                if data is None: data = buf
+                else: data = na.concatenate([data, buf])
         return data
 
     @parallel_passthrough

Modified: trunk/yt/config.py
==============================================================================
--- trunk/yt/config.py	(original)
+++ trunk/yt/config.py	Mon Oct  5 10:58:27 2009
@@ -89,6 +89,10 @@
                 if not self.has_option(section, opt):
                     self.set(section, opt, val)
     def set(self, section, opt, val):
+        """
+        This sets an option named *opt* to *val* inside *section*, creating
+        *section* if necessary.
+        """
         if not self.has_section(section):
             self.add_section(section)
         ConfigParser.ConfigParser.set(self, section, opt, val)

Modified: trunk/yt/convenience.py
==============================================================================
--- trunk/yt/convenience.py	(original)
+++ trunk/yt/convenience.py	Mon Oct  5 10:58:27 2009
@@ -36,6 +36,12 @@
 from yt.fido import output_type_registry
 
 def all_pfs(max_depth=1, name_spec="*.hierarchy", **kwargs):
+    """
+    This function searches a directory and its sub-directories, up to a depth of
+    *max_depth*, for parameter files.  It looks for the *name_spec* and then
+    instantiates an EnzoStaticOutput from each.  All subsequent *kwargs* are
+    passed on to the EnzoStaticOutput constructor.
+    """
     list_of_names = []
     for i in range(max_depth):
         bb = list('*' * i) + [name_spec]
@@ -45,11 +51,24 @@
         yield lagos.EnzoStaticOutput(fn[:-10], **kwargs)
 
 def max_spheres(width, unit, **kwargs):
+    """
+    This calls :func:`~yt.convenience.all_pfs` and then for each parameter file
+    creates a :class:`~yt.lagos.AMRSphereBase` for each one,
+    centered on the point of highest density, with radius *width* in units of
+    *unit*.
+    """
     for pf in all_pfs(**kwargs):
         v, c = pf.h.find_max("Density")
         yield pf.h.sphere(c, width/pf[unit])
 
 def load(*args ,**kwargs):
+    """
+    This function attempts to determine the base data type of a filename or
+    other set of arguments by calling
+    :meth:`yt.lagos.StaticOutput._is_valid` until it finds a
+    match, at which point it returns an instance of the appropriate
+    :class:`yt.lagos.StaticOutput` subclass.
+    """
     candidates = []
     for n, c in output_type_registry.items():
         if n is None: continue

Modified: trunk/yt/funcs.py
==============================================================================
--- trunk/yt/funcs.py	(original)
+++ trunk/yt/funcs.py	Mon Oct  5 10:58:27 2009
@@ -62,14 +62,19 @@
 
 def iterable(obj):
     """
-    Grabbed from Python Cookbook / matploblib.cbook
+    Grabbed from Python Cookbook / matplotlib.cbook.  Returns True or False
+    depending on whether *obj* is iterable.
     """
     try: len(obj)
     except: return False
     return True
 
 def ensure_list(obj):
-    # This makes sure that we have a list of items
+    """
+    This function ensures that *obj* is a list.  Typically used to convert a
+    string to a list, for instance ensuring the *fields* as an argument is a
+    list.
+    """
     if obj == None:
         return [obj]
     if not isinstance(obj, types.ListType):
@@ -105,7 +110,9 @@
 
     This can be used like so:
 
-    @time_execution
+    .. code-block:: python
+
+       @time_execution
     def some_longrunning_function(...):
 
     """
@@ -123,6 +130,18 @@
         return func
 
 def print_tb(func):
+    """
+    This function is used as a decorator on a function to have the calling stack
+    printed whenever that function is entered.
+
+    This can be used like so:
+
+    .. code-block:: python
+
+       @print_tb
+       def some_deeply_nested_function(...):
+
+    """
     @wraps(func)
     def run_func(*args, **kwargs):
         traceback.print_stack()
@@ -130,6 +149,19 @@
     return run_func
 
 def rootonly(func):
+    """
+    This is a decorator that, when used, will only call the function on the
+    root processor and then broadcast the results of the function to all other
+    processors.
+
+    This can be used like so:
+
+    .. code-block:: python
+
+       @rootonly
+       def some_root_only_function(...):
+
+    """
     @wraps(func)
     def donothing(*args, **kwargs):
         return
@@ -138,6 +170,17 @@
     return func
 
 def deprecate(func):
+    """
+    This decorator issues a deprecation warning.
+
+    This can be used like so:
+
+    .. code-block:: python
+
+       @deprecate
+       def some_really_old_function(...):
+
+    """
     @wraps(func)
     def run_func(*args, **kwargs):
         warnings.warn("%s has been deprecated and may be removed without notice!" \
@@ -146,6 +189,18 @@
     return run_func
 
 def pdb_run(func):
+    """
+    This decorator inserts a pdb session on top of the call-stack into a
+    function.
+
+    This can be used like so:
+
+    .. code-block:: python
+
+       @pdb_run
+       def some_function_to_debug(...):
+
+    """
     @wraps(func)
     def wrapper(*args, **kw):
         pdb.runcall(func, *args, **kw)
@@ -225,6 +280,10 @@
         self._pbar.Destroy()
 
 def get_pbar(title, maxval):
+    """
+    This returns a progressbar of the most appropriate type, given a *title*
+    and a *maxval*.
+    """
     from yt.config import ytcfg
     if ytcfg.getboolean("yt","inGui"):
         if maxval > ytcfg.getint("reason","minpbar"): # Arbitrary number
@@ -283,8 +342,11 @@
 except ValueError:  # Not in main thread
     pass
 
-# This is a traceback handler that knows how to paste to the pastebin.
 def paste_traceback(exc_type, exc, tb):
+    """
+    This is a traceback handler that knows how to paste to the pastebin.
+    Should only be used in sys.excepthook.
+    """
     sys.__excepthook__(exc_type, exc, tb)
     import xmlrpclib, cStringIO
     p = xmlrpclib.ServerProxy(

Modified: trunk/yt/lagos/BaseGridType.py
==============================================================================
--- trunk/yt/lagos/BaseGridType.py	(original)
+++ trunk/yt/lagos/BaseGridType.py	Mon Oct  5 10:58:27 2009
@@ -37,6 +37,8 @@
     _skip_add = True
     _con_args = ('id', 'filename')
 
+    filename = None
+
     def __init__(self, id, filename=None, hierarchy = None):
         self.data = {}
         self.field_parameters = {}

Modified: trunk/yt/lagos/ParallelTools.py
==============================================================================
--- trunk/yt/lagos/ParallelTools.py	(original)
+++ trunk/yt/lagos/ParallelTools.py	Mon Oct  5 10:58:27 2009
@@ -28,6 +28,7 @@
 import yt.logger, logging
 import itertools, sys, cStringIO
 
+# At import time, we determined whether or not we're being run in parallel.
 if os.path.basename(sys.executable) in \
         ["mpi4py", "embed_enzo",
          "python"+sys.version[:3]+"-mpi"] \
@@ -60,6 +61,10 @@
     parallel_capable = False
 
 class ObjectIterator(object):
+    """
+    This is a generalized class that accepts a list of objects and then
+    attempts to intelligently iterate over them.
+    """
     def __init__(self, pobj, just_list = False, attr='_grids'):
         self.pobj = pobj
         if hasattr(pobj, attr) and getattr(pobj, attr) is not None:
@@ -84,8 +89,8 @@
         
 class ParallelObjectIterator(ObjectIterator):
     """
-    This takes an object, pobj, that implements ParallelAnalysisInterface,
-    and then does its thing.
+    This takes an object, *pobj*, that implements ParallelAnalysisInterface,
+    and then does its thing, calling initialize and finalize on the object.
     """
     def __init__(self, pobj, just_list = False, attr='_grids',
                  round_robin=False):
@@ -109,6 +114,12 @@
         if not self.just_list: self.pobj._finalize_parallel()
 
 def parallel_simple_proxy(func):
+    """
+    This is a decorator that broadcasts the result of computation on a single
+    processor to all other processors.  To do so, it uses the _processing and
+    _distributed flags in the object to check for blocks.  Meant only to be
+    used on objects that subclass :class:`~yt.lagos.ParallelAnalysisInterface`.
+    """
     if not parallel_capable: return func
     @wraps(func)
     def single_proc_results(self, *args, **kwargs):
@@ -125,8 +136,11 @@
     return single_proc_results
 
 class ParallelDummy(type):
-    # All attributes that don't start with _ get replaced with
-    # parallel_simple_proxy attributes.
+    """
+    This is a base class that, on instantiation, replaces all attributes that
+    don't start with ``_`` with :func:`~yt.lagos.parallel_simple_proxy`-wrapped
+    attributes.  Used as a metaclass.
+    """
     def __init__(cls, name, bases, d):
         super(ParallelDummy, cls).__init__(name, bases, d)
         skip = d.pop("dont_wrap", [])
@@ -139,6 +153,10 @@
                 setattr(cls, attrname, parallel_simple_proxy(attr))
 
 def parallel_passthrough(func):
+    """
+    If we are not run in parallel, this function passes the input back as
+    output; otherwise, the function gets called.  Used as a decorator.
+    """
     @wraps(func)
     def passage(self, data):
         if not self._distributed: return data
@@ -146,6 +164,9 @@
     return passage
 
 def parallel_blocking_call(func):
+    """
+    This decorator blocks on entry and exit of a function.
+    """
     @wraps(func)
     def barrierize(*args, **kwargs):
         mylog.debug("Entering barrier before %s", func.func_name)
@@ -160,6 +181,10 @@
         return func
 
 def parallel_splitter(f1, f2):
+    """
+    This function returns either the function *f1* or *f2* depending on whether
+    or not we're the root processor.  Mainly used in class definitions.
+    """
     @wraps(f1)
     def in_order(*args, **kwargs):
         MPI.COMM_WORLD.Barrier()
@@ -173,6 +198,10 @@
     return in_order
 
 def parallel_root_only(func):
+    """
+    This decorator blocks and calls the function on the root processor,
+    but does not broadcast results to the other processors.
+    """
     @wraps(func)
     def root_only(*args, **kwargs):
         if MPI.COMM_WORLD.rank == 0:
@@ -191,6 +220,10 @@
     return func
 
 class ParallelAnalysisInterface(object):
+    """
+    This is an interface specification providing several useful utility
+    functions for analyzing something in parallel.
+    """
     _grids = None
     _distributed = parallel_capable
 



More information about the yt-svn mailing list