[Yt-svn] commit/yt: 43 new changesets

Bitbucket commits-noreply at bitbucket.org
Sat Oct 15 08:45:21 PDT 2011


43 new changesets in yt:

http://bitbucket.org/yt_analysis/yt/changeset/1f1410e5df8c/
changeset:   1f1410e5df8c
branch:      deliberate_fields
user:        MatthewTurk
date:        2011-05-24 22:02:09
summary:     Initial import of the new field info containers.

This removes the idea of "Borg" field info containers, which ultimately may
need to reappear, in favor of single instances of classes.  Re-importing may
wipe out changes, but I will address that as necessary.  The new behavior is to
create field info containers specific to each PF, with fallbacks.  The fallback
mechanism is designed to make it clearer to whom each field belongs.
Additionally, I have added the behavior that any fields found in the file will
replace the fields found in FieldInfo.  However, these fields will also lose
their units specification, and unless they are explicitly converted via
conversion functions found in the file, their unit conversions will default
to 1.0.

Right now only Enzo works, so "yt.mods" fails, but importing from the enzo
frontend API works.
affected #:  4 files (-1 bytes)
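
For orientation, the lookup behavior this changeset introduces can be
sketched in a few self-contained lines; the SketchFieldInfo class and the
string values below are illustrative, and only create_with_fallback and
__missing__ mirror the diff that follows:

    class SketchFieldInfo(dict):
        # A per-parameter-file container; misses fall through to a shared
        # fallback, so it is always clear which container "owns" a field.
        fallback = None

        @classmethod
        def create_with_fallback(cls, fallback):
            obj = cls()
            obj.fallback = fallback
            return obj

        def __missing__(self, key):
            if self.fallback is None:
                raise KeyError(key)
            return self.fallback[key]

    universal = SketchFieldInfo()
    universal["Density"] = "universal definition, with units"

    per_pf = SketchFieldInfo.create_with_fallback(universal)
    per_pf["Density"] = "definition from the file, conversion defaults to 1.0"

    print(per_pf["Density"])           # the per-PF override wins
    print(per_pf.fallback["Density"])  # the universal definition survives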

--- a/yt/data_objects/field_info_container.py	Mon May 23 23:25:53 2011 -0400
+++ b/yt/data_objects/field_info_container.py	Tue May 24 16:02:09 2011 -0400
@@ -33,34 +33,14 @@
 
 from yt.funcs import *
 
-class FieldInfoContainer(object): # We are all Borg.
+class FieldInfoContainer(dict): # We are all Borg.
     """
     This is a generic field container.  It contains a list of potential derived
     fields, all of which know how to act on a data object and return a value.  This
     object handles converting units as well as validating the availability of a
     given field.
     """
-    _shared_state = {}
-    _universal_field_list = {}
-    def __new__(cls, *args, **kwargs):
-        self = object.__new__(cls, *args, **kwargs)
-        self.__dict__ = cls._shared_state
-        return self
-    def __getitem__(self, key):
-        if key in self._universal_field_list:
-            return self._universal_field_list[key]
-        raise KeyError
-    def keys(self):
-        """
-        Return all the field names this object knows about.
-        """
-        return self._universal_field_list.keys()
-    def __iter__(self):
-        return self._universal_field_list.iterkeys()
-    def __setitem__(self, key, val):
-        self._universal_field_list[key] = val
-    def has_key(self, key):
-        return key in self._universal_field_list
+    fallback = None
     def add_field(self, name, function = None, **kwargs):
         """
         Add a new field, along with supplemental metadata, to the list of
@@ -74,6 +54,24 @@
                 return function
             return create_function
         self[name] = DerivedField(name, function, **kwargs)
+
+    def has_key(self, key):
+        # This gets used a lot
+        if key in self: return True
+        if self.fallback is None: return False
+        return key in self.fallback
+
+    def __missing__(self, key):
+        if self.fallback is None: raise KeyError
+        return self.fallback[key]
+
+    @classmethod
+    def create_with_fallback(cls, fallback):
+        obj = cls()
+        obj.fallback = fallback
+        return obj
+
+
 FieldInfo = FieldInfoContainer()
 add_field = FieldInfo.add_field
 
@@ -86,24 +84,6 @@
         return function
     return inner_decorator
 
-class CodeFieldInfoContainer(FieldInfoContainer):
-    def __setitem__(self, key, val):
-        self._field_list[key] = val
-    def __iter__(self):
-        return itertools.chain(self._field_list.iterkeys(),
-                        self._universal_field_list.iterkeys())
-    def keys(self):
-        return set(self._field_list.keys() + self._universal_field_list.keys())
-    def has_key(self, key):
-        return key in self._universal_field_list \
-            or key in self._field_list
-    def __getitem__(self, key):
-        if key in self._field_list:
-            return self._field_list[key]
-        if key in self._universal_field_list:
-            return self._universal_field_list[key]
-        raise KeyError(key)
-
 class ValidationException(Exception):
     pass
 


--- a/yt/frontends/enzo/api.py	Mon May 23 23:25:53 2011 -0400
+++ b/yt/frontends/enzo/api.py	Tue May 24 16:02:09 2011 -0400
@@ -39,8 +39,9 @@
       EnzoStaticOutputInMemory
 
 from .fields import \
-      EnzoFieldContainer, \
       EnzoFieldInfo, \
+      Enzo2DFieldInfo, \
+      Enzo1DFieldInfo, \
       add_enzo_field, \
       add_enzo_1d_field, \
       add_enzo_2d_field


--- a/yt/frontends/enzo/data_structures.py	Mon May 23 23:25:53 2011 -0400
+++ b/yt/frontends/enzo/data_structures.py	Tue May 24 16:02:09 2011 -0400
@@ -45,13 +45,16 @@
     AMRHierarchy
 from yt.data_objects.static_output import \
     StaticOutput
+from yt.data_objects.field_info_container import \
+    FieldInfoContainer
 from yt.utilities.definitions import mpc_conversion
 from yt.utilities import hdf5_light_reader
 from yt.utilities.logger import ytLogger as mylog
 
 from .definitions import parameterDict
-from .fields import EnzoFieldContainer, Enzo1DFieldContainer, \
-    Enzo2DFieldContainer, add_enzo_field
+from .fields import \
+    EnzoFieldInfo, Enzo2DFieldInfo, Enzo1DFieldInfo, \
+    add_enzo_field, add_enzo_2d_field, add_enzo_1d_field
 
 from yt.utilities.parallel_tools.parallel_analysis_interface import \
     parallel_blocking_call
@@ -406,7 +409,13 @@
 
     def _setup_unknown_fields(self):
         for field in self.field_list:
-            if field in self.parameter_file.field_info: continue
+            if field in self.parameter_file.field_info:
+                ff = self.parameter_file.field_info[field]
+                if "lambda" in str(ff._function): continue
+                # By allowing a backup, we don't mandate that it's found in our
+                # current field info.  This means we'll instead simply override
+                # it.
+                self.parameter_file.field_info.pop(field, None)
             mylog.info("Adding %s to list of fields", field)
             cf = None
             if self.parameter_file.has_key(field):
@@ -628,7 +637,7 @@
     Enzo-specific output, set at a fixed time.
     """
     _hierarchy_class = EnzoHierarchy
-    _fieldinfo_class = EnzoFieldContainer
+    _fieldinfo_fallback = EnzoFieldInfo
     def __init__(self, filename, data_style=None,
                  parameter_override = None,
                  conversion_override = None,
@@ -670,11 +679,12 @@
         if self["TopGridRank"] == 1: self._setup_1d()
         elif self["TopGridRank"] == 2: self._setup_2d()
 
-        self.field_info = self._fieldinfo_class()
+        self.field_info = FieldInfoContainer.create_with_fallback(
+                            self._fieldinfo_fallback)
 
     def _setup_1d(self):
         self._hierarchy_class = EnzoHierarchy1D
-        self._fieldinfo_class = Enzo1DFieldContainer
+        self._fieldinfo_fallback = Enzo1DFieldInfo
         self.domain_left_edge = \
             na.concatenate([self["DomainLeftEdge"], [0.0, 0.0]])
         self.domain_right_edge = \
@@ -682,7 +692,7 @@
 
     def _setup_2d(self):
         self._hierarchy_class = EnzoHierarchy2D
-        self._fieldinfo_class = Enzo2DFieldContainer
+        self._fieldinfo_fallback = Enzo2DFieldInfo
         self.domain_left_edge = \
             na.concatenate([self["DomainLeftEdge"], [0.0]])
         self.domain_right_edge = \
@@ -908,7 +918,8 @@
 
         StaticOutput.__init__(self, "InMemoryParameterFile", self._data_style)
 
-        self.field_info = self._fieldinfo_class()
+        self.field_info = FieldInfoContainer.create_with_fallback(
+                            self._fieldinfo_fallback)
 
     def _parse_parameter_file(self):
         enzo = self._obtain_enzo()

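The override rule in _setup_unknown_fields above can be paraphrased in
isolation; DummyField and the containers here are hypothetical stand-ins
for yt's real objects:

    def _some_derived_field(field, data):
        return None

    class DummyField(object):
        def __init__(self, function):
            self._function = function

    field_info = {
        "Density": DummyField(_some_derived_field),  # a real definition
        "Pressure": DummyField(lambda a, b: None),   # an auto-added stub
    }

    for field in ["Density", "Pressure", "Bx"]:      # fields found on disk
        if field in field_info:
            ff = field_info[field]
            if "lambda" in str(ff._function):
                continue  # already a do-nothing stub; leave it alone
            # A real definition exists, but the on-disk field should win,
            # so drop the stored definition before re-adding from the file.
            field_info.pop(field, None)
        print("Adding %s to list of fields" % field)

Here "Density" and "Bx" get (re-)added from the file while the "Pressure"
stub is kept, matching the comment in the hunk above.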

--- a/yt/frontends/enzo/fields.py	Mon May 23 23:25:53 2011 -0400
+++ b/yt/frontends/enzo/fields.py	Tue May 24 16:02:09 2011 -0400
@@ -26,7 +26,8 @@
 import numpy as na
 
 from yt.data_objects.field_info_container import \
-    CodeFieldInfoContainer, \
+    FieldInfoContainer, \
+    FieldInfo, \
     ValidateParameter, \
     ValidateDataField, \
     ValidateProperty, \
@@ -37,13 +38,7 @@
     mh
 import yt.utilities.amr_utils as amr_utils
 
-class EnzoFieldContainer(CodeFieldInfoContainer):
-    """
-    This is a container for Enzo-specific fields.
-    """
-    _shared_state = {}
-    _field_list = {}
-EnzoFieldInfo = EnzoFieldContainer()
+EnzoFieldInfo = FieldInfoContainer.create_with_fallback(FieldInfo)
 add_enzo_field = EnzoFieldInfo.add_field
 
 add_field = add_enzo_field
@@ -401,12 +396,7 @@
 # Now we do overrides for 2D fields
 #
 
-class Enzo2DFieldContainer(CodeFieldInfoContainer):
-    _shared_state = {}
-    _field_list = EnzoFieldContainer._field_list.copy()
-# We make a copy of the dict from the other, so we
-# can now update it...
-Enzo2DFieldInfo = Enzo2DFieldContainer()
+Enzo2DFieldInfo = FieldInfoContainer.create_with_fallback(EnzoFieldInfo)
 add_enzo_2d_field = Enzo2DFieldInfo.add_field
 
 def _CellArea(field, data):
@@ -444,12 +434,7 @@
 # Now we do overrides for 1D fields
 #
 
-class Enzo1DFieldContainer(CodeFieldInfoContainer):
-    _shared_state = {}
-    _field_list = EnzoFieldContainer._field_list.copy()
-# We make a copy of the dict from the other, so we
-# can now update it...
-Enzo1DFieldInfo = Enzo1DFieldContainer()
+Enzo1DFieldInfo = FieldInfoContainer.create_with_fallback(EnzoFieldInfo)
 add_enzo_1d_field = Enzo1DFieldInfo.add_field
 
 def _CellLength(field, data):


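To make the layering concrete: after this file, Enzo2DFieldInfo and
Enzo1DFieldInfo fall back to EnzoFieldInfo, which falls back to the
universal FieldInfo.  A tiny self-contained sketch of that chain (the
Chain class is hypothetical; the diff uses
FieldInfoContainer.create_with_fallback for the same purpose):

    class Chain(dict):
        def __init__(self, fallback=None):
            dict.__init__(self)
            self.fallback = fallback
        def __missing__(self, key):
            if self.fallback is None:
                raise KeyError(key)
            return self.fallback[key]

    FieldInfo = Chain()
    EnzoFieldInfo = Chain(fallback=FieldInfo)
    Enzo2DFieldInfo = Chain(fallback=EnzoFieldInfo)

    FieldInfo["Density"] = "generic definition"
    EnzoFieldInfo["Density"] = "Enzo-specific definition"

    # A 2D lookup walks the chain until the nearest definition is found.
    assert Enzo2DFieldInfo["Density"] == "Enzo-specific definition"
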
http://bitbucket.org/yt_analysis/yt/changeset/15f8f2d9bc1b/
changeset:   15f8f2d9bc1b
branch:      deliberate_fields
user:        MatthewTurk
date:        2011-05-26 03:18:42
summary:     Updated all the frontends to use the new field types.  Fixed a
weird bug when looking for a key in a fallback field container.
affected #:  32 files (-1 bytes)

--- a/yt/data_objects/api.py	Tue May 24 16:02:09 2011 -0400
+++ b/yt/data_objects/api.py	Wed May 25 21:18:42 2011 -0400
@@ -68,7 +68,6 @@
 from field_info_container import \
     FieldInfoContainer, \
     FieldInfo, \
-    CodeFieldInfoContainer, \
     NeedsGridType, \
     NeedsOriginalGrid, \
     NeedsDataField, \


--- a/yt/data_objects/field_info_container.py	Tue May 24 16:02:09 2011 -0400
+++ b/yt/data_objects/field_info_container.py	Wed May 25 21:18:42 2011 -0400
@@ -59,7 +59,7 @@
         # This gets used a lot
         if key in self: return True
         if self.fallback is None: return False
-        return key in self.fallback
+        return self.fallback.has_key(key)
 
     def __missing__(self, key):
         if self.fallback is None: raise KeyError
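
The one-line change above matters because, for a dict subclass, the "in"
operator uses dict.__contains__, which consults neither __missing__ nor the
fallback.  "key in self.fallback" therefore stops after a single level,
while the recursive has_key() walks the whole chain.  A sketch with
hypothetical names:

    class FI(dict):
        def __init__(self, fallback=None):
            dict.__init__(self)
            self.fallback = fallback
        def __missing__(self, key):
            if self.fallback is None:
                raise KeyError(key)
            return self.fallback[key]
        def has_key(self, key):
            if key in self: return True          # own entries only
            if self.fallback is None: return False
            return self.fallback.has_key(key)    # recurse down the chain

    base = FI(); base["Density"] = 1
    mid = FI(fallback=base)
    top = FI(fallback=mid)

    print("Density" in mid)        # False: __contains__ ignores fallbacks
    print(top.has_key("Density"))  # True: the recursive check reaches base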


--- a/yt/frontends/art/api.py	Tue May 24 16:02:09 2011 -0400
+++ b/yt/frontends/art/api.py	Wed May 25 21:18:42 2011 -0400
@@ -34,7 +34,6 @@
       ARTStaticOutput
 
 from .fields import \
-      ARTFieldContainer, \
       ARTFieldInfo, \
       add_art_field
 


--- a/yt/frontends/art/data_structures.py	Tue May 24 16:02:09 2011 -0400
+++ b/yt/frontends/art/data_structures.py	Wed May 25 21:18:42 2011 -0400
@@ -37,8 +37,10 @@
       AMRHierarchy
 from yt.data_objects.static_output import \
       StaticOutput
-from .fields import ARTFieldContainer
-from .fields import add_field
+from yt.data_objects.field_info_container import \
+    FieldInfoContainer
+from .fields import ARTFieldInfo, add_art_field
+from .fields import add_art_field
 from yt.utilities.definitions import \
     mpc_conversion
 from yt.utilities.io_handler import \
@@ -353,7 +355,7 @@
                         return data.convert(f)
                     return _convert_function
                 cf = external_wrapper(field)
-            add_field(field, lambda a, b: None,
+            add_art_field(field, lambda a, b: None,
                       convert_function=cf, take_log=False)
 
     def _setup_derived_fields(self):
@@ -368,7 +370,7 @@
 
 class ARTStaticOutput(StaticOutput):
     _hierarchy_class = ARTHierarchy
-    _fieldinfo_class = ARTFieldContainer
+    _fieldinfo_fallback = ARTFieldInfo
     _handle = None
     
     def __init__(self, filename, data_style='art',
@@ -376,7 +378,8 @@
         StaticOutput.__init__(self, filename, data_style)
         self.storage_filename = storage_filename
         
-        self.field_info = self._fieldinfo_class()
+        self.field_info = FieldInfoContainer.create_with_fallback(
+                            self._fieldinfo_fallback)
         self.dimensionality = 3
         self.refine_by = 2
         self.parameters["HydroMethod"] = 'art'


--- a/yt/frontends/art/fields.py	Tue May 24 16:02:09 2011 -0400
+++ b/yt/frontends/art/fields.py	Wed May 25 21:18:42 2011 -0400
@@ -24,7 +24,8 @@
 """
 
 from yt.data_objects.field_info_container import \
-    CodeFieldInfoContainer, \
+    FieldInfoContainer, \
+    FieldInfo, \
     ValidateParameter, \
     ValidateDataField, \
     ValidateProperty, \
@@ -34,12 +35,7 @@
 from yt.utilities.physical_constants import \
     boltzmann_constant_cgs, mass_hydrogen_cgs
 
-import pdb
-
-class ARTFieldContainer(CodeFieldInfoContainer):
-    _shared_state = {}
-    _field_list = {}
-ARTFieldInfo = ARTFieldContainer()
+ARTFieldInfo = FieldInfoContainer.create_with_fallback(FieldInfo)
 add_art_field = ARTFieldInfo.add_field
 
 add_field = add_art_field


--- a/yt/frontends/castro/api.py	Tue May 24 16:02:09 2011 -0400
+++ b/yt/frontends/castro/api.py	Wed May 25 21:18:42 2011 -0400
@@ -34,7 +34,6 @@
       CastroStaticOutput
 
 from .fields import \
-      CastroFieldContainer, \
       CastroFieldInfo, \
       add_castro_field
 


--- a/yt/frontends/castro/data_structures.py	Tue May 24 16:02:09 2011 -0400
+++ b/yt/frontends/castro/data_structures.py	Wed May 25 21:18:42 2011 -0400
@@ -55,9 +55,11 @@
     castro_particle_field_names, \
     boxlib_bool_to_int
 
+from yt.data_objects.field_info_container import \
+    FieldInfoContainer
 from .fields import \
-    CastroFieldContainer, \
-    add_field
+    CastroFieldInfo, \
+    add_castro_field
 
 
 class CastroGrid(AMRGridPatch):
@@ -421,10 +423,10 @@
                         return data.convert(f)
                     return _convert_function
                 cf = external_wrapper(field)
-                # Note that we call add_field on the field_info directly.  This
+                # Note that we call add_castro_field on the field_info directly.  This
                 # will allow the same field detection mechanism to work for 1D, 2D
                 # and 3D fields.
-                self.pf.field_info.add_field(
+                self.pf.field_info.add_castro_field(
                         field, lambda a, b: None,
                         convert_function=cf, take_log=False,
                         particle_type=True)
@@ -460,7 +462,7 @@
                         return data.convert(f)
                     return _convert_function
                 cf = external_wrapper(field)
-            add_field(field, lambda a, b: None,
+            add_castro_field(field, lambda a, b: None,
                       convert_function=cf, take_log=False)
 
 
@@ -489,7 +491,7 @@
     *filename*, without looking at the Castro hierarchy.
     """
     _hierarchy_class = CastroHierarchy
-    _fieldinfo_class = CastroFieldContainer
+    _fieldinfo_fallback = CastroFieldInfo
 
     def __init__(self, plotname, paramFilename=None, fparamFilename=None,
                  data_style='castro_native', paranoia=False,
@@ -515,7 +517,8 @@
 
         StaticOutput.__init__(self, plotname.rstrip("/"),
                               data_style='castro_native')
-        self.field_info = self._fieldinfo_class()
+        self.field_info = FieldInfoContainer.create_with_fallback(
+                            self._fieldinfo_fallback)
 
         # These should maybe not be hardcoded?
         self.parameters["HydroMethod"] = 'castro' # always PPM DE


--- a/yt/frontends/castro/fields.py	Tue May 24 16:02:09 2011 -0400
+++ b/yt/frontends/castro/fields.py	Wed May 25 21:18:42 2011 -0400
@@ -25,7 +25,8 @@
 from yt.utilities.physical_constants import \
     mh, kboltz
 from yt.data_objects.field_info_container import \
-    CodeFieldInfoContainer, \
+    FieldInfoContainer, \
+    FieldInfo, \
     ValidateParameter, \
     ValidateDataField, \
     ValidateProperty, \
@@ -33,13 +34,7 @@
     ValidateGridType
 import yt.data_objects.universal_fields
 
-class CastroFieldContainer(CodeFieldInfoContainer):
-    """
-    All Castro-specific fields are stored in here.
-    """
-    _shared_state = {}
-    _field_list = {}
-CastroFieldInfo = CastroFieldContainer()
+CastroFieldInfo = FieldInfoContainer.create_with_fallback(FieldInfo)
 add_castro_field = CastroFieldInfo.add_field
 
 


--- a/yt/frontends/chombo/api.py	Tue May 24 16:02:09 2011 -0400
+++ b/yt/frontends/chombo/api.py	Wed May 25 21:18:42 2011 -0400
@@ -34,7 +34,6 @@
       ChomboStaticOutput
 
 from .fields import \
-      ChomboFieldContainer, \
       ChomboFieldInfo, \
       add_chombo_field
 


--- a/yt/frontends/chombo/data_structures.py	Tue May 24 16:02:09 2011 -0400
+++ b/yt/frontends/chombo/data_structures.py	Wed May 25 21:18:42 2011 -0400
@@ -55,7 +55,9 @@
 from yt.utilities.parallel_tools.parallel_analysis_interface import \
      parallel_root_only
 
-from .fields import ChomboFieldContainer
+from yt.data_objects.field_info_container import \
+    FieldInfoContainer
+from .fields import ChomboFieldInfo
 
 class ChomboGrid(AMRGridPatch):
     _id_offset = 0
@@ -175,7 +177,7 @@
 
 class ChomboStaticOutput(StaticOutput):
     _hierarchy_class = ChomboHierarchy
-    _fieldinfo_class = ChomboFieldContainer
+    _fieldinfo_fallback = ChomboFieldInfo
     
     def __init__(self, filename, data_style='chombo_hdf5',
                  storage_filename = None, ini_filename = None):
@@ -184,7 +186,8 @@
         self.ini_filename = ini_filename
         StaticOutput.__init__(self,filename,data_style)
         self.storage_filename = storage_filename
-        self.field_info = self._fieldinfo_class()
+        self.field_info = FieldInfoContainer.create_with_fallback(
+                            self._fieldinfo_fallback)
         
     def _set_units(self):
         """


--- a/yt/frontends/chombo/fields.py	Tue May 24 16:02:09 2011 -0400
+++ b/yt/frontends/chombo/fields.py	Wed May 25 21:18:42 2011 -0400
@@ -24,7 +24,8 @@
 """
 
 from yt.data_objects.field_info_container import \
-    CodeFieldInfoContainer, \
+    FieldInfoContainer, \
+    FieldInfo, \
     ValidateParameter, \
     ValidateDataField, \
     ValidateProperty, \
@@ -32,10 +33,7 @@
     ValidateGridType
 import yt.data_objects.universal_fields
 
-class ChomboFieldContainer(CodeFieldInfoContainer):
-    _shared_state = {}
-    _field_list = {}
-ChomboFieldInfo = ChomboFieldContainer()
+ChomboFieldInfo = FieldInfoContainer.create_with_fallback(FieldInfo)
 add_chombo_field = ChomboFieldInfo.add_field
 
 add_field = add_chombo_field


--- a/yt/frontends/flash/api.py	Tue May 24 16:02:09 2011 -0400
+++ b/yt/frontends/flash/api.py	Wed May 25 21:18:42 2011 -0400
@@ -34,7 +34,6 @@
       FLASHStaticOutput
 
 from .fields import \
-      FLASHFieldContainer, \
       FLASHFieldInfo, \
       add_flash_field
 


--- a/yt/frontends/flash/data_structures.py	Tue May 24 16:02:09 2011 -0400
+++ b/yt/frontends/flash/data_structures.py	Wed May 25 21:18:42 2011 -0400
@@ -41,8 +41,10 @@
     io_registry
 
 from .fields import \
-    FLASHFieldContainer, \
-    add_field
+    FLASHFieldInfo, \
+    add_flash_field
+from yt.data_objects.field_info_container import \
+    FieldInfoContainer
 
 class FLASHGrid(AMRGridPatch):
     _id_offset = 1
@@ -180,7 +182,7 @@
 
 class FLASHStaticOutput(StaticOutput):
     _hierarchy_class = FLASHHierarchy
-    _fieldinfo_class = FLASHFieldContainer
+    _fieldinfo_fallback = FLASHFieldInfo
     _handle = None
     
     def __init__(self, filename, data_style='flash_hdf5',
@@ -193,7 +195,8 @@
         StaticOutput.__init__(self, filename, data_style)
         self.storage_filename = storage_filename
 
-        self.field_info = self._fieldinfo_class()
+        self.field_info = FieldInfoContainer.create_with_fallback(
+                            self._fieldinfo_fallback)
         # These should be explicitly obtained from the file, but for now that
         # will wait until a reorganization of the source tree and better
         # generalization.


--- a/yt/frontends/flash/fields.py	Tue May 24 16:02:09 2011 -0400
+++ b/yt/frontends/flash/fields.py	Wed May 25 21:18:42 2011 -0400
@@ -24,7 +24,8 @@
 """
 
 from yt.data_objects.field_info_container import \
-    CodeFieldInfoContainer, \
+    FieldInfoContainer, \
+    FieldInfo, \
     ValidateParameter, \
     ValidateDataField, \
     ValidateProperty, \
@@ -32,10 +33,7 @@
     ValidateGridType
 import yt.data_objects.universal_fields
 
-class FLASHFieldContainer(CodeFieldInfoContainer):
-    _shared_state = {}
-    _field_list = {}
-FLASHFieldInfo = FLASHFieldContainer()
+FLASHFieldInfo = FieldInfoContainer.create_with_fallback(FieldInfo)
 add_flash_field = FLASHFieldInfo.add_field
 
 add_field = add_flash_field


--- a/yt/frontends/gadget/api.py	Tue May 24 16:02:09 2011 -0400
+++ b/yt/frontends/gadget/api.py	Wed May 25 21:18:42 2011 -0400
@@ -34,7 +34,6 @@
       GadgetStaticOutput
 
 from .fields import \
-      GadgetFieldContainer, \
       GadgetFieldInfo, \
       add_gadget_field
 


--- a/yt/frontends/gadget/data_structures.py	Tue May 24 16:02:09 2011 -0400
+++ b/yt/frontends/gadget/data_structures.py	Wed May 25 21:18:42 2011 -0400
@@ -37,7 +37,9 @@
 from yt.data_objects.static_output import \
     StaticOutput
 
-from .fields import GadgetFieldContainer
+from .fields import GadgetFieldInfo
+from yt.data_objects.field_info_container import \
+    FieldInfoContainer
 
 class GadgetGrid(AMRGridPatch):
     _id_offset = 0
@@ -144,10 +146,11 @@
 
 class GadgetStaticOutput(StaticOutput):
     _hierarchy_class = GadgetHierarchy
-    _fieldinfo_class = GadgetFieldContainer
+    _fieldinfo_fallback = GadgetFieldInfo
     def __init__(self, filename,storage_filename=None) :
         self.storage_filename = storage_filename
-        self.field_info = self._fieldinfo_class()
+        self.field_info = FieldInfoContainer.create_with_fallback(
+                            self._fieldinfo_fallback)
         self.filename = filename
         
         StaticOutput.__init__(self, filename, 'gadget_infrastructure')


--- a/yt/frontends/gadget/fields.py	Tue May 24 16:02:09 2011 -0400
+++ b/yt/frontends/gadget/fields.py	Wed May 25 21:18:42 2011 -0400
@@ -27,7 +27,8 @@
 
 from yt.funcs import *
 from yt.data_objects.field_info_container import \
-    CodeFieldInfoContainer, \
+    FieldInfoContainer, \
+    FieldInfo, \
     ValidateParameter, \
     ValidateDataField, \
     ValidateProperty, \
@@ -35,10 +36,7 @@
     ValidateGridType
 import yt.data_objects.universal_fields
 
-class GadgetFieldContainer(CodeFieldInfoContainer):
-    _shared_state = {}
-    _field_list = {}
-GadgetFieldInfo = GadgetFieldContainer()
+GadgetFieldInfo = FieldInfoContainer.create_with_fallback(FieldInfo)
 add_gadget_field = GadgetFieldInfo.add_field
 
 add_field = add_gadget_field


--- a/yt/frontends/gdf/api.py	Tue May 24 16:02:09 2011 -0400
+++ b/yt/frontends/gdf/api.py	Wed May 25 21:18:42 2011 -0400
@@ -34,7 +34,6 @@
       ChomboStaticOutput
 
 from .fields import \
-      ChomboFieldContainer, \
       ChomboFieldInfo, \
       add_chombo_field
 


--- a/yt/frontends/gdf/data_structures.py	Tue May 24 16:02:09 2011 -0400
+++ b/yt/frontends/gdf/data_structures.py	Wed May 25 21:18:42 2011 -0400
@@ -33,6 +33,8 @@
            StaticOutput
 
 from .fields import GDFFieldContainer
+from yt.data_objects.field_info_container import \
+    FieldInfoContainer
 
 class GDFGrid(AMRGridPatch):
     _id_offset = 0
@@ -142,14 +144,15 @@
 
 class GDFStaticOutput(StaticOutput):
     _hierarchy_class = GDFHierarchy
-    _fieldinfo_class = GDFFieldContainer
+    _fieldinfo_fallback = GDFFieldContainer
     
     def __init__(self, filename, data_style='grid_data_format',
                  storage_filename = None):
         StaticOutput.__init__(self, filename, data_style)
         self._handle = h5py.File(self.filename, "r")
         self.storage_filename = storage_filename
-        self.field_info = self._fieldinfo_class()
+        self.field_info = FieldInfoContainer.create_with_fallback(
+                            self._fieldinfo_fallback)
         self._handle.close()
         del self._handle
         


--- a/yt/frontends/gdf/fields.py	Tue May 24 16:02:09 2011 -0400
+++ b/yt/frontends/gdf/fields.py	Wed May 25 21:18:42 2011 -0400
@@ -24,7 +24,8 @@
 """
 
 from yt.data_objects.field_info_container import \
-    CodeFieldInfoContainer, \
+    FieldInfoContainer, \
+    FieldInfo, \
     ValidateParameter, \
     ValidateDataField, \
     ValidateProperty, \
@@ -32,10 +33,7 @@
     ValidateGridType
 import yt.data_objects.universal_fields
 
-class ChomboFieldContainer(CodeFieldInfoContainer):
-    _shared_state = {}
-    _field_list = {}
-ChomboFieldInfo = ChomboFieldContainer()
+ChomboFieldInfo = FieldInfoContainer.create_with_fallback(FieldInfo)
 add_chombo_field = ChomboFieldInfo.add_field
 
 add_field = add_chombo_field


--- a/yt/frontends/maestro/api.py	Tue May 24 16:02:09 2011 -0400
+++ b/yt/frontends/maestro/api.py	Wed May 25 21:18:42 2011 -0400
@@ -36,7 +36,6 @@
       MaestroStaticOutput
 
 from .fields import \
-      MaestroFieldContainer, \
       MaestroFieldInfo, \
       add_maestro_field
 


--- a/yt/frontends/maestro/data_structures.py	Tue May 24 16:02:09 2011 -0400
+++ b/yt/frontends/maestro/data_structures.py	Wed May 25 21:18:42 2011 -0400
@@ -54,9 +54,11 @@
     yt2maestroFieldsDict, \
     maestro_FAB_header_pattern
 
+from yt.data_objects.field_info_container import \
+    FieldInfoContainer
 from .fields import \
-    MaestroFieldContainer, \
-    add_field
+    MaestroFieldInfo, \
+    add_maestro_field
 
 
 class MaestroGrid(AMRGridPatch):
@@ -118,7 +120,7 @@
 class MaestroHierarchy(AMRHierarchy):
     grid = MaestroGrid
     def __init__(self, pf, data_style='maestro'):
-        self.field_info = MaestroFieldContainer()
+        self.field_info = pf.field_info
         self.field_indexes = {}
         self.parameter_file = weakref.proxy(pf)
         header_filename = os.path.join(pf.fullplotdir,'Header')
@@ -431,7 +433,7 @@
     *filename*, without looking at the Maestro hierarchy.
     """
     _hierarchy_class = MaestroHierarchy
-    _fieldinfo_class = MaestroFieldContainer
+    _fieldinfo_fallback = MaestroFieldInfo
 
     def __init__(self, plotname, paramFilename=None, 
                  data_style='maestro', paranoia=False,
@@ -455,7 +457,8 @@
         # this is the unit of time; NOT the current time
         self.parameters["Time"] = 1 # second
 
-        self.field_info = self._fieldinfo_class()
+        self.field_info = FieldInfoContainer.create_with_fallback(
+                            self._fieldinfo_fallback)
         self._parse_header_file()
 
 


--- a/yt/frontends/maestro/fields.py	Tue May 24 16:02:09 2011 -0400
+++ b/yt/frontends/maestro/fields.py	Wed May 25 21:18:42 2011 -0400
@@ -27,7 +27,8 @@
 from yt.utilities.physical_constants import \
     mh, kboltz
 from yt.data_objects.field_info_container import \
-    CodeFieldInfoContainer, \
+    FieldInfoContainer, \
+    FieldInfo, \
     ValidateParameter, \
     ValidateDataField, \
     ValidateProperty, \
@@ -35,13 +36,7 @@
     ValidateGridType
 import yt.data_objects.universal_fields
 
-class MaestroFieldContainer(CodeFieldInfoContainer):
-    """
-    All Maestro-specific fields are stored in here.
-    """
-    _shared_state = {}
-    _field_list = {}
-MaestroFieldInfo = MaestroFieldContainer()
+MaestroFieldInfo = FieldInfoContainer.create_with_fallback(FieldInfo)
 add_maestro_field = MaestroFieldInfo.add_field
 
 


--- a/yt/frontends/orion/api.py	Tue May 24 16:02:09 2011 -0400
+++ b/yt/frontends/orion/api.py	Wed May 25 21:18:42 2011 -0400
@@ -34,7 +34,6 @@
       OrionStaticOutput
 
 from .fields import \
-      OrionFieldContainer, \
       OrionFieldInfo, \
       add_orion_field
 


--- a/yt/frontends/orion/data_structures.py	Tue May 24 16:02:09 2011 -0400
+++ b/yt/frontends/orion/data_structures.py	Wed May 25 21:18:42 2011 -0400
@@ -55,8 +55,11 @@
     orion_FAB_header_pattern
 
 from .fields import \
-    OrionFieldContainer, \
-    add_field
+    OrionFieldInfo, \
+    add_orion_field
+
+from yt.data_objects.field_info_container import \
+    FieldInfoContainer
 
 
 class OrionGrid(AMRGridPatch):
@@ -410,7 +413,7 @@
                         return data.convert(f)
                     return _convert_function
                 cf = external_wrapper(field)
-            add_field(field, lambda a, b: None,
+            add_orion_field(field, lambda a, b: None,
                       convert_function=cf, take_log=False)
 
 
@@ -439,7 +442,7 @@
     *filename*, without looking at the Orion hierarchy.
     """
     _hierarchy_class = OrionHierarchy
-    _fieldinfo_class = OrionFieldContainer
+    _fieldinfo_fallback = OrionFieldInfo
 
     def __init__(self, plotname, paramFilename=None, fparamFilename=None,
                  data_style='orion_native', paranoia=False,
@@ -465,7 +468,8 @@
 
         StaticOutput.__init__(self, plotname.rstrip("/"),
                               data_style='orion_native')
-        self.field_info = self._fieldinfo_class()
+        self.field_info = FieldInfoContainer.create_with_fallback(
+                            self._fieldinfo_fallback)
 
         # These should maybe not be hardcoded?
         self.parameters["HydroMethod"] = 'orion' # always PPM DE


--- a/yt/frontends/orion/fields.py	Tue May 24 16:02:09 2011 -0400
+++ b/yt/frontends/orion/fields.py	Wed May 25 21:18:42 2011 -0400
@@ -25,7 +25,8 @@
 from yt.utilities.physical_constants import \
     mh, kboltz
 from yt.data_objects.field_info_container import \
-    CodeFieldInfoContainer, \
+    FieldInfoContainer, \
+    FieldInfo, \
     ValidateParameter, \
     ValidateDataField, \
     ValidateProperty, \
@@ -33,13 +34,7 @@
     ValidateGridType
 import yt.data_objects.universal_fields
 
-class OrionFieldContainer(CodeFieldInfoContainer):
-    """
-    All Orion-specific fields are stored in here.
-    """
-    _shared_state = {}
-    _field_list = {}
-OrionFieldInfo = OrionFieldContainer()
+OrionFieldInfo = FieldInfoContainer.create_with_fallback(FieldInfo)
 add_orion_field = OrionFieldInfo.add_field
 
 


--- a/yt/frontends/ramses/api.py	Tue May 24 16:02:09 2011 -0400
+++ b/yt/frontends/ramses/api.py	Wed May 25 21:18:42 2011 -0400
@@ -34,7 +34,6 @@
       RAMSESStaticOutput
 
 from .fields import \
-      RAMSESFieldContainer, \
       RAMSESFieldInfo, \
       add_ramses_field
 


--- a/yt/frontends/ramses/data_structures.py	Tue May 24 16:02:09 2011 -0400
+++ b/yt/frontends/ramses/data_structures.py	Wed May 25 21:18:42 2011 -0400
@@ -35,11 +35,13 @@
 from yt.data_objects.static_output import \
       StaticOutput
 import _ramses_reader
-from .fields import RAMSESFieldContainer
+from .fields import RAMSESFieldInfo
 from yt.utilities.definitions import \
     mpc_conversion
 from yt.utilities.io_handler import \
     io_registry
+from yt.data_objects.field_info_container import \
+    FieldInfoContainer
 
 def num_deep_inc(f):
     def wrap(self, *args, **kwargs):
@@ -317,7 +319,7 @@
 
 class RAMSESStaticOutput(StaticOutput):
     _hierarchy_class = RAMSESHierarchy
-    _fieldinfo_class = RAMSESFieldContainer
+    _fieldinfo_fallback = RAMSESFieldInfo
     _handle = None
     
     def __init__(self, filename, data_style='ramses',
@@ -325,7 +327,8 @@
         StaticOutput.__init__(self, filename, data_style)
         self.storage_filename = storage_filename
 
-        self.field_info = self._fieldinfo_class()
+        self.field_info = FieldInfoContainer.create_with_fallback(
+                            self._fieldinfo_fallback)
 
     def __repr__(self):
         return self.basename.rsplit(".", 1)[0]


--- a/yt/frontends/ramses/fields.py	Tue May 24 16:02:09 2011 -0400
+++ b/yt/frontends/ramses/fields.py	Wed May 25 21:18:42 2011 -0400
@@ -24,7 +24,8 @@
 """
 
 from yt.data_objects.field_info_container import \
-    CodeFieldInfoContainer, \
+    FieldInfoContainer, \
+    FieldInfo, \
     ValidateParameter, \
     ValidateDataField, \
     ValidateProperty, \
@@ -32,10 +33,7 @@
     ValidateGridType
 import yt.data_objects.universal_fields
 
-class RAMSESFieldContainer(CodeFieldInfoContainer):
-    _shared_state = {}
-    _field_list = {}
-RAMSESFieldInfo = RAMSESFieldContainer()
+RAMSESFieldInfo = FieldInfoContainer.create_with_fallback(FieldInfo)
 add_ramses_field = RAMSESFieldInfo.add_field
 
 add_field = add_ramses_field


--- a/yt/frontends/tiger/api.py	Tue May 24 16:02:09 2011 -0400
+++ b/yt/frontends/tiger/api.py	Wed May 25 21:18:42 2011 -0400
@@ -34,7 +34,6 @@
       TigerStaticOutput
 
 from .fields import \
-      TigerFieldContainer, \
       TigerFieldInfo, \
       add_tiger_field
 


--- a/yt/frontends/tiger/data_structures.py	Tue May 24 16:02:09 2011 -0400
+++ b/yt/frontends/tiger/data_structures.py	Wed May 25 21:18:42 2011 -0400
@@ -31,7 +31,9 @@
 from yt.data_objects.static_output import \
            StaticOutput
 
-from .fields import TigerFieldContainer
+from yt.data_objects.field_info_container import \
+    FieldInfoContainer
+from .fields import TigerFieldInfo
 
 class TigerGrid(AMRGridPatch):
     _id_offset = 0
@@ -134,7 +134,7 @@
 
 class TigerStaticOutput(StaticOutput):
     _hierarchy_class = TigerHierarchy
-    _fieldinfo_class = TigerFieldContainer
+    _fieldinfo_fallback = TigerFieldInfo
 
     def __init__(self, rhobname, root_size, max_grid_size=128,
                  data_style='tiger', storage_filename = None):
@@ -150,7 +150,8 @@
         if not iterable(max_grid_size): max_grid_size = (max_grid_size,) * 3
         self.max_grid_size = max_grid_size
 
-        self.field_info = self._fieldinfo_class()
+        self.field_info = FieldInfoContainer.create_with_fallback(
+                            self._fieldinfo_fallback)
 
         # We assume that we have basename + "rhob" and basename + "temp"
         # to get at our various parameters.


--- a/yt/frontends/tiger/fields.py	Tue May 24 16:02:09 2011 -0400
+++ b/yt/frontends/tiger/fields.py	Wed May 25 21:18:42 2011 -0400
@@ -24,7 +24,8 @@
 """
 
 from yt.data_objects.field_info_container import \
-    CodeFieldInfoContainer, \
+    FieldInfoContainer, \
+    FieldInfo, \
     ValidateParameter, \
     ValidateDataField, \
     ValidateProperty, \
@@ -32,12 +33,6 @@
     ValidateGridType
 import yt.data_objects.universal_fields
 
-class TigerFieldContainer(CodeFieldInfoContainer):
-    """
-    This is a container for Tiger-specific fields.
-    """
-    _shared_state = {}
-    _field_list = {}
-TigerFieldInfo = TigerFieldContainer()
+TigerFieldInfo = FieldInfoContainer.create_with_fallback(FieldInfo)
 add_tiger_field = TigerFieldInfo.add_field
 


http://bitbucket.org/yt_analysis/yt/changeset/add32baac35e/
changeset:   add32baac35e
branch:      deliberate_fields
user:        MatthewTurk
date:        2011-06-04 02:08:23
summary:     Merging
affected #:  34 files (-1 bytes)

--- a/CREDITS	Wed May 25 21:18:42 2011 -0400
+++ b/CREDITS	Fri Jun 03 17:08:23 2011 -0700
@@ -1,6 +1,6 @@
 YT is a group effort.
 
-Developers:                     Matthew Turk (matthewturk at gmail.com)
+Contributors:                   Matthew Turk (matthewturk at gmail.com)
                                 Britton Smith (brittonsmith at gmail.com)
                                 Jeff Oishi (jsoishi at gmail.com)
                                 Stephen Skory (s at skory.us)
@@ -14,7 +14,10 @@
                                 Chris Malone (cmalone at mail.astro.sunysb.edu)
                                 Cameron Hummels (chummels at astro.columbia.edu)
                                 Stefan Klemer (sklemer at phys.uni-goettingen.de)
+                                Tom Abel (tabel at stanford.edu)
                                 Andrew Myers (atmyers at astro.berkeley.edu)
+                                Michael Kuhlen (mqk at astro.berkeley.edu)
+                                Casey Stark (caseywstark at gmail.com)
 
 We also include the Delaunay Triangulation module written by Robert Kern of
 Enthought, the cmdln.py module by Trent Mick, and the progressbar module by


--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/activate	Fri Jun 03 17:08:23 2011 -0700
@@ -0,0 +1,110 @@
+### Adapted from virtualenv's activate script
+
+# This file must be used with "source bin/activate" *from bash*
+# you cannot run it directly
+
+deactivate () {
+    # reset old environment variables
+    if [ -n "$_OLD_VIRTUAL_PATH" ] ; then
+        PATH="$_OLD_VIRTUAL_PATH"
+        export PATH
+        unset _OLD_VIRTUAL_PATH
+    fi
+    if [ -n "$_OLD_VIRTUAL_PYTHONHOME" ] ; then
+        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
+        export PYTHONHOME
+        unset _OLD_VIRTUAL_PYTHONHOME
+    fi
+
+    ### Begin extra yt vars
+    if [ -n "$_OLD_VIRTUAL_YT_DEST" ] ; then
+        YT_DEST="$_OLD_VIRTUAL_YT_DEST"
+        export YT_DEST
+        unset _OLD_VIRTUAL_PYTHONHOME
+    fi
+    if [ -n "$_OLD_VIRTUAL_PYTHONPATH" ] ; then
+        PYTHONPATH="$_OLD_VIRTUAL_PYTHONPATH"
+        export PYTHONPATH
+        unset _OLD_VIRTUAL_PYTHONPATH
+    fi
+    if [ -n "$_OLD_VIRTUAL_LD_LIBRARY_PATH" ] ; then
+        LD_LIBRARY_PATH="$_OLD_VIRTUAL_LD_LIBRARY_PATH"
+        export LD_LIBRARY_PATH
+        unset _OLD_VIRTUAL_LD_LIBRARY_PATH
+    fi
+    ### End extra yt vars
+
+    # This should detect bash and zsh, which have a hash command that must
+    # be called to get it to forget past commands.  Without forgetting
+    # past commands the $PATH changes we made may not be respected
+    if [ -n "$BASH" -o -n "$ZSH_VERSION" ] ; then
+        hash -r
+    fi
+
+    if [ -n "$_OLD_VIRTUAL_PS1" ] ; then
+        PS1="$_OLD_VIRTUAL_PS1"
+        export PS1
+        unset _OLD_VIRTUAL_PS1
+    fi
+
+    unset VIRTUAL_ENV
+    if [ ! "$1" = "nondestructive" ] ; then
+    # Self destruct!
+        unset -f deactivate
+    fi
+}
+
+# unset irrelevant variables
+deactivate nondestructive
+
+VIRTUAL_ENV="__YT_DIR__"
+export VIRTUAL_ENV
+
+_OLD_VIRTUAL_PATH="$PATH"
+PATH="$VIRTUAL_ENV/bin:$PATH"
+export PATH
+
+### Begin extra env vars for yt
+_OLD_VIRTUAL_YT_DEST="$YT_DEST"
+YT_DEST="$VIRTUAL_ENV"
+export YT_DEST
+
+_OLD_VIRTUAL_PYTHONPATH="$PYTHONPATH"
+PYTHONPATH="$VIRTUAL_ENV/lib/python2.7/site-packages"
+export PYTHONPATH
+
+_OLD_VIRTUAL_LD_LIBRARY_PATH="$LD_LIBRARY_PATH"
+LD_LIBRARY_PATH="$VIRTUAL_ENV/lib"
+export LD_LIBRARY_PATH
+### End extra env vars for yt
+
+# unset PYTHONHOME if set
+# this will fail if PYTHONHOME is set to the empty string (which is bad anyway)
+# could use `if (set -u; : $PYTHONHOME) ;` in bash
+if [ -n "$PYTHONHOME" ] ; then
+    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
+    unset PYTHONHOME
+fi
+
+if [ -z "$VIRTUAL_ENV_DISABLE_PROMPT" ] ; then
+    _OLD_VIRTUAL_PS1="$PS1"
+    if [ "x" != x ] ; then
+	PS1="$PS1"
+    else
+    if [ "`basename \"$VIRTUAL_ENV\"`" = "__" ] ; then
+        # special case for Aspen magic directories
+        # see http://www.zetadev.com/software/aspen/
+        PS1="[`basename \`dirname \"$VIRTUAL_ENV\"\``] $PS1"
+    else
+        PS1="(`basename \"$VIRTUAL_ENV\"`)$PS1"
+    fi
+    fi
+    export PS1
+fi
+
+# This should detect bash and zsh, which have a hash command that must
+# be called to get it to forget past commands.  Without forgetting
+# past commands the $PATH changes we made may not be respected
+if [ -n "$BASH" -o -n "$ZSH_VERSION" ] ; then
+    hash -r
+fi


--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/activate.csh	Fri Jun 03 17:08:23 2011 -0700
@@ -0,0 +1,43 @@
+# This file must be used with "source bin/activate.csh" *from csh*.
+# You cannot run it directly.
+# Created by Davide Di Blasi <davidedb at gmail.com>.
+
+alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; test $?_OLD_VIRTUAL_YT_DEST != 0 && setenv YT_DEST "$_OLD_VIRTUAL_YT_DEST" && unset _OLD_VIRTUAL_YT_DEST; test $?_OLD_VIRTUAL_PYTHONPATH != 0 && setenv PYTHONPATH "$_OLD_VIRTUAL_PYTHONPATH" && unset _OLD_VIRTUAL_PYTHONPATH; test $?_OLD_VIRTUAL_LD_LIBRARY_PATH != 0 && setenv LD_LIBRARY_PATH "$_OLD_VIRTUAL_LD_LIBRARY_PATH" && unset _OLD_VIRTUAL_LD_LIBRARY_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; test "\!:*" != "nondestructive" && unalias deactivate'
+
+# Unset irrelevant variables.
+deactivate nondestructive
+
+setenv VIRTUAL_ENV "__YT_DIR__"
+
+set _OLD_VIRTUAL_PATH="$PATH"
+setenv PATH "$VIRTUAL_ENV/bin:$PATH"
+
+### Begin extra yt vars
+set _OLD_VIRTUAL_YT_DEST="$YT_DEST"
+setenv YT_DEST "$VIRTUAL_ENV:$YT_DEST"
+
+set _OLD_VIRTUAL_PYTHONPATH="$PYTHONPATH"
+setenv PYTHONPATH "$VIRTUAL_ENV/lib/python2.7/site-packages:$PYTHONPATH"
+
+set _OLD_VIRTUAL_LD_LIBRARY_PATH="$LD_LIBRARY_PATH"
+setenv LD_LIBRARY_PATH "$VIRTUAL_ENV/lib:$LD_LIBRARY_PATH"
+### End extra yt vars
+
+set _OLD_VIRTUAL_PROMPT="$prompt"
+
+if ("" != "") then
+    set env_name = ""
+else
+    if (`basename "$VIRTUAL_ENV"` == "__") then
+        # special case for Aspen magic directories
+        # see http://www.zetadev.com/software/aspen/
+        set env_name = `basename \`dirname "$VIRTUAL_ENV"\``
+    else
+        set env_name = `basename "$VIRTUAL_ENV"`
+    endif
+endif
+set prompt = "[$env_name] $prompt"
+unset env_name
+
+rehash
+


--- a/doc/install_script.sh	Wed May 25 21:18:42 2011 -0400
+++ b/doc/install_script.sh	Fri Jun 03 17:08:23 2011 -0700
@@ -588,6 +588,12 @@
     echo "*******************************************************"
 fi
 
+# Add the environment scripts
+( cp ${YT_DIR}/doc/activate ${DEST_DIR}/bin/activate 2>&1 ) 1>> ${LOG_FILE}
+sed -i.bak -e "s,__YT_DIR__,${DEST_DIR}," ${DEST_DIR}/bin/activate
+( cp ${YT_DIR}/doc/activate.csh ${DEST_DIR}/bin/activate.csh 2>&1 ) 1>> ${LOG_FILE}
+sed -i.bak -e "s,__YT_DIR__,${DEST_DIR}," ${DEST_DIR}/bin/activate.csh
+
 function print_afterword
 {
     echo
@@ -595,26 +601,30 @@
     echo "========================================================================"
     echo
     echo "yt is now installed in $DEST_DIR ."
-    echo "To run from this new installation, the a few variables need to be"
-    echo "prepended with the following information:"
     echo
-    echo "YT_DEST         => $DEST_DIR"
-    echo "PATH            => $DEST_DIR/bin/"
-    echo "PYTHONPATH      => $DEST_DIR/lib/python2.7/site-packages/"
-    echo "LD_LIBRARY_PATH => $DEST_DIR/lib/"
+    echo "To run from this new installation, use the activate script for this "
+    echo "environment."
+    echo
+    echo "    $ source $DEST_DIR/bin/activate"
+    echo "    (yt)$ "
+    echo
+    echo "This modifies the environment variables YT_DEST, PATH, PYTHONPATH, and"
+    echo "LD_LIBRARY_PATH to match your new yt install. But don't worry - as soon"
+    echo "as you are done you can run 'deactivate' to return to your previous"
+    echo "shell environment.  If you use csh, just append .csh to the above."
     echo
     echo "For interactive data analysis and visualization, we recommend running"
     echo "the IPython interface, which will become more fully featured with time:"
     echo
-    echo "$DEST_DIR/bin/iyt"
+    echo "    $DEST_DIR/bin/iyt"
     echo
     echo "For command line analysis run:"
     echo
-    echo "$DEST_DIR/bin/yt"
+    echo "    $DEST_DIR/bin/yt"
     echo
     echo "To bootstrap a development environment for yt, run:"
     echo 
-    echo "$DEST_DIR/bin/yt bootstrap_dev"
+    echo "    $DEST_DIR/bin/yt bootstrap_dev"
     echo
     echo "Note of interest: this installation will use the directory:"
     echo "    $YT_DIR"


--- a/setup.py	Wed May 25 21:18:42 2011 -0400
+++ b/setup.py	Fri Jun 03 17:08:23 2011 -0700
@@ -126,7 +126,6 @@
         keywords='astronomy astrophysics visualization amr adaptivemeshrefinement',
         entry_points = { 'console_scripts' : [
                             'yt = yt.utilities.command_line:run_main',
-                            'enzo_test = yt.utilities.answer_testing.runner:run_main',
                        ]},
         author="Matthew J. Turk",
         author_email="matthewturk at gmail.com",


--- a/yt/analysis_modules/halo_mass_function/halo_mass_function.py	Wed May 25 21:18:42 2011 -0400
+++ b/yt/analysis_modules/halo_mass_function/halo_mass_function.py	Fri Jun 03 17:08:23 2011 -0700
@@ -323,8 +323,8 @@
             dn_M_z = -1.0 / thissigma * dsigmadm * rho0 / self.massarray[i] * \
             self.multiplicityfunction(thissigma)*(self.massarray[i+1] - self.massarray[i]);
 
-            # scale by h^4 to get rid of all factors of h
-            dn_M_z *= math.pow(self.hubble0, 4.0);
+            # scale by h^3 to get rid of all factors of h
+            dn_M_z *= math.pow(self.hubble0, 3.0);
             
             # keep track of cumulative number density
             if dn_M_z > 1.0e-20:


--- a/yt/analysis_modules/halo_profiler/halo_filters.py	Wed May 25 21:18:42 2011 -0400
+++ b/yt/analysis_modules/halo_profiler/halo_filters.py	Fri Jun 03 17:08:23 2011 -0700
@@ -102,8 +102,8 @@
     elif (overDensity[-1] >= virial_overdensity):
         index = -2
     else:
-        for q in (na.arange(len(overDensity)-2))+2:
-            if (overDensity[q] < virial_overdensity):
+        for q in (na.arange(len(overDensity),0,-1)-1):
+            if (overDensity[q] < virial_overdensity) and (overDensity[q-1] >= virial_overdensity):
                 index = q - 1
                 break
 

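A small numeric sketch (profile values are hypothetical) of why the scan
direction matters: with a noisy profile that dips below the virial
overdensity and recovers, the old inward scan stops at the first dip,
while the reversed scan finds the outermost downward crossing:

    import numpy as na

    overDensity = na.array([900., 500., 150., 310., 180., 90.])
    virial_overdensity = 200.0

    # Old inward scan: stops at the first dip below the threshold.
    for q in (na.arange(len(overDensity) - 2)) + 2:
        if overDensity[q] < virial_overdensity:
            old_index = q - 1
            break

    # New outside-in scan: finds the outermost downward crossing.
    for q in (na.arange(len(overDensity), 0, -1) - 1):
        if (overDensity[q] < virial_overdensity) and \
                (overDensity[q - 1] >= virial_overdensity):
            new_index = q - 1
            break

    print(old_index, new_index)  # 1 vs. 3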

--- a/yt/analysis_modules/halo_profiler/multi_halo_profiler.py	Wed May 25 21:18:42 2011 -0400
+++ b/yt/analysis_modules/halo_profiler/multi_halo_profiler.py	Fri Jun 03 17:08:23 2011 -0700
@@ -211,10 +211,12 @@
         # Create dataset object.
         self.pf = load(self.dataset)
         self.pf.h
-        if self.halos is 'single' or \
-                not 'r_max' in self.halo_list_format or \
-                halo_radius is not None:
+
+        # Figure out what max radius to use for profiling.
+        if halo_radius is not None:
             self.halo_radius = halo_radius / self.pf[radius_units]
+        elif self.halos is 'single' or not 'r_max' in self.halo_list_format:
+            self.halo_radius = 0.1
         else:
             self.halo_radius = None
 


--- a/yt/analysis_modules/two_point_functions/two_point_functions.py	Wed May 25 21:18:42 2011 -0400
+++ b/yt/analysis_modules/two_point_functions/two_point_functions.py	Fri Jun 03 17:08:23 2011 -0700
@@ -610,7 +610,7 @@
                 added_points = False
 
     @parallel_blocking_call
-    def write_out_means(self):
+    def write_out_means(self, fn = "%s.txt"):
         r"""Writes out the weighted-average value for each function for
         each dimension for each ruler length to a text file. The data is written
         to files of the name 'function_name.txt' in the current working
@@ -621,7 +621,7 @@
         >>> tpf.write_out_means()
         """
         for fset in self._fsets:
-            fp = self._write_on_root("%s.txt" % fset.function.__name__)
+            fp = self._write_on_root(fn % fset.function.__name__)
             fset._avg_bin_hits()
             line = "# length".ljust(sep)
             line += "count".ljust(sep)
@@ -643,7 +643,7 @@
             fp.close()
     
     @parallel_root_only
-    def write_out_arrays(self):
+    def write_out_arrays(self, fn = "%s.h5"):
         r"""Writes out the raw probability bins and the bin edges to an HDF5 file
         for each of the functions. The files are named 
         'function_name.txt' and saved in the current working directory.
@@ -654,7 +654,7 @@
         """
         if self.mine == 0:
             for fset in self._fsets:
-                f = h5py.File("%s.h5" % fset.function.__name__, "w")
+                f = h5py.File(fn % fset.function.__name__, "w")
                 bin_names = []
                 prob_names = []
                 bin_counts = []

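A hypothetical call site for the new fn templates, assuming tpf is an
already-configured TwoPointFunctions instance (setup not shown):

    # Route output to run-specific filenames instead of the defaults
    # "<function_name>.txt" and "<function_name>.h5".
    tpf.write_out_means(fn="run42_%s.txt")
    tpf.write_out_arrays(fn="run42_%s.h5")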

--- a/yt/convenience.py	Wed May 25 21:18:42 2011 -0400
+++ b/yt/convenience.py	Fri Jun 03 17:08:23 2011 -0700
@@ -70,6 +70,17 @@
     match, at which point it returns an instance of the appropriate
     :class:`yt.data_objects.api.StaticOutput` subclass.
     """
+    if len(args) == 0:
+        try:
+            import Tkinter, tkFileDialog
+        except ImportError:
+            return None
+        root = Tkinter.Tk()
+        filename = tkFileDialog.askopenfilename(parent=root,title='Choose a file')
+        if filename != None:
+            return load(filename)
+        else:
+            return None
     candidates = []
     for n, c in output_type_registry.items():
         if n is None: continue

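With this change, load() can be called with no arguments at all; a
hypothetical session, assuming a Tk-capable display and the usual
"from yt.mods import *" setup:

    from yt.mods import *

    # No arguments: a Tk file dialog pops up and the chosen file is
    # loaded; returns None if Tkinter is not importable.
    pf = load()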

--- a/yt/data_objects/data_containers.py	Wed May 25 21:18:42 2011 -0400
+++ b/yt/data_objects/data_containers.py	Fri Jun 03 17:08:23 2011 -0700
@@ -581,8 +581,8 @@
         --------
 
         >>> pf = load("RedshiftOutput0005")
-        >>> ray = pf.h._ray((0.2, 0.74), (0.4, 0.91))
-        >>> print ray["Density"], ray["t"]
+        >>> ray = pf.h._ray((0.2, 0.74, 0.11), (0.4, 0.91, 0.31))
+        >>> print ray["Density"], ray["t"], ray["dts"]
         """
         AMR1DData.__init__(self, pf, fields, **kwargs)
         self.start_point = na.array(start_point, dtype='float64')
@@ -1517,8 +1517,8 @@
 
     def _initialize_source(self, source = None):
         if source is None:
-            check, source = self._partition_hierarchy_2d(self.axis)
-            self._check_region = check
+            source = self.pf.h.all_data()
+            self._check_region = False
             #self._okay_to_serialize = (not check)
         else:
             self._distributed = False
@@ -1565,19 +1565,22 @@
         # _project_level, then it would be more memory conservative
         if self.preload_style == 'all':
             print "Preloading %s grids and getting %s" % (
-                    len(self.source._grids), self._get_dependencies(fields))
-            self._preload(self.source._grids,
+                    len(self.source._get_grid_objs()),
+                    self._get_dependencies(fields))
+            self._preload([g for g in self._get_grid_objs()],
                           self._get_dependencies(fields), self.hierarchy.io)
         # By changing the remove-from-tree method to accumulate, we can avoid
         # having to do this by level, and instead do it by CPU file
         for level in range(0, self._max_level+1):
             if self.preload_style == 'level':
-                self._preload(self.source.select_grids(level),
+                self._preload([g for g in self._get_grid_objs()
+                                 if g.Level == level],
                               self._get_dependencies(fields), self.hierarchy.io)
             self._add_level_to_tree(tree, level, fields)
             mylog.debug("End of projecting level level %s, memory usage %0.3e", 
                         level, get_memory_usage()/1024.)
         # Note that this will briefly double RAM usage
+        tree = self.merge_quadtree_buffers(tree)
         coord_data, field_data, weight_data, dxs = [], [], [], []
         for level in range(0, self._max_level + 1):
             npos, nvals, nwvals = tree.get_all_from_level(level, False)
@@ -1591,7 +1594,6 @@
             else:
                 ds = 0.0
             dxs.append(na.ones(nvals.shape[0], dtype='float64') * ds)
-        del tree
         coord_data = na.concatenate(coord_data, axis=0).transpose()
         field_data = na.concatenate(field_data, axis=0).transpose()
         weight_data = na.concatenate(weight_data, axis=0).transpose()
@@ -1609,7 +1611,6 @@
         data['pdy'] = data['pdx'] # generalization is out the window!
         data['fields'] = field_data
         # Now we run the finalizer, which is ignored if we don't need it
-        data = self._mpi_catdict(data)
         field_data = na.vsplit(data.pop('fields'), len(fields))
         for fi, field in enumerate(fields):
             self[field] = field_data[fi].ravel()
@@ -1654,7 +1655,9 @@
                     to_add, weight_proj[used_points].ravel())
 
     def _add_level_to_tree(self, tree, level, fields):
-        grids_to_project = self.source.select_grids(level)
+        grids_to_project = [g for g in self._get_grid_objs()
+                            if g.Level == level]
+        if len(grids_to_project) == 0: return
         dls, convs = self._get_dls(grids_to_project[0], fields)
         zero_out = (level != self._max_level)
         pbar = get_pbar('Projecting  level % 2i / % 2i ' \


--- a/yt/data_objects/derived_quantities.py	Wed May 25 21:18:42 2011 -0400
+++ b/yt/data_objects/derived_quantities.py	Fri Jun 03 17:08:23 2011 -0700
@@ -388,7 +388,7 @@
         # symmetry.
         dxes = na.unique(data['dx']) # unique returns a sorted array,
         dyes = na.unique(data['dy']) # so these will all have the same
-        dzes = na.unique(data['dx']) # order.
+        dzes = na.unique(data['dz']) # order.
         # We only need one dim to figure out levels, we'll use x.
         dx = 1./data.pf.domain_dimensions[0]
         levels = (na.log(dx / dxes) / na.log(data.pf.refine_by)).astype('int')


--- a/yt/frontends/flash/data_structures.py	Wed May 25 21:18:42 2011 -0400
+++ b/yt/frontends/flash/data_structures.py	Fri Jun 03 17:08:23 2011 -0700
@@ -297,6 +297,21 @@
             [self._find_parameter("real", "%smin" % ax) for ax in 'xyz'])
         self.domain_right_edge = na.array(
             [self._find_parameter("real", "%smax" % ax) for ax in 'xyz'])
+
+        # Determine domain dimensions
+        try:
+            nxb = self._find_parameter("integer", "nxb", handle = self._handle)
+            nyb = self._find_parameter("integer", "nyb", handle = self._handle)
+            nzb = self._find_parameter("integer", "nzb", handle = self._handle)
+        except KeyError:
+            nxb, nyb, nzb = [int(self._handle["/simulation parameters"]['n%sb' % ax])
+                              for ax in 'xyz']
+        nblockx = self._find_parameter("integer", "nblockx", handle = self._handle)
+        nblocky = self._find_parameter("integer", "nblocky", handle = self._handle)
+        nblockz = self._find_parameter("integer", "nblockz", handle = self._handle)
+        self.domain_dimensions = \
+            na.array([nblockx*nxb,nblocky*nyb,nblockz*nzb])
+
         if self._flash_version == 7:
             self.current_time = float(
                 self._handle["simulation parameters"][:]["time"])


--- a/yt/frontends/ramses/setup.py	Wed May 25 21:18:42 2011 -0400
+++ b/yt/frontends/ramses/setup.py	Fri Jun 03 17:08:23 2011 -0700
@@ -9,6 +9,7 @@
         ["yt/frontends/ramses/_ramses_reader.pyx"],
         language="c++",
         include_dirs=["yt/frontends/ramses/ramses_headers/"],
+        libraries=["stdc++"],
         depends=glob.glob("yt/frontends/ramses/ramses_headers/*.hh")
         )
     config.make_config_py() # installs __config__.py


--- a/yt/frontends/setup.py	Wed May 25 21:18:42 2011 -0400
+++ b/yt/frontends/setup.py	Fri Jun 03 17:08:23 2011 -0700
@@ -15,4 +15,5 @@
     config.add_subpackage("art")
     config.add_subpackage("maestro")
     config.add_subpackage("castro")
+    config.add_subpackage("stream")
     return config


--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/yt/frontends/stream/api.py	Fri Jun 03 17:08:23 2011 -0700
@@ -0,0 +1,39 @@
+"""
+API for yt.frontends.stream
+
+Author: Matthew Turk <matthewturk at gmail.com>
+Affiliation: Columbia University
+Homepage: http://yt.enzotools.org/
+License:
+  Copyright (C) 2011 Matthew Turk.  All Rights Reserved.
+
+  This file is part of yt.
+
+  yt is free software; you can redistribute it and/or modify
+  it under the terms of the GNU General Public License as published by
+  the Free Software Foundation; either version 3 of the License, or
+  (at your option) any later version.
+
+  This program is distributed in the hope that it will be useful,
+  but WITHOUT ANY WARRANTY; without even the implied warranty of
+  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+  GNU General Public License for more details.
+
+  You should have received a copy of the GNU General Public License
+  along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+"""
+
+from .data_structures import \
+      StreamGrid, \
+      StreamHierarchy, \
+      StreamStaticOutput, \
+      StreamHandler
+
+from .fields import \
+      StreamFieldContainer, \
+      StreamFieldInfo, \
+      add_stream_field
+
+from .io import \
+      IOHandlerStream


--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/yt/frontends/stream/data_structures.py	Fri Jun 03 17:08:23 2011 -0700
@@ -0,0 +1,259 @@
+"""
+Data structures for Streaming, in-memory datasets
+
+Author: Matthew Turk <matthewturk at gmail.com>
+Affiliation: Columbia University
+Homepage: http://yt.enzotools.org/
+License:
+  Copyright (C) 2011 Matthew Turk.  All Rights Reserved.
+
+  This file is part of yt.
+
+  yt is free software; you can redistribute it and/or modify
+  it under the terms of the GNU General Public License as published by
+  the Free Software Foundation; either version 3 of the License, or
+  (at your option) any later version.
+
+  This program is distributed in the hope that it will be useful,
+  but WITHOUT ANY WARRANTY; without even the implied warranty of
+  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+  GNU General Public License for more details.
+
+  You should have received a copy of the GNU General Public License
+  along with this program.  If not, see <http://www.gnu.org/licenses/>.
+"""
+
+import weakref
+import numpy as na
+
+from yt.utilities.io_handler import io_registry
+from yt.funcs import *
+from yt.config import ytcfg
+from yt.data_objects.grid_patch import \
+    AMRGridPatch
+from yt.data_objects.hierarchy import \
+    AMRHierarchy
+from yt.data_objects.static_output import \
+    StaticOutput
+from yt.utilities.logger import ytLogger as mylog
+
+from .fields import \
+    StreamFieldContainer, \
+    add_stream_field
+
+class StreamGrid(AMRGridPatch):
+    """
+    Class representing a single In-memory Grid instance.
+    """
+
+    __slots__ = ['proc_num']
+    _id_offset = 0
+    def __init__(self, id, hierarchy):
+        """
+        Returns an instance of StreamGrid with *id*, associated with
+        *hierarchy*.
+        """
+        #All of the field parameters will be passed to us as needed.
+        AMRGridPatch.__init__(self, id, filename = None, hierarchy = hierarchy)
+        self._children_ids = []
+        self._parent_id = -1
+        self.Level = -1
+
+    def _guess_properties_from_parent(self):
+        rf = self.pf.refine_by
+        my_ind = self.id - self._id_offset
+        le = self.LeftEdge
+        self.dds = self.Parent.dds/rf
+        ParentLeftIndex = na.rint((self.LeftEdge-self.Parent.LeftEdge)/self.Parent.dds)
+        self.start_index = rf*(ParentLeftIndex + self.Parent.get_global_startindex()).astype('int64')
+        self.LeftEdge = self.Parent.LeftEdge + self.Parent.dds * ParentLeftIndex
+        self.RightEdge = self.LeftEdge + self.ActiveDimensions*self.dds
+        self.hierarchy.grid_left_edge[my_ind,:] = self.LeftEdge
+        self.hierarchy.grid_right_edge[my_ind,:] = self.RightEdge
+        self._child_mask = None
+        self._child_index_mask = None
+        self._child_indices = None
+        self._setup_dx()
+
+    def set_filename(self, filename):
+        pass
+
+    def __repr__(self):
+        return "StreamGrid_%04i" % (self.id)
+
+    @property
+    def Parent(self):
+        if self._parent_id == -1: return None
+        return self.hierarchy.grids[self._parent_id - self._id_offset]
+
+    @property
+    def Children(self):
+        return [self.hierarchy.grids[cid - self._id_offset]
+                for cid in self._children_ids]
+
+class StreamHandler(object):
+    def __init__(self, left_edges, right_edges, dimensions,
+                 levels, parent_ids, particle_count, processor_ids,
+                 fields):
+        self.left_edges = left_edges
+        self.right_edges = right_edges
+        self.dimensions = dimensions
+        self.levels = levels
+        self.parent_ids = parent_ids
+        self.particle_count = particle_count
+        self.processor_ids = processor_ids
+        self.num_grids = self.levels.size
+        self.fields = fields
+
+    def get_fields(self):
+        return self.fields.all_fields
+
+class StreamHierarchy(AMRHierarchy):
+
+    grid = StreamGrid
+
+    def __init__(self, pf, data_style = None):
+        self.data_style = data_style
+        self.float_type = 'float64'
+        self.parameter_file = weakref.proxy(pf) # for _obtain_enzo
+        self.stream_handler = pf.stream_handler
+        self.float_type = "float64"
+        self.directory = os.getcwd()
+        AMRHierarchy.__init__(self, pf, data_style)
+
+    def _initialize_data_storage(self):
+        pass
+
+    def _count_grids(self):
+        self.num_grids = self.stream_handler.num_grids
+
+    def _setup_unknown_fields(self):
+        for field in self.field_list:
+            if field in self.parameter_file.field_info: continue
+            mylog.info("Adding %s to list of fields", field)
+            cf = None
+            if self.parameter_file.has_key(field):
+                def external_wrapper(f):
+                    def _convert_function(data):
+                        return data.convert(f)
+                    return _convert_function
+                cf = external_wrapper(field)
+            # Note that we call add_field on the field_info directly.  This
+            # will allow the same field detection mechanism to work for 1D, 2D
+            # and 3D fields.
+            self.pf.field_info.add_field(
+                    field, lambda a, b: None,
+                    convert_function=cf, take_log=False)
+            
+
+    def _parse_hierarchy(self):
+        self.grid_dimensions = self.stream_handler.dimensions
+        self.grid_left_edge[:] = self.stream_handler.left_edges
+        self.grid_right_edge[:] = self.stream_handler.right_edges
+        self.grid_levels[:] = self.stream_handler.levels
+        self.grid_procs = self.stream_handler.processor_ids
+        self.grid_particle_count[:] = self.stream_handler.particle_count
+        mylog.debug("Copying reverse tree")
+        reverse_tree = self.stream_handler.parent_ids.tolist()
+        # Initial setup:
+        mylog.debug("Reconstructing parent-child relationships")
+        self.grids = []
+        # We enumerate, so both id and pid are 0-indexed; pid of -1 means no parent
+        self.filenames = ["-1"] * self.num_grids
+        for id,pid in enumerate(reverse_tree):
+            self.grids.append(self.grid(id, self))
+            self.grids[-1].Level = self.grid_levels[id, 0]
+            if pid >= 0:
+                self.grids[-1]._parent_id = pid
+                self.grids[pid]._children_ids.append(self.grids[-1].id)
+        self.max_level = self.grid_levels.max()
+        mylog.debug("Preparing grids")
+        for i, grid in enumerate(self.grids):
+            if (i%1e4) == 0: mylog.debug("Prepared % 7i / % 7i grids", i, self.num_grids)
+            grid.filename = None
+            grid._prepare_grid()
+            grid.proc_num = self.grid_procs[i]
+        self.grids = na.array(self.grids, dtype='object')
+        mylog.debug("Prepared")
+
+    def _initialize_grid_arrays(self):
+        AMRHierarchy._initialize_grid_arrays(self)
+        self.grid_procs = na.zeros((self.num_grids,1),'int32')
+
+    def save_data(self, *args, **kwargs):
+        pass
+
+    def _detect_fields(self):
+        self.field_list = list(set(self.stream_handler.get_fields()))
+
+    def _setup_derived_fields(self):
+        self.derived_field_list = []
+        for field in self.parameter_file.field_info:
+            try:
+                fd = self.parameter_file.field_info[field].get_dependencies(
+                            pf = self.parameter_file)
+            except:
+                continue
+            available = na.all([f in self.field_list for f in fd.requested])
+            if available: self.derived_field_list.append(field)
+        for field in self.field_list:
+            if field not in self.derived_field_list:
+                self.derived_field_list.append(field)
+
+    def _setup_classes(self):
+        dd = self._get_data_reader_dict()
+        AMRHierarchy._setup_classes(self, dd)
+        self.object_types.sort()
+
+    def _populate_grid_objects(self):
+        for g in self.grids:
+            g._setup_dx()
+        self.max_level = self.grid_levels.max()
+
+    def _setup_data_io(self):
+        self.io = io_registry[self.data_style](self.stream_handler)
+
+class StreamStaticOutput(StaticOutput):
+    _hierarchy_class = StreamHierarchy
+    _fieldinfo_class = StreamFieldContainer
+    _data_style = 'stream'
+
+    def __init__(self, stream_handler):
+        #if parameter_override is None: parameter_override = {}
+        #self._parameter_override = parameter_override
+        #if conversion_override is None: conversion_override = {}
+        #self._conversion_override = conversion_override
+
+        self.stream_handler = stream_handler
+        StaticOutput.__init__(self, "InMemoryParameterFile", self._data_style)
+
+        self.field_info = self._fieldinfo_class()
+        self.units = {}
+        self.time_units = {}
+
+    def _parse_parameter_file(self):
+        self.basename = self.stream_handler.name
+        self.parameters['CurrentTimeIdentifier'] = time.time()
+        self.unique_identifier = self.parameters["CurrentTimeIdentifier"]
+        self.domain_left_edge = self.stream_handler.domain_left_edge[:]
+        self.domain_right_edge = self.stream_handler.domain_right_edge[:]
+        self.refine_by = self.stream_handler.refine_by
+        self.dimensionality = self.stream_handler.dimensionality
+        self.domain_dimensions = self.stream_handler.domain_dimensions
+        self.current_time = self.stream_handler.simulation_time
+        if self.stream_handler.cosmology_simulation:
+            self.cosmological_simulation = 1
+            self.current_redshift = self.stream_handler.current_redshift
+            self.omega_lambda = self.stream_handler.omega_lambda
+            self.omega_matter = self.stream_handler.omega_matter
+            self.hubble_constant = self.stream_handler.hubble_constant
+        else:
+            self.current_redshift = self.omega_lambda = self.omega_matter = \
+                self.hubble_constant = self.cosmological_simulation = 0.0
+
+    def _set_units(self):
+        pass
+
+    @classmethod
+    def _is_valid(cls, *args, **kwargs):
+        return False


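To sketch how these pieces fit together: a caller builds a StreamHandler
from per-grid arrays, attaches the extra metadata that _parse_parameter_file
reads off the handler (name, domain edges, refine_by, and so on -- note that
StreamHandler.__init__ itself does not set those), and hands the handler to
StreamStaticOutput.  The single-grid values and the FieldBundle stand-in
below are hypothetical:

    import numpy as na
    from yt.frontends.stream.api import StreamHandler, StreamStaticOutput

    class FieldBundle(dict):
        # Hypothetical stand-in mapping grid id -> {field name: array}.
        # StreamHandler.get_fields expects .all_fields, and IOHandlerStream
        # indexes self.fields[grid.id][field].
        @property
        def all_fields(self):
            names = set()
            for grid_fields in self.values():
                names.update(grid_fields.keys())
            return list(names)

    fields = FieldBundle({0: {"Density": na.ones((64, 64, 64), "float64")}})

    handler = StreamHandler(
        left_edges = na.array([[0.0, 0.0, 0.0]]),
        right_edges = na.array([[1.0, 1.0, 1.0]]),
        dimensions = na.array([[64, 64, 64]]),
        levels = na.array([[0]]),
        parent_ids = na.array([-1]),
        particle_count = na.array([[0]]),
        processor_ids = na.array([[0]]),
        fields = fields)

    # _parse_parameter_file reads these straight off the handler:
    handler.name = "InMemoryData"
    handler.domain_left_edge = na.array([0.0, 0.0, 0.0])
    handler.domain_right_edge = na.array([1.0, 1.0, 1.0])
    handler.refine_by = 2
    handler.dimensionality = 3
    handler.domain_dimensions = na.array([64, 64, 64])
    handler.simulation_time = 0.0
    handler.cosmology_simulation = 0

    pf = StreamStaticOutput(handler)
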
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/yt/frontends/stream/definitions.py	Fri Jun 03 17:08:23 2011 -0700
@@ -0,0 +1,26 @@
+"""
+Definitions specific to the Streaming API
+
+Author: Matthew Turk <matthewturk at gmail.com>
+Affiliation: Columbia University
+Homepage: http://yt.enzotools.org/
+
+License:
+  Copyright (C) 2011 Matthew Turk.  All Rights Reserved.
+
+  This file is part of yt.
+
+  yt is free software; you can redistribute it and/or modify
+  it under the terms of the GNU General Public License as published by
+  the Free Software Foundation; either version 3 of the License, or
+  (at your option) any later version.
+
+  This program is distributed in the hope that it will be useful,
+  but WITHOUT ANY WARRANTY; without even the implied warranty of
+  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+  GNU General Public License for more details.
+
+  You should have received a copy of the GNU General Public License
+  along with this program.  If not, see <http://www.gnu.org/licenses/>.
+"""
+


--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/yt/frontends/stream/fields.py	Fri Jun 03 17:08:23 2011 -0700
@@ -0,0 +1,50 @@
+"""
+Fields specific to Streaming data
+
+Author: Matthew Turk <matthewturk at gmail.com>
+Affiliation: KIPAC/SLAC/Stanford
+Homepage: http://yt.enzotools.org/
+License:
+  Copyright (C) 2008-2011 Matthew Turk.  All Rights Reserved.
+
+  This file is part of yt.
+
+  yt is free software; you can redistribute it and/or modify
+  it under the terms of the GNU General Public License as published by
+  the Free Software Foundation; either version 3 of the License, or
+  (at your option) any later version.
+
+  This program is distributed in the hope that it will be useful,
+  but WITHOUT ANY WARRANTY; without even the implied warranty of
+  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+  GNU General Public License for more details.
+
+  You should have received a copy of the GNU General Public License
+  along with this program.  If not, see <http://www.gnu.org/licenses/>.
+"""
+
+import numpy as na
+
+from yt.data_objects.field_info_container import \
+    CodeFieldInfoContainer, \
+    ValidateParameter, \
+    ValidateDataField, \
+    ValidateProperty, \
+    ValidateSpatial, \
+    ValidateGridType
+import yt.data_objects.universal_fields
+from yt.utilities.physical_constants import \
+    mh
+import yt.utilities.amr_utils as amr_utils
+
+class StreamFieldContainer(CodeFieldInfoContainer):
+    """
+    This is a container for Streaming-specific fields.
+    """
+    _shared_state = {}
+    _field_list = {}
+StreamFieldInfo = StreamFieldContainer()
+add_stream_field = StreamFieldInfo.add_field
+
+add_field = add_stream_field
+


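Derived fields can then be registered against this container just as with
any other frontend.  A hypothetical example (the field name, the assumed
Density dependency, and the 1.2 mean-weight factor are all placeholders;
mh comes from the physical_constants import above):

    def _NumberDensity(field, data):
        # Assumes a "Density" field is present in the streamed data
        return data["Density"] / (1.2 * mh)
    add_stream_field("NumberDensity", function=_NumberDensity,
                     units=r"\rm{cm}^{-3}")
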
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/yt/frontends/stream/io.py	Fri Jun 03 17:08:23 2011 -0700
@@ -0,0 +1,71 @@
+"""
+IO functions specific to Streaming data
+
+Author: Matthew Turk <matthewturk at gmail.com>
+Affiliation: KIPAC/SLAC/Stanford
+Homepage: http://yt.enzotools.org/
+License:
+  Copyright (C) 2007-2011 Matthew Turk.  All Rights Reserved.
+
+  This file is part of yt.
+
+  yt is free software; you can redistribute it and/or modify
+  it under the terms of the GNU General Public License as published by
+  the Free Software Foundation; either version 3 of the License, or
+  (at your option) any later version.
+
+  This program is distributed in the hope that it will be useful,
+  but WITHOUT ANY WARRANTY; without even the implied warranty of
+  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+  GNU General Public License for more details.
+
+  You should have received a copy of the GNU General Public License
+  along with this program.  If not, see <http://www.gnu.org/licenses/>.
+"""
+
+from collections import defaultdict
+
+import exceptions
+import os
+
+from yt.utilities.io_handler import \
+    BaseIOHandler, _axis_ids
+from yt.utilities.logger import ytLogger as mylog
+
+class IOHandlerStream(BaseIOHandler):
+
+    _data_style = "stream"
+
+    def __init__(self, stream_handler):
+        self.fields = stream_handler.fields
+        BaseIOHandler.__init__(self)
+
+    def _read_data_set(self, grid, field):
+        # This is where we implement processor-locking
+        #if grid.id not in self.grids_in_memory:
+        #    mylog.error("Was asked for %s but I have %s", grid.id, self.grids_in_memory.keys())
+        #    raise KeyError
+        tr = self.fields[grid.id][field]
+        # If it's particles, we copy.
+        if len(tr.shape) == 1: return tr.copy()
+        # New in-place unit conversion breaks if we don't copy first
+        return tr.copy()
+
+    def modify(self, field):
+        return field
+
+    def _read_field_names(self, grid):
+        return self.fields[grid.id].keys()
+
+    def _read_data_slice(self, grid, field, axis, coord):
+        sl = [slice(None), slice(None), slice(None)]
+        sl[axis] = slice(coord, coord + 1)
+        sl = tuple(reversed(sl))
+        tr = self.fields[grid.id][field][sl].swapaxes(0,2)
+        # In-place unit conversion requires we return a copy
+        return tr.copy()
+
+    @property
+    def _read_exception(self):
+        return KeyError
+


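The index gymnastics in _read_data_slice -- build (x, y, z) slices, pick one
plane along *axis*, reverse the tuple, then swap axes 0 and 2 -- compensate
for the in-memory buffers being stored with the axis order reversed.  A
standalone toy version of the same transformation:

    import numpy as na

    data = na.arange(24).reshape((2, 3, 4))   # toy buffer
    axis, coord = 1, 2                        # hypothetical slice request

    sl = [slice(None), slice(None), slice(None)]
    sl[axis] = slice(coord, coord + 1)
    sl = tuple(reversed(sl))
    plane = data[sl].swapaxes(0, 2)
    print plane.shape                         # -> (4, 1, 2)
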
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/yt/frontends/stream/misc.py	Fri Jun 03 17:08:23 2011 -0700
@@ -0,0 +1,25 @@
+"""
+Miscellaneous functions that are Streaming-specific
+
+Author: Matthew Turk <matthewturk at gmail.com>
+Affiliation: KIPAC/SLAC/Stanford
+Homepage: http://yt.enzotools.org/
+License:
+  Copyright (C) 2008-2011 Matthew Turk.  All Rights Reserved.
+
+  This file is part of yt.
+
+  yt is free software; you can redistribute it and/or modify
+  it under the terms of the GNU General Public License as published by
+  the Free Software Foundation; either version 3 of the License, or
+  (at your option) any later version.
+
+  This program is distributed in the hope that it will be useful,
+  but WITHOUT ANY WARRANTY; without even the implied warranty of
+  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+  GNU General Public License for more details.
+
+  You should have received a copy of the GNU General Public License
+  along with this program.  If not, see <http://www.gnu.org/licenses/>.
+"""
+


--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/yt/frontends/stream/setup.py	Fri Jun 03 17:08:23 2011 -0700
@@ -0,0 +1,12 @@
+#!/usr/bin/env python
+import setuptools
+import os, sys, os.path
+
+def configuration(parent_package='',top_path=None):
+    from numpy.distutils.misc_util import Configuration
+    config = Configuration('stream',parent_package,top_path)
+    config.make_config_py() # installs __config__.py
+    config.make_svn_version_py()
+    return config


--- a/yt/gui/reason/bottle_mods.py	Wed May 25 21:18:42 2011 -0400
+++ b/yt/gui/reason/bottle_mods.py	Fri Jun 03 17:08:23 2011 -0700
@@ -152,6 +152,9 @@
             print "WARNING: %s has no _route_prefix attribute.  Not notifying."
             continue
             w._route_prefix = token
+    repl.activate()
+    while not repl.execution_thread.queue.empty():
+        time.sleep(1)
     print
     print
     print "============================================================================="
@@ -195,7 +198,7 @@
         import yt.utilities.rocket as rocket
         server_type = YTRocketServer
         log = logging.getLogger('Rocket')
-        log.setLevel(logging.INFO)
+        log.setLevel(logging.WARNING)
         kwargs = {'timeout': 600, 'max_threads': 2}
         if repl is not None:
             repl.server = YTRocketServer.server_info


--- a/yt/gui/reason/extdirect_repl.py	Wed May 25 21:18:42 2011 -0400
+++ b/yt/gui/reason/extdirect_repl.py	Fri Jun 03 17:08:23 2011 -0700
@@ -106,24 +106,24 @@
 
     def run(self):
         while 1:
-            print "Checking for a queue ..."
+            #print "Checking for a queue ..."
             try:
                 task = self.queue.get(True, 10)
             except (Queue.Full, Queue.Empty):
                 if self.repl.stopped: return
                 continue
-            print "Received the task", task
+            #print "Received the task", task
             if task['type'] == 'code':
                 self.execute_one(task['code'], task['hide'])
                 self.queue.task_done()
             elif task['type'] == 'add_widget':
-                print "Adding new widget"
+                #print "Adding new widget"
                 self.queue.task_done()
                 new_code = self.repl._add_widget(
                     task['name'], task['widget_data_name'])
-                print "Got this command:", new_code
+                #print "Got this command:", new_code
                 self.repl.execute(new_code, hide=True)
-                print "Executed!"
+                #print "Executed!"
 
     def execute_one(self, code, hide):
         self.repl.executed_cell_texts.append(code)
@@ -237,13 +237,13 @@
         self.execute("data_objects = []", hide = True)
         self.locals['load_script'] = ext_load_script
         self.locals['deliver_image'] = deliver_image
+
+    def activate(self):
         self._setup_logging_handlers()
-
         # Setup our heartbeat
         self.last_heartbeat = time.time()
         self._check_heartbeat()
         self.execution_thread.start()
-        if self.debug: time.sleep(3)
 
     def exception_handler(self, exc):
         result = {'type': 'cell_results',


--- a/yt/mods.py	Wed May 25 21:18:42 2011 -0400
+++ b/yt/mods.py	Fri Jun 03 17:08:23 2011 -0700
@@ -96,7 +96,7 @@
     PlotCollection, PlotCollectionInteractive, \
     get_multi_plot, FixedResolutionBuffer, ObliqueFixedResolutionBuffer, \
     callback_registry, write_bitmap, write_image, annotate_image, \
-    apply_colormap, _scale_image
+    apply_colormap, scale_image
 
 from yt.visualization.volume_rendering.api import \
     ColorTransferFunction, PlanckTransferFunction, ProjectionTransferFunction, \


--- a/yt/utilities/_amr_utils/QuadTree.pyx	Wed May 25 21:18:42 2011 -0400
+++ b/yt/utilities/_amr_utils/QuadTree.pyx	Fri Jun 03 17:08:23 2011 -0700
@@ -31,6 +31,7 @@
 cimport cython
 
 from stdlib cimport malloc, free, abs
+from cython.operator cimport dereference as deref, preincrement as inc
 
 cdef extern from "stdlib.h":
     # NOTE that size_t might not be int
@@ -78,13 +79,14 @@
     node.nvals = nvals
     node.val = <np.float64_t *> malloc(
                 nvals * sizeof(np.float64_t))
-    for i in range(nvals):
-        node.val[i] = val[i]
-    node.weight_val = weight_val
     for i in range(2):
         for j in range(2):
             node.children[i][j] = NULL
     node.level = level
+    if val != NULL:
+        for i in range(nvals):
+            node.val[i] = val[i]
+        node.weight_val = weight_val
     return node
 
 cdef void QTN_free(QuadTreeNode *node):
@@ -103,9 +105,11 @@
     cdef np.int64_t po2[80] 
     cdef QuadTreeNode ***root_nodes
     cdef np.int64_t top_grid_dims[2]
+    cdef int merged
 
     def __cinit__(self, np.ndarray[np.int64_t, ndim=1] top_grid_dims,
                   int nvals):
+        self.merged = 1
         cdef int i, j
         cdef QuadTreeNode *node
         cdef np.int64_t pos[2]
@@ -134,6 +138,90 @@
                 self.root_nodes[i][j] = QTN_initialize(
                     pos, nvals, vals, weight_val, 0)
 
+    cdef int count_total_cells(self, QuadTreeNode *root):
+        cdef int total = 0
+        cdef int i, j
+        if root.children[0][0] == NULL: return 1
+        for i in range(2):
+            for j in range(2):
+                total += self.count_total_cells(root.children[i][j])
+        return total + 1
+
+    cdef int fill_buffer(self, QuadTreeNode *root, int curpos,
+                          np.ndarray[np.int32_t, ndim=1] refined,
+                          np.ndarray[np.float64_t, ndim=2] values,
+                          np.ndarray[np.float64_t, ndim=1] wval):
+        cdef int i, j
+        for i in range(self.nvals):
+            values[curpos, i] = root.val[i]
+        wval[curpos] = root.weight_val
+        if root.children[0][0] != NULL: refined[curpos] = 1
+        else: return curpos+1
+        curpos += 1
+        for i in range(2):
+            for j in range(2):
+                curpos = self.fill_buffer(root.children[i][j], curpos,
+                                 refined, values, wval)
+        return curpos
+
+    cdef int unfill_buffer(self, QuadTreeNode *root, int curpos,
+                          np.ndarray[np.int32_t, ndim=1] refined,
+                          np.ndarray[np.float64_t, ndim=2] values,
+                          np.ndarray[np.float64_t, ndim=1] wval):
+        cdef int i, j
+        for i in range(self.nvals):
+            root.val[i] = values[curpos, i]
+        root.weight_val = wval[curpos]
+        if refined[curpos] == 0: return curpos+1
+        curpos += 1
+        cdef QuadTreeNode *child
+        cdef np.int64_t pos[2]
+        for i in range(2):
+            for j in range(2):
+                pos[0] = root.pos[0]*2 + i
+                pos[1] = root.pos[1]*2 + j
+                child = QTN_initialize(pos, self.nvals, NULL, 0.0, root.level+1)
+                root.children[i][j] = child
+                curpos = self.unfill_buffer(child, curpos, refined, values, wval)
+        return curpos
+
+
+    def frombuffer(self, np.ndarray[np.int32_t, ndim=1] refined,
+                         np.ndarray[np.float64_t, ndim=2] values,
+                         np.ndarray[np.float64_t, ndim=1] wval):
+        self.merged = 1 # Just on the safe side
+        cdef int curpos = 0
+        cdef QuadTreeNode *root
+        for i in range(self.top_grid_dims[0]):
+            for j in range(self.top_grid_dims[1]):
+                curpos = self.unfill_buffer(self.root_nodes[i][j], curpos,
+                                 refined, values, wval)
+
+    def tobuffer(self):
+        cdef int total = 0
+        for i in range(self.top_grid_dims[0]):
+            for j in range(self.top_grid_dims[1]):
+                total += self.count_total_cells(self.root_nodes[i][j])
+        # We now have three buffers:
+        # Refined or not (total,) int32
+        # Values in each node (total, nvals) float64
+        # Weight values in each node (total,) float64
+        cdef np.ndarray[np.int32_t, ndim=1] refined 
+        refined = np.zeros(total, dtype='int32')
+        cdef np.ndarray[np.float64_t, ndim=2] values
+        values = np.zeros((total, self.nvals), dtype='float64')
+        cdef np.ndarray[np.float64_t, ndim=1] wval
+        wval = np.zeros(total, dtype='float64')
+        cdef int curpos = 0
+        for i in range(self.top_grid_dims[0]):
+            for j in range(self.top_grid_dims[1]):
+                curpos = self.fill_buffer(self.root_nodes[i][j], curpos,
+                                 refined, values, wval)
+        return (refined, values, wval)
+
+    def get_args(self):
+        return (self.top_grid_dims[0], self.top_grid_dims[1], self.nvals)
+
     cdef void add_to_position(self,
                  int level, np.int64_t pos[2],
                  np.float64_t *val,
@@ -189,7 +277,7 @@
     @cython.boundscheck(False)
     @cython.wraparound(False)
     def get_all_from_level(self, int level, int count_only = 0):
-        cdef int i, j
+        cdef int i, j, vi
         cdef int total = 0
         vals = []
         for i in range(self.top_grid_dims[0]):
@@ -207,10 +295,15 @@
         cdef np.int64_t *pdata = <np.int64_t *> npos.data
         cdef np.float64_t *vdata = <np.float64_t *> nvals.data
         cdef np.float64_t *wdata = <np.float64_t *> nwvals.data
+        cdef np.float64_t wtoadd
+        cdef np.float64_t *vtoadd = <np.float64_t *> alloca(
+                sizeof(np.float64_t)*self.nvals)
         for i in range(self.top_grid_dims[0]):
             for j in range(self.top_grid_dims[1]):
+                for vi in range(self.nvals): vtoadd[vi] = 0.0
+                wtoadd = 0.0
                 curpos += self.fill_from_level(self.root_nodes[i][j],
-                    level, curpos, pdata, vdata, wdata)
+                    level, curpos, pdata, vdata, wdata, vtoadd, wtoadd)
         return npos, nvals, nwvals
 
     cdef int count_at_level(self, QuadTreeNode *node, int level):
@@ -232,22 +325,33 @@
                               np.int64_t curpos,
                               np.int64_t *pdata,
                               np.float64_t *vdata,
-                              np.float64_t *wdata):
+                              np.float64_t *wdata,
+                              np.float64_t *vtoadd,
+                              np.float64_t wtoadd):
         cdef int i, j
         if node.level == level:
             if node.children[0][0] != NULL: return 0
             for i in range(self.nvals):
-                vdata[self.nvals * curpos + i] = node.val[i]
-            wdata[curpos] = node.weight_val
+                vdata[self.nvals * curpos + i] = node.val[i] + vtoadd[i]
+            wdata[curpos] = node.weight_val + wtoadd
             pdata[curpos * 2] = node.pos[0]
             pdata[curpos * 2 + 1] = node.pos[1]
             return 1
         if node.children[0][0] == NULL: return 0
         cdef np.int64_t added = 0
+        if self.merged == 1:
+            for i in range(self.nvals):
+                vtoadd[i] += node.val[i]
+            wtoadd += node.weight_val
         for i in range(2):
             for j in range(2):
                 added += self.fill_from_level(node.children[i][j],
-                        level, curpos + added, pdata, vdata, wdata)
+                        level, curpos + added, pdata, vdata, wdata,
+                        vtoadd, wtoadd)
+        if self.merged == 1:
+            for i in range(self.nvals):
+                vtoadd[i] -= node.val[i]
+            wtoadd -= node.weight_val
         return added
 
     def __dealloc__(self):
@@ -257,3 +361,37 @@
                 QTN_free(self.root_nodes[i][j])
             free(self.root_nodes[i])
         free(self.root_nodes)
+
+cdef void QTN_merge_nodes(QuadTreeNode *n1, QuadTreeNode *n2):
+    # We always add n2's values to n1 first; after that, four cases remain:
+    # 1. If neither node has refinement, we are done.
+    # 2. If both have refinement, we call QTN_merge_nodes on all four children.
+    # 3. If n2 has refinement and n1 does not, we detach n2's children and
+    #    attach them to n1.
+    # 4. If n1 has refinement and n2 does not, we are done.
+    cdef int i, j
+
+    QTN_add_value(n1, n2.val, n2.weight_val)
+    if n1.children[0][0] == n2.children[0][0] == NULL:
+        pass
+    elif n1.children[0][0] != NULL and n2.children[0][0] != NULL:
+        for i in range(2):
+            for j in range(2):
+                QTN_merge_nodes(n1.children[i][j], n2.children[i][j])
+    elif n1.children[0][0] == NULL and n2.children[0][0] != NULL:
+        for i in range(2):
+            for j in range(2):
+                n1.children[i][j] = n2.children[i][j]
+                n2.children[i][j] = NULL
+    elif n1.children[0][0] != NULL and n2.children[0][0] == NULL:
+        pass
+    else:
+        raise RuntimeError
+
+def merge_quadtrees(QuadTree qt1, QuadTree qt2):
+    cdef int i, j
+    for i in range(qt1.top_grid_dims[0]):
+        for j in range(qt1.top_grid_dims[1]):
+            QTN_merge_nodes(qt1.root_nodes[i][j],
+                            qt2.root_nodes[i][j])
+    qt1.merged = 1


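The tobuffer/frombuffer pair serializes the tree in preorder: each node
contributes one refined flag plus its values and weight, and a set flag
means the next entries describe its four children in order.  A pure-Python
sketch of the same layout (flags and a single value per node; the Cython
node structs and the weight column are elided):

    def fill(node, refined, values):
        # node is (value, children); children is None or a list of 4 nodes
        values.append(node[0])
        refined.append(0 if node[1] is None else 1)
        if node[1] is not None:
            for child in node[1]:
                fill(child, refined, values)

    def unfill(refined, values, pos=0):
        value = values[pos]
        if refined[pos] == 0:
            return (value, None), pos + 1
        pos += 1
        children = []
        for _ in range(4):
            child, pos = unfill(refined, values, pos)
            children.append(child)
        return (value, children), pos

    leaf = lambda v: (v, None)
    tree = (1.0, [leaf(2.0), leaf(3.0), (4.0, [leaf(5.0)] * 4), leaf(6.0)])
    refined, values = [], []
    fill(tree, refined, values)
    rebuilt, pos = unfill(refined, values)
    assert rebuilt == tree
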
--- a/yt/utilities/command_line.py	Wed May 25 21:18:42 2011 -0400
+++ b/yt/utilities/command_line.py	Fri Jun 03 17:08:23 2011 -0700
@@ -26,7 +26,8 @@
 from yt.mods import *
 from yt.funcs import *
 import cmdln as cmdln
-import optparse, os, os.path, math, sys, time, subprocess
+import optparse, os, os.path, math, sys, time, subprocess, getpass, tempfile
+import urllib, urllib2, base64
 
 def _fix_pf(arg):
     if os.path.isdir("%s" % arg) and \
@@ -277,6 +278,22 @@
     version = _get_hg_version(path)[:12]
     return version
 
+# This code snippet is modified from Georg Brandl
+def bb_apicall(endpoint, data, use_pass = True):
+    uri = 'https://api.bitbucket.org/1.0/%s/' % endpoint
+    # since bitbucket doesn't return the required WWW-Authenticate header when
+    # making a request without Authorization, we cannot use the standard urllib2
+    # auth handlers; we have to add the requisite header from the start
+    if data is not None:
+        data = urllib.urlencode(data)
+    req = urllib2.Request(uri, data)
+    if use_pass:
+        username = raw_input("Bitbucket Username? ")
+        password = getpass.getpass()
+        upw = '%s:%s' % (username, password)
+        req.add_header('Authorization', 'Basic %s' % base64.b64encode(upw).strip())
+    return urllib2.urlopen(req).read()
+
 class YTCommands(cmdln.Cmdln):
     name="yt"
 
@@ -609,7 +626,7 @@
                   help="Description for this pasteboard entry")
     def do_pasteboard(self, subcmd, opts, arg):
         """
-        Place a file into the user's pasteboard
+        Place a file into your pasteboard.
         """
         if opts.desc is None: raise RuntimeError
         from yt.utilities.pasteboard import PostInventory
@@ -620,9 +637,103 @@
                   default = None, dest="output_fn",
                   help="File to output to; else, print.")
     def do_pastegrab(self, subcmd, opts, username, paste_id):
+        """
+        Download from your or another user's pasteboard.
+        """
         from yt.utilities.pasteboard import retrieve_pastefile
         retrieve_pastefile(username, paste_id, opts.output_fn)
 
+    def do_bugreport(self, subcmd, opts):
+        """
+        Report a bug in yt
+        """
+        print "==============================================================="
+        print
+        print "Hi there!  Welcome to the yt bugreport taker."
+        print
+        print "==============================================================="
+        print
+        print "At any time in advance of the upload of the bug, you"
+        print "should feel free to ctrl-C out and submit the bug "
+        print "report manually by going here:"
+        print "   http://hg.enzotools.org/yt/issues/new"
+        print
+        print "First off, how about a nice, pithy summary of the bug?"
+        print
+        try:
+            current_version = get_yt_version()
+        except:
+            current_version = "Unavailable"
+        summary = raw_input("Summary? ")
+        bugtype = "bug"
+        data = dict(title = summary, type=bugtype)
+        print "Okay, now let's get a bit more information."
+        print
+        print "Remember that if you want to submit a traceback, you can run"
+        print "any script with --paste or --detailed-paste to submit it to"
+        print "the pastebin."
+        if "EDITOR" in os.environ:
+            print
+            print "Press enter to spawn your editor, %s" % os.environ["EDITOR"]
+            loki = raw_input()
+            tf = tempfile.NamedTemporaryFile(delete=False)
+            fn = tf.name
+            tf.close()
+            popen = subprocess.call("$EDITOR %s" % fn, shell = True)
+            content = open(fn).read()
+            try:
+                os.unlink(fn)
+            except:
+                pass
+        else:
+            print
+            print "Couldn't find an $EDITOR variable.  So, let's just take"
+            print "take input here.  Type up your summary until you're ready"
+            print "to be done, and to signal you're done, type --- by itself"
+            print "on a line to signal your completion."
+            print
+            print "(okay, type now)"
+            print
+            lines = []
+            while 1:
+                line = raw_input()
+                if line.strip() == "---": break
+                lines.append(line)
+            content = "\n".join(lines)
+        content = "Reporting Version: %s\n\n%s" % (current_version, content)
+        endpoint = "repositories/yt_analysis/yt/issues"
+        data['content'] = content
+        print
+        print "==============================================================="
+        print 
+        print "Okay, we're going to submit with this:"
+        print
+        print "Summary: %s" % (data['title'])
+        print
+        print "---"
+        print content
+        print "---"
+        print
+        print "==============================================================="
+        print
+        print "Is that okay?  If not, hit ctrl-c.  Otherwise, enter means"
+        print "'submit'.  Next we'll ask for your Bitbucket Username."
+        print "If you don't have one, run the 'yt bootstrap_dev' command."
+        print
+        loki = raw_input()
+        retval = bb_apicall(endpoint, data, use_pass=True)
+        import json
+        retval = json.loads(retval)
+        url = "http://hg.enzotools.org/yt/issue/%s" % retval['local_id']
+        print 
+        print "==============================================================="
+        print
+        print "Thanks for your bug report!  You can view it here:"
+        print "   %s" % url
+        print
+        print "Keep in touch!"
+        print
+
     def do_bootstrap_dev(self, subcmd, opts):
         """
         Bootstrap a yt development environment
@@ -1009,6 +1120,7 @@
         bottle.debug()
         uuid_serve_functions(open_browser=opts.open_browser,
                     port=int(opts.port), repl=hr)
+
     
     def do_remote(self, subcmd, opts):
         import getpass, sys, socket, time, webbrowser


--- a/yt/utilities/parallel_tools/parallel_analysis_interface.py	Wed May 25 21:18:42 2011 -0400
+++ b/yt/utilities/parallel_tools/parallel_analysis_interface.py	Fri Jun 03 17:08:23 2011 -0700
@@ -36,6 +36,8 @@
 from yt.utilities.definitions import \
     x_dict, y_dict
 import yt.utilities.logger
+from yt.utilities.amr_utils import \
+    QuadTree, merge_quadtrees
 
 exe_name = os.path.basename(sys.executable)
 # At import time, we determined whether or not we're being run in parallel.
@@ -1253,6 +1255,74 @@
         if not obj._distributed: return True
         return (obj._owner == MPI.COMM_WORLD.rank)
 
+    def _send_quadtree(self, target, qt, tgd, args):
+        sizebuf = na.zeros(1, 'int64')
+        buf = qt.tobuffer()
+        sizebuf[0] = buf[0].size
+        MPI.COMM_WORLD.Send([sizebuf, MPI.LONG], dest=target)
+        MPI.COMM_WORLD.Send([buf[0], MPI.INT], dest=target)
+        MPI.COMM_WORLD.Send([buf[1], MPI.DOUBLE], dest=target)
+        MPI.COMM_WORLD.Send([buf[2], MPI.DOUBLE], dest=target)
+        
+    def _recv_quadtree(self, target, tgd, args):
+        sizebuf = na.zeros(1, 'int64')
+        MPI.COMM_WORLD.Recv(sizebuf, source=target)
+        buf = [na.empty((sizebuf[0],), 'int32'),
+               na.empty((sizebuf[0], args[2]),'float64'),
+               na.empty((sizebuf[0],),'float64')]
+        MPI.COMM_WORLD.Recv([buf[0], MPI.INT], source=target)
+        MPI.COMM_WORLD.Recv([buf[1], MPI.DOUBLE], source=target)
+        MPI.COMM_WORLD.Recv([buf[2], MPI.DOUBLE], source=target)
+        qt = QuadTree(tgd, args[2])
+        qt.frombuffer(*buf)
+        return qt
+
+    @parallel_passthrough
+    def merge_quadtree_buffers(self, qt):
+        # This is a modified version of pairwise reduction from Lisandro Dalcin,
+        # in the reductions demo of mpi4py
+        size = MPI.COMM_WORLD.size
+        rank = MPI.COMM_WORLD.rank
+
+        mask = 1
+
+        args = qt.get_args() # Will always be the same
+        tgd = na.array([args[0], args[1]], dtype='int64')
+        sizebuf = na.zeros(1, 'int64')
+
+        while mask < size:
+            if (mask & rank) != 0:
+                target = (rank & ~mask) % size
+                print "SENDING FROM %02i to %02i" % (rank, target)
+                self._send_quadtree(target, qt, tgd, args)
+                #qt = self._recv_quadtree(target, tgd, args)
+            else:
+                target = (rank | mask)
+                if target < size:
+                    print "RECEIVING FROM %02i on %02i" % (target, rank)
+                    qto = self._recv_quadtree(target, tgd, args)
+                    merge_quadtrees(qt, qto)
+                    del qto
+                    #self._send_quadtree(target, qt, tgd, args)
+            mask <<= 1
+
+        if rank == 0:
+            buf = qt.tobuffer()
+            sizebuf[0] = buf[0].size
+        MPI.COMM_WORLD.Bcast([sizebuf, MPI.LONG], root=0)
+        if rank != 0:
+            buf = [na.empty((sizebuf[0],), 'int32'),
+                   na.empty((sizebuf[0], args[2]),'float64'),
+                   na.empty((sizebuf[0],),'float64')]
+        MPI.COMM_WORLD.Bcast([buf[0], MPI.INT], root=0)
+        MPI.COMM_WORLD.Bcast([buf[1], MPI.DOUBLE], root=0)
+        MPI.COMM_WORLD.Bcast([buf[2], MPI.DOUBLE], root=0)
+        self.refined = buf[0]
+        if rank != 0:
+            qt = QuadTree(tgd, args[2])
+            qt.frombuffer(*buf)
+        return qt
+
 __tocast = 'c'
 
 def _send_array(arr, dest, tag = 0):


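The mask loop in merge_quadtree_buffers is a binomial-tree reduction: in
round "mask", every rank with that bit set ships its tree to the partner
rank with the bit cleared, and the partner merges it, so rank 0 holds the
full result after log2(size) rounds.  (Ranks that have already sent keep
exchanging in later rounds, which is redundant but harmless, since the
final broadcast from rank 0 overwrites everything anyway.)  A plain-Python
simulation of the pattern, with integer sums standing in for quadtrees and
a hypothetical communicator size:

    size = 8
    values = dict((rank, rank + 1) for rank in range(size))

    mask = 1
    while mask < size:
        sends = {}
        for rank in range(size):
            if (mask & rank) != 0:
                target = (rank & ~mask) % size
                sends[target] = values[rank]        # _send_quadtree
        for rank in range(size):
            if (mask & rank) == 0:
                target = rank | mask
                if target < size:
                    values[rank] += sends[rank]     # merge_quadtrees
        mask <<= 1

    print values[0]   # sum(1..8) == 36: rank 0 holds the full reduction
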
--- a/yt/visualization/_colormap_data.py	Wed May 25 21:18:42 2011 -0400
+++ b/yt/visualization/_colormap_data.py	Fri Jun 03 17:08:23 2011 -0700
@@ -4,6 +4,188 @@
 color_map_luts = {}
 
 
+### RdBu ###
+
+color_map_luts['RdBu'] = \
+   (
+array([ 0.40392157,  0.4154556 ,  0.42698963,  0.43852365,  0.45005768,
+        0.4615917 ,  0.47312573,  0.48465976,  0.49619378,  0.50772781,
+        0.51926183,  0.53079586,  0.54232988,  0.55386391,  0.56539794,
+        0.57693196,  0.58846599,  0.60000001,  0.61153404,  0.62306807,
+        0.63460209,  0.64613612,  0.65767014,  0.66920417,  0.68073819,
+        0.69227222,  0.7008074 ,  0.70634373,  0.71188006,  0.7174164 ,
+        0.72295273,  0.72848906,  0.73402539,  0.73956172,  0.74509805,
+        0.75063439,  0.75617072,  0.76170705,  0.76724338,  0.77277971,
+        0.77831605,  0.78385238,  0.78938871,  0.79492504,  0.80046137,
+        0.8059977 ,  0.81153404,  0.81707037,  0.8226067 ,  0.82814303,
+        0.83367936,  0.8392157 ,  0.84382931,  0.84844292,  0.85305653,
+        0.85767014,  0.86228375,  0.86689736,  0.87151096,  0.87612457,
+        0.88073818,  0.88535179,  0.8899654 ,  0.89457901,  0.89919262,
+        0.90380623,  0.90841984,  0.91303345,  0.91764706,  0.92226067,
+        0.92687428,  0.93148789,  0.9361015 ,  0.94071511,  0.94532872,
+        0.94994233,  0.95455594,  0.95755479,  0.95893887,  0.96032296,
+        0.96170704,  0.96309112,  0.9644752 ,  0.96585929,  0.96724337,
+        0.96862745,  0.97001154,  0.97139562,  0.9727797 ,  0.97416378,
+        0.97554787,  0.97693195,  0.97831603,  0.97970012,  0.9810842 ,
+        0.98246828,  0.98385237,  0.98523645,  0.98662053,  0.98800461,
+        0.9893887 ,  0.99077278,  0.99215686,  0.99123414,  0.99031142,
+        0.9893887 ,  0.98846598,  0.98754325,  0.98662053,  0.98569781,
+        0.98477509,  0.98385237,  0.98292964,  0.98200692,  0.9810842 ,
+        0.98016148,  0.97923876,  0.97831603,  0.97739331,  0.97647059,
+        0.97554787,  0.97462515,  0.97370242,  0.9727797 ,  0.97185698,
+        0.97093426,  0.97001154,  0.96908881,  0.9657055 ,  0.95986159,
+        0.95401769,  0.94817378,  0.94232988,  0.93648597,  0.93064206,
+        0.92479816,  0.91895425,  0.91311035,  0.90726644,  0.90142254,
+        0.89557863,  0.88973472,  0.88389082,  0.87804691,  0.87220301,
+        0.8663591 ,  0.86051519,  0.85467129,  0.84882738,  0.84298348,
+        0.83713957,  0.83129567,  0.82545176,  0.81960785,  0.80991927,
+        0.80023069,  0.79054211,  0.78085353,  0.77116495,  0.76147637,
+        0.75178779,  0.74209921,  0.73241063,  0.72272205,  0.71303347,
+        0.70334489,  0.6936563 ,  0.68396772,  0.67427914,  0.66459056,
+        0.65490198,  0.6452134 ,  0.63552482,  0.62583624,  0.61614766,
+        0.60645908,  0.5967705 ,  0.58708192,  0.57739334,  0.56647446,
+        0.55432528,  0.54217611,  0.53002694,  0.51787776,  0.50572859,
+        0.49357942,  0.48143024,  0.46928107,  0.45713189,  0.44498272,
+        0.43283355,  0.42068437,  0.4085352 ,  0.39638602,  0.38423685,
+        0.37208768,  0.3599385 ,  0.34778933,  0.33564016,  0.32349098,
+        0.31134181,  0.29919263,  0.28704346,  0.27489429,  0.26274511,
+        0.25751635,  0.2522876 ,  0.24705884,  0.24183008,  0.23660132,
+        0.23137256,  0.2261438 ,  0.22091504,  0.21568629,  0.21045753,
+        0.20522877,  0.20000001,  0.19477125,  0.18954249,  0.18431374,
+        0.17908498,  0.17385622,  0.16862746,  0.1633987 ,  0.15816994,
+        0.15294118,  0.14771243,  0.14248367,  0.13725491,  0.13202615,
+        0.12725875,  0.12295272,  0.11864668,  0.11434064,  0.11003461,
+        0.10572857,  0.10142254,  0.0971165 ,  0.09281046,  0.08850443,
+        0.08419839,  0.07989235,  0.07558632,  0.07128028,  0.06697424,
+        0.06266821,  0.05836217,  0.05405613,  0.0497501 ,  0.04544406,
+        0.04113803,  0.03683199,  0.03252595,  0.02821992,  0.02391388,
+        0.01960784]),
+array([ 0.        ,  0.00369089,  0.00738178,  0.01107266,  0.01476355,
+        0.01845444,  0.02214533,  0.02583622,  0.02952711,  0.03321799,
+        0.03690888,  0.04059977,  0.04429066,  0.04798155,  0.05167243,
+        0.05536332,  0.05905421,  0.0627451 ,  0.06643599,  0.07012688,
+        0.07381776,  0.07750865,  0.08119954,  0.08489043,  0.08858132,
+        0.0922722 ,  0.09965398,  0.11072665,  0.12179931,  0.13287197,
+        0.14394464,  0.1550173 ,  0.16608997,  0.17716263,  0.1882353 ,
+        0.19930796,  0.21038063,  0.22145329,  0.23252596,  0.24359862,
+        0.25467129,  0.26574395,  0.27681661,  0.28788928,  0.29896194,
+        0.31003461,  0.32110727,  0.33217994,  0.3432526 ,  0.35432527,
+        0.36539793,  0.3764706 ,  0.3870819 ,  0.3976932 ,  0.40830451,
+        0.41891581,  0.42952712,  0.44013842,  0.45074972,  0.46136103,
+        0.47197233,  0.48258363,  0.49319494,  0.50380624,  0.51441755,
+        0.52502885,  0.53564015,  0.54625146,  0.55686276,  0.56747407,
+        0.57808537,  0.58869667,  0.59930798,  0.60991928,  0.62053058,
+        0.63114189,  0.64175319,  0.65121109,  0.65951559,  0.66782009,
+        0.67612459,  0.68442908,  0.69273358,  0.70103808,  0.70934258,
+        0.71764708,  0.72595157,  0.73425607,  0.74256057,  0.75086507,
+        0.75916956,  0.76747406,  0.77577856,  0.78408306,  0.79238756,
+        0.80069205,  0.80899655,  0.81730105,  0.82560555,  0.83391004,
+        0.84221454,  0.85051904,  0.85882354,  0.86312957,  0.86743561,
+        0.87174165,  0.87604768,  0.88035372,  0.88465975,  0.88896579,
+        0.89327182,  0.89757786,  0.9018839 ,  0.90618993,  0.91049597,
+        0.914802  ,  0.91910804,  0.92341408,  0.92772011,  0.93202615,
+        0.93633218,  0.94063822,  0.94494426,  0.94925029,  0.95355633,
+        0.95786236,  0.9621684 ,  0.96647443,  0.96724337,  0.9644752 ,
+        0.96170704,  0.95893887,  0.95617071,  0.95340254,  0.95063437,
+        0.94786621,  0.94509804,  0.94232988,  0.93956171,  0.93679354,
+        0.93402538,  0.93125721,  0.92848905,  0.92572088,  0.92295272,
+        0.92018455,  0.91741638,  0.91464822,  0.91188005,  0.90911189,
+        0.90634372,  0.90357555,  0.90080739,  0.89803922,  0.89311804,
+        0.88819685,  0.88327567,  0.87835449,  0.8734333 ,  0.86851212,
+        0.86359093,  0.85866975,  0.85374857,  0.84882738,  0.8439062 ,
+        0.83898502,  0.83406383,  0.82914265,  0.82422146,  0.81930028,
+        0.8143791 ,  0.80945791,  0.80453673,  0.79961554,  0.79469436,
+        0.78977318,  0.78485199,  0.77993081,  0.77500963,  0.76870436,
+        0.76101501,  0.75332566,  0.74563631,  0.73794696,  0.73025761,
+        0.72256826,  0.71487891,  0.70718956,  0.69950021,  0.69181086,
+        0.68412151,  0.67643216,  0.66874281,  0.66105346,  0.65336411,
+        0.64567476,  0.63798541,  0.63029606,  0.62260671,  0.61491736,
+        0.60722801,  0.59953866,  0.59184931,  0.58415996,  0.57647061,
+        0.5695502 ,  0.56262978,  0.55570937,  0.54878895,  0.54186853,
+        0.53494812,  0.5280277 ,  0.52110729,  0.51418687,  0.50726645,
+        0.50034604,  0.49342562,  0.48650521,  0.47958479,  0.47266437,
+        0.46574396,  0.45882354,  0.45190313,  0.44498271,  0.43806229,
+        0.43114188,  0.42422146,  0.41730105,  0.41038063,  0.40346021,
+        0.39584776,  0.38754326,  0.37923876,  0.37093426,  0.36262976,
+        0.35432526,  0.34602077,  0.33771627,  0.32941177,  0.32110727,
+        0.31280277,  0.30449827,  0.29619378,  0.28788928,  0.27958478,
+        0.27128028,  0.26297578,  0.25467128,  0.24636679,  0.23806229,
+        0.22975779,  0.22145329,  0.21314879,  0.20484429,  0.1965398 ,
+        0.1882353 ]),
+array([ 0.12156863,  0.12341407,  0.12525952,  0.12710496,  0.1289504 ,
+        0.13079585,  0.13264129,  0.13448674,  0.13633218,  0.13817763,
+        0.14002307,  0.14186851,  0.14371396,  0.1455594 ,  0.14740485,
+        0.14925029,  0.15109574,  0.15294118,  0.15478662,  0.15663207,
+        0.15847751,  0.16032296,  0.1621684 ,  0.16401385,  0.16585929,
+        0.16770473,  0.17124184,  0.17647059,  0.18169935,  0.18692811,
+        0.19215687,  0.19738563,  0.20261439,  0.20784314,  0.2130719 ,
+        0.21830066,  0.22352942,  0.22875818,  0.23398694,  0.23921569,
+        0.24444445,  0.24967321,  0.25490197,  0.26013073,  0.26535949,
+        0.27058825,  0.275817  ,  0.28104576,  0.28627452,  0.29150328,
+        0.29673204,  0.3019608 ,  0.31011151,  0.31826222,  0.32641293,
+        0.33456364,  0.34271436,  0.35086507,  0.35901578,  0.36716649,
+        0.3753172 ,  0.38346792,  0.39161863,  0.39976934,  0.40792005,
+        0.41607076,  0.42422148,  0.43237219,  0.4405229 ,  0.44867361,
+        0.45682432,  0.46497504,  0.47312575,  0.48127646,  0.48942717,
+        0.49757788,  0.50572859,  0.5151096 ,  0.5257209 ,  0.53633221,
+        0.54694351,  0.55755481,  0.56816612,  0.57877742,  0.58938872,
+        0.60000002,  0.61061133,  0.62122263,  0.63183393,  0.64244523,
+        0.65305654,  0.66366784,  0.67427914,  0.68489045,  0.69550175,
+        0.70611305,  0.71672435,  0.72733566,  0.73794696,  0.74855826,
+        0.75916956,  0.76978087,  0.78039217,  0.78777395,  0.79515572,
+        0.8025375 ,  0.80991927,  0.81730105,  0.82468282,  0.8320646 ,
+        0.83944638,  0.84682815,  0.85420993,  0.8615917 ,  0.86897348,
+        0.87635526,  0.88373703,  0.89111881,  0.89850058,  0.90588236,
+        0.91326413,  0.92064591,  0.92802769,  0.93540946,  0.94279124,
+        0.95017301,  0.95755479,  0.96493656,  0.9680892 ,  0.96701269,
+        0.96593618,  0.96485967,  0.96378316,  0.96270665,  0.96163014,
+        0.96055364,  0.95947713,  0.95840062,  0.95732411,  0.9562476 ,
+        0.95517109,  0.95409458,  0.95301807,  0.95194156,  0.95086505,
+        0.94978855,  0.94871204,  0.94763553,  0.94655902,  0.94548251,
+        0.944406  ,  0.94332949,  0.94225298,  0.94117647,  0.93840831,
+        0.93564014,  0.93287198,  0.93010381,  0.92733564,  0.92456748,
+        0.92179931,  0.91903115,  0.91626298,  0.91349481,  0.91072665,
+        0.90795848,  0.90519032,  0.90242215,  0.89965399,  0.89688582,
+        0.89411765,  0.89134949,  0.88858132,  0.88581316,  0.88304499,
+        0.88027682,  0.87750866,  0.87474049,  0.87197233,  0.86851212,
+        0.86435987,  0.86020762,  0.85605537,  0.85190312,  0.84775087,
+        0.84359863,  0.83944638,  0.83529413,  0.83114188,  0.82698963,
+        0.82283738,  0.81868513,  0.81453288,  0.81038063,  0.80622839,
+        0.80207614,  0.79792389,  0.79377164,  0.78961939,  0.78546714,
+        0.78131489,  0.77716264,  0.77301039,  0.76885815,  0.7647059 ,
+        0.7611688 ,  0.75763169,  0.75409459,  0.75055749,  0.74702039,
+        0.74348329,  0.73994619,  0.73640909,  0.73287199,  0.72933489,
+        0.72579779,  0.72226069,  0.71872358,  0.71518648,  0.71164938,
+        0.70811228,  0.70457518,  0.70103808,  0.69750098,  0.69396388,
+        0.69042678,  0.68688968,  0.68335258,  0.67981547,  0.67627837,
+        0.66874281,  0.65720878,  0.64567476,  0.63414073,  0.62260671,
+        0.61107268,  0.59953866,  0.58800463,  0.5764706 ,  0.56493658,
+        0.55340255,  0.54186853,  0.5303345 ,  0.51880047,  0.50726645,
+        0.49573242,  0.4841984 ,  0.47266437,  0.46113034,  0.44959632,
+        0.43806229,  0.42652827,  0.41499424,  0.40346022,  0.39192619,
+        0.38039216]),
+array([ 1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,
+        1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,
+        1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,
+        1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,
+        1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,
+        1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,
+        1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,
+        1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,
+        1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,
+        1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,
+        1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,
+        1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,
+        1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,
+        1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,
+        1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,
+        1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,
+        1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,
+        1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,
+        1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,
+        1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.]),
+   )
+
 ### algae ###
 
 color_map_luts['algae'] = \
@@ -223,162 +405,159 @@
 
 color_map_luts['gist_stern'] = \
    (
-array([ 0.        ,  0.06588235,  0.13176471,  0.19764706,  0.26352941,
-        0.32941176,  0.39529412,  0.46326797,  0.53098039,  0.59686274,
-        0.68627448,  0.79921568,  0.86509803,  0.93150326,  0.99477124,
-        0.97647059,  0.95816995,  0.9398693 ,  0.92156865,  0.90326799,
-        0.88235296,  0.86274512,  0.84444446,  0.82614381,  0.80784316,
-        0.78823532,  0.76732028,  0.74901963,  0.73071897,  0.71241832,
-        0.69411767,  0.67581701,  0.63712427,  0.61568633,  0.59738568,
-        0.57908502,  0.55843144,  0.53856215,  0.52026149,  0.50196083,
-        0.48366016,  0.4653595 ,  0.44627454,  0.42483663,  0.40653597,
-        0.38823532,  0.36993467,  0.35163401,  0.33019611,  0.31111114,
-        0.29281048,  0.27450983,  0.25620917,  0.22692813,  0.19607845,
-        0.17777779,  0.15947714,  0.14117648,  0.12235296,  0.1006536 ,
-        0.08235295,  0.06405229,  0.04575164,  0.02745098,  0.19032622,
[Several hundred lines of auto-generated colormap channel tables elided here: the old "-" arrays are replaced wholesale by the new "+" arrays.]


--- a/yt/visualization/api.py	Wed May 25 21:18:42 2011 -0400
+++ b/yt/visualization/api.py	Fri Jun 03 17:08:23 2011 -0700
@@ -49,7 +49,7 @@
     splat_points, \
     annotate_image, \
     apply_colormap, \
-    _scale_image
+    scale_image
 
 from plot_modifications import \
     PlotCallback, \


--- a/yt/visualization/image_writer.py	Wed May 25 21:18:42 2011 -0400
+++ b/yt/visualization/image_writer.py	Fri Jun 03 17:08:23 2011 -0700
@@ -29,7 +29,19 @@
 import _colormap_data as cmd
 import yt.utilities.amr_utils as au
 
-def _scale_image(image):
+def scale_image(image):
+    r"""Scale an image ([NxNxM] where M = 1-4) to be uint8 and values scaled 
+    from [0,255].
+
+    Parameters
+    ----------
+    image : array_like or tuple of image info
+
+    Examples
+    --------
+
+        >>> image = scale_image(image)
+    """
     if isinstance(image, na.ndarray) and image.dtype == na.uint8:
         return image
     if isinstance(image, (types.TupleType, types.ListType)):
@@ -86,16 +98,16 @@
         >>> multi_image_composite("multi_channel1.png", red_channel, blue_channel)
 
     """
-    red_channel = _scale_image(red_channel)
-    blue_channel = _scale_image(blue_channel)
+    red_channel = scale_image(red_channel)
+    blue_channel = scale_image(blue_channel)
     if green_channel is None:
         green_channel = na.zeros(red_channel.shape, dtype='uint8')
     else:
-        green_channel = _scale_image(green_channel)
+        green_channel = scale_image(green_channel)
     if alpha_channel is None:
         alpha_channel = na.zeros(red_channel.shape, dtype='uint8') + 255
     else:
-        alpha_channel = _scale_image(alpha_channel) 
+        alpha_channel = scale_image(alpha_channel) 
     image = na.array([red_channel, green_channel, blue_channel, alpha_channel])
     image = image.transpose().copy() # Have to make sure it's contiguous 
     au.write_png(image, fn)
@@ -279,9 +291,9 @@
     return mapped.copy("C")
 
 def strip_colormap_data(fn = "color_map_data.py",
-            cmaps = ("jet", "algae", "hot", "gist_stern")):
+            cmaps = ("jet", "algae", "hot", "gist_stern", "RdBu")):
     import pprint
-    import _colormap_data as rcm
+    import color_maps as rcm
     f = open(fn, "w")
     f.write("### Auto-generated colormap tables, taken from Matplotlib ###\n\n")
     f.write("from numpy import array\n")


--- a/yt/visualization/plot_collection.py	Wed May 25 21:18:42 2011 -0400
+++ b/yt/visualization/plot_collection.py	Fri Jun 03 17:08:23 2011 -0700
@@ -1707,7 +1707,8 @@
             canvas = FigureCanvasSVG(plot._figure)
             send_svg_canvas(canvas)
 
-def get_multi_plot(nx, ny, colorbar = 'vertical', bw = 4, dpi=300):
+def get_multi_plot(nx, ny, colorbar = 'vertical', bw = 4, dpi=300,
+                   cbar_padding = 0.4):
     r"""Construct a multiple axes plot object, with or without a colorbar, into
     which multiple plots may be inserted.
 
@@ -1750,11 +1751,11 @@
     if colorbar is None:
         fudge_x = fudge_y = 1.0
     elif colorbar.lower() == 'vertical':
-        fudge_x = nx/(0.25+nx)
+        fudge_x = nx/(cbar_padding+nx)
         fudge_y = 1.0
     elif colorbar.lower() == 'horizontal':
         fudge_x = 1.0
-        fudge_y = ny/(0.40+ny)
+        fudge_y = ny/(cbar_padding+ny)
     fig = figure.Figure((bw*nx/fudge_x, bw*ny/fudge_y), dpi=dpi)
     from _mpl_imports import FigureCanvasAgg
     fig.set_canvas(FigureCanvasAgg(fig))
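
To see what the new cbar_padding knob controls, here is the figure-width arithmetic from this hunk, worked as a standalone sketch (the numbers are illustrative):

    # Vertical-colorbar case, straight from the diff above:
    nx, bw, cbar_padding = 2, 4, 0.4
    fudge_x = nx / (cbar_padding + nx)  # 2 / 2.4 = 0.833...
    width = bw * nx / fudge_x           # 9.6 inches: 8.0 for panels, 1.6 for the colorbar

A larger cbar_padding shrinks fudge_x and thus widens the figure, leaving more horizontal room for the colorbar.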


--- a/yt/visualization/profile_plotter.py	Wed May 25 21:18:42 2011 -0400
+++ b/yt/visualization/profile_plotter.py	Fri Jun 03 17:08:23 2011 -0700
@@ -37,7 +37,8 @@
     BinnedProfile1D, \
     BinnedProfile2D
 from .plot_types import ProfilePlot, PhasePlot
-from .tick_locators import LogLocator
+from .tick_locators import LogLocator, LinearLocator
+from yt.utilities.logger import ytLogger as mylog
 
 def invalidate_plot(f):
     @wraps(f)
@@ -56,6 +57,8 @@
     def calculate_ticks(self):
         if self.scale == 'log':
             locator = LogLocator()
+        elif self.scale == 'linear':
+            locator = LinearLocator()
         else:
             raise NotImplementedError
         self.ticks = locator(*self.bounds)
@@ -85,10 +88,12 @@
         if self.x_spec.scale == 'log' and \
            self.y_spec.scale == 'log':
             func = axes.loglog
-        elif self.x_spec == 'log':
+        elif self.x_spec.scale == 'log':
             func = axes.semilogx
-        elif self.y_spec == 'log':
+        elif self.y_spec.scale == 'log':
             func = axes.semilogy
+        else:
+            func = axes.plot
         if self.plot_spec is None:
             kwargs = {}
         else:
@@ -268,12 +273,15 @@
                                   y_bins, field_y, y_min, y_max, y_log,
                                   lazy_reader)
         # This is a fallback, in case we forget.
-        if field_z == "CellMassMsun": weight = None
+        if field_z.startswith("CellMass") or \
+           field_z.startswith("CellVolume"):
+            mylog.warning("Setting weight to None")
+            weight = None
         profile.add_fields(field_z, weight=weight, accumulation=accumulation, fractional=fractional)
         self._current_field = field_z
         self.profile = profile
         self.scale = {True:'log', False:'linear'}.get(
-                data_source.pf.field_info[field_z], "log")
+                data_source.pf.field_info[field_z].take_log, "log")
         self._setup_plot()
 
     def _setup_plot(self):
@@ -392,12 +400,15 @@
                                   x_bins, field_x, x_min, x_max, x_log,
                                   lazy_reader)
         # This is a fallback, in case we forget.
-        if field_y == "CellMassMsun": weight = None
+        if field_y.startswith("CellMass") or \
+           field_y.startswith("CellVolume"):
+            mylog.warning("Setting weight to None")
+            weight = None
         profile.add_fields(field_y, weight=weight, accumulation=accumulation, fractional=fractional)
         self._current_field = field_y
         self.profile = profile
         self.scale = {True:'log', False:'linear'}.get(
-                data_source.pf.field_info[field_y], "log")
+                data_source.pf.field_info[field_y].take_log, "log")
         self._setup_plot()
 
     def _setup_plot(self):
@@ -414,7 +425,7 @@
             ny = (self.profile[self._current_field] > 0)
             mi = self.profile[self._current_field][ny].min()
         else:
-            mi = self.profile[self._current_field][ny].min()
+            mi = self.profile[self._current_field].min()
         ma = self.profile[self._current_field].max()
         yax.bounds = (mi, ma)
         yax.scale = self.scale
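
The two .take_log changes above fix a subtle bug: the old code used the DerivedField object itself as the key into {True: 'log', False: 'linear'}, so the lookup always missed and fell back to "log". A minimal illustration (DummyField is a stand-in for a DerivedField):

    scale_map = {True: 'log', False: 'linear'}

    class DummyField(object):  # stand-in for a DerivedField with linear scaling
        take_log = False

    f = DummyField()
    assert scale_map.get(f, 'log') == 'log'              # object key: always the default
    assert scale_map.get(f.take_log, 'log') == 'linear'  # boolean attribute: correct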


http://bitbucket.org/yt_analysis/yt/changeset/dde36dec085f/
changeset:   dde36dec085f
branch:      deliberate_fields
user:        MatthewTurk
date:        2011-06-04 02:53:41
summary:     Differentiate between known and unknown fields.  This should help with unitful
fields we don't normally know about.
affected #:  3 files (-1 bytes)
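
A minimal, self-contained sketch of the dispatch this changeset introduces (plain dicts stand in for the real FieldInfoContainer instances, and "SomeExoticField" is a made-up name):

    def NullFunc(field, data):
        return None

    # Curated on-disk fields with real metadata (stand-in for KnownEnzoFields):
    known_fields = {"Density": (NullFunc, r"\rm{g}/\rm{cm}^3")}
    field_info = {}  # stand-in for pf.field_info

    for field in ["Density", "SomeExoticField"]:
        if field in known_fields:
            # Known field: reuse the curated definition, units and all.
            field_info[field] = known_fields[field]
        else:
            # Unknown field: register a unitless placeholder so it still reads.
            field_info[field] = (NullFunc, r"Unknown")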

--- a/yt/data_objects/field_info_container.py	Fri Jun 03 17:08:23 2011 -0700
+++ b/yt/data_objects/field_info_container.py	Fri Jun 03 17:53:41 2011 -0700
@@ -33,7 +33,7 @@
 
 from yt.funcs import *
 
-class FieldInfoContainer(dict): # We are all Borg.
+class FieldInfoContainer(dict): # Resistance has utility
     """
     This is a generic field container.  It contains a list of potential derived
     fields, all of which know how to act on a data object and return a value.  This
@@ -71,6 +71,8 @@
         obj.fallback = fallback
         return obj
 
+def NullFunc(field, data):
+    return
 
 FieldInfo = FieldInfoContainer()
 add_field = FieldInfo.add_field


--- a/yt/frontends/enzo/data_structures.py	Fri Jun 03 17:08:23 2011 -0700
+++ b/yt/frontends/enzo/data_structures.py	Fri Jun 03 17:53:41 2011 -0700
@@ -54,7 +54,8 @@
 from .definitions import parameterDict
 from .fields import \
     EnzoFieldInfo, Enzo2DFieldInfo, Enzo1DFieldInfo, \
-    add_enzo_field, add_enzo_2d_field, add_enzo_1d_field
+    add_enzo_field, add_enzo_2d_field, add_enzo_1d_field, \
+    KnownEnzoFields
 
 from yt.utilities.parallel_tools.parallel_analysis_interface import \
     parallel_blocking_call
@@ -416,20 +417,24 @@
                 # current field info.  This means we'll instead simply override
                 # it.
                 self.parameter_file.field_info.pop(field, None)
-            mylog.info("Adding %s to list of fields", field)
-            cf = None
-            if self.parameter_file.has_key(field):
-                def external_wrapper(f):
-                    def _convert_function(data):
-                        return data.convert(f)
-                    return _convert_function
-                cf = external_wrapper(field)
-            # Note that we call add_field on the field_info directly.  This
-            # will allow the same field detection mechanism to work for 1D, 2D
-            # and 3D fields.
-            self.pf.field_info.add_field(
-                    field, lambda a, b: None,
-                    convert_function=cf, take_log=False)
+            if field not in KnownEnzoFields:
+                mylog.info("Adding unknown field %s to list of fields", field)
+                cf = None
+                if self.parameter_file.has_key(field):
+                    def external_wrapper(f):
+                        def _convert_function(data):
+                            return data.convert(f)
+                        return _convert_function
+                    cf = external_wrapper(field)
+                # Note that we call add_field on the field_info directly.  This
+                # will allow the same field detection mechanism to work for 1D, 2D
+                # and 3D fields.
+                self.pf.field_info.add_field(
+                        field, lambda a, b: None,
+                        convert_function=cf, take_log=False)
+            else:
+                mylog.info("Adding known field %s to list of fields", field)
+                self.parameter_file.field_info[field] = KnownEnzoFields[field]
             
 
     def _setup_derived_fields(self):


--- a/yt/frontends/enzo/fields.py	Fri Jun 03 17:08:23 2011 -0700
+++ b/yt/frontends/enzo/fields.py	Fri Jun 03 17:53:41 2011 -0700
@@ -27,6 +27,7 @@
 
 from yt.data_objects.field_info_container import \
     FieldInfoContainer, \
+    NullFunc, \
     FieldInfo, \
     ValidateParameter, \
     ValidateDataField, \
@@ -39,9 +40,10 @@
 import yt.utilities.amr_utils as amr_utils
 
 EnzoFieldInfo = FieldInfoContainer.create_with_fallback(FieldInfo)
-add_enzo_field = EnzoFieldInfo.add_field
+add_field = EnzoFieldInfo.add_field
 
-add_field = add_enzo_field
+KnownEnzoFields = FieldInfoContainer()
+add_enzo_field = KnownEnzoFields.add_field
 
 _speciesList = ["HI","HII","Electron",
                "HeI","HeII","HeIII",
@@ -113,10 +115,10 @@
           validators=ValidateDataField("SN_Colour"),
           projection_conversion="1")
 
-add_field("Cooling_Time", units=r"\rm{s}",
-          function=lambda a, b: None,
-          validators=ValidateDataField("Cooling_Time"),
-          projection_conversion="1")
+add_enzo_field("Cooling_Time", units=r"\rm{s}",
+               function=NullFunc,
+               validators=ValidateDataField("Cooling_Time"),
+               projection_conversion="1")
 
 def _ThermalEnergy(field, data):
     if data.pf["HydroMethod"] == 2:
@@ -149,7 +151,9 @@
 def _convertEnergy(data):
     return data.convert("x-velocity")**2.0
 
-add_field("GasEnergy", function=lambda a, b: None,
+add_enzo_field("GasEnergy", function=NullFunc,
+          units=r"\rm{ergs}/\rm{g}", convert_function=_convertEnergy)
+add_enzo_field("Gas_Energy", function=NullFunc,
           units=r"\rm{ergs}/\rm{g}", convert_function=_convertEnergy)
 
 def _Gas_Energy(field, data):
@@ -157,7 +161,12 @@
 add_field("Gas_Energy", function=_Gas_Energy,
           units=r"\rm{ergs}/\rm{g}", convert_function=_convertEnergy)
 
-add_field("TotalEnergy", function=lambda a, b: None,
+# We set up fields for both TotalEnergy and Total_Energy in the known fields
+# lists.  Note that this does not mean these will be the definitions actually used.
+add_enzo_field("TotalEnergy", function=NullFunc,
+          display_name = "\mathrm{Total}\/\mathrm{Energy}",
+          units=r"\rm{ergs}/\rm{g}", convert_function=_convertEnergy)
+add_enzo_field("Total_Energy", function=NullFunc,
           display_name = "\mathrm{Total}\/\mathrm{Energy}",
           units=r"\rm{ergs}/\rm{g}", convert_function=_convertEnergy)
 
@@ -216,36 +225,44 @@
 
 for field in _default_fields:
     dn = field.replace("_","\/")
-    add_field(field, function=lambda a, b: None, take_log=True,
+    add_enzo_field(field, function=NullFunc, take_log=True,
               display_name = dn,
-              validators=[ValidateDataField(field)], units=r"\rm{g}/\rm{cm}^3")
-EnzoFieldInfo["x-velocity"].projection_conversion='1'
-EnzoFieldInfo["y-velocity"].projection_conversion='1'
-EnzoFieldInfo["z-velocity"].projection_conversion='1'
+              validators=[ValidateDataField(field)], units=r"Unknown")
+KnownEnzoFields["x-velocity"].projection_conversion='1'
+KnownEnzoFields["y-velocity"].projection_conversion='1'
+KnownEnzoFields["z-velocity"].projection_conversion='1'
+
+def _convertBfield(data): 
+    return na.sqrt(4*na.pi*data.convert("Density")*data.convert("x-velocity")**2)
+for field in ['Bx','By','Bz']:
+    f = KnownEnzoFields[field]
+    f._convert_function=_convertBfield
+    f._units=r"\mathrm{Gau\ss}"
+    f.take_log=False
 
 # Now we override
 
 def _convertDensity(data):
     return data.convert("Density")
 for field in ["Density"] + [ "%s_Density" % sp for sp in _speciesList ]:
-    EnzoFieldInfo[field]._units = r"\rm{g}/\rm{cm}^3"
-    EnzoFieldInfo[field]._projected_units = r"\rm{g}/\rm{cm}^2"
-    EnzoFieldInfo[field]._convert_function=_convertDensity
+    KnownEnzoFields[field]._units = r"\rm{g}/\rm{cm}^3"
+    KnownEnzoFields[field]._projected_units = r"\rm{g}/\rm{cm}^2"
+    KnownEnzoFields[field]._convert_function=_convertDensity
 
-add_field("Dark_Matter_Density", function=lambda a,b: None,
+add_enzo_field("Dark_Matter_Density", function=NullFunc,
           convert_function=_convertDensity,
           validators=[ValidateDataField("Dark_Matter_Density"),
                       ValidateSpatial(0)],
           display_name = "Dark\ Matter\ Density",
           not_in_all = True)
 
-EnzoFieldInfo["Temperature"]._units = r"\rm{K}"
-EnzoFieldInfo["Temperature"].units = r"K"
+KnownEnzoFields["Temperature"]._units = r"\rm{K}"
+KnownEnzoFields["Temperature"].units = r"K"
 
 def _convertVelocity(data):
     return data.convert("x-velocity")
 for ax in ['x','y','z']:
-    f = EnzoFieldInfo["%s-velocity" % ax]
+    f = KnownEnzoFields["%s-velocity" % ax]
     f._units = r"\rm{cm}/\rm{s}"
     f._convert_function = _convertVelocity
     f.take_log = False
@@ -376,14 +393,6 @@
 add_field('IsStarParticle', function=_IsStarParticle,
           particle_type = True)
 
-def _convertBfield(data): 
-    return na.sqrt(4*na.pi*data.convert("Density")*data.convert("x-velocity")**2)
-for field in ['Bx','By','Bz']:
-    f = EnzoFieldInfo[field]
-    f._convert_function=_convertBfield
-    f._units=r"\mathrm{Gau\ss}"
-    f.take_log=False
-
 def _Bmag(field, data):
     """ magnitude of bvec
     """
@@ -465,7 +474,7 @@
 def _convertBfield(data): 
     return na.sqrt(4*na.pi*data.convert("Density")*data.convert("x-velocity")**2)
 for field in ['Bx','By','Bz']:
-    f = EnzoFieldInfo[field]
+    f = KnownEnzoFields[field]
     f._convert_function=_convertBfield
     f._units=r"\mathrm{Gau\ss}"
     f.take_log=False


http://bitbucket.org/yt_analysis/yt/changeset/cc2f1d6646a9/
changeset:   cc2f1d6646a9
branch:      deliberate_fields
user:        MatthewTurk
date:        2011-06-04 03:04:40
summary:     Use NullFunc instead of a lambda.
affected #:  1 file (-1 bytes)
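
The commit message does not say why NullFunc is preferable, but one plausible motivation (an assumption, not taken from the diff) is that a single module-level function is one shared, identity-testable object, while every `lambda a, b: None` is a distinct anonymous function:

    def NullFunc(field, data):
        return None

    # Any two references to NullFunc are the same object:
    assert NullFunc is NullFunc

    # Two textually identical lambdas are still different objects:
    f = lambda a, b: None
    g = lambda a, b: None
    assert f is not g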

--- a/yt/frontends/enzo/data_structures.py	Fri Jun 03 17:53:41 2011 -0700
+++ b/yt/frontends/enzo/data_structures.py	Fri Jun 03 18:04:40 2011 -0700
@@ -46,7 +46,7 @@
 from yt.data_objects.static_output import \
     StaticOutput
 from yt.data_objects.field_info_container import \
-    FieldInfoContainer
+    FieldInfoContainer, NullFunc
 from yt.utilities.definitions import mpc_conversion
 from yt.utilities import hdf5_light_reader
 from yt.utilities.logger import ytLogger as mylog
@@ -430,8 +430,8 @@
                 # will allow the same field detection mechanism to work for 1D, 2D
                 # and 3D fields.
                 self.pf.field_info.add_field(
-                        field, lambda a, b: None,
-                        convert_function=cf, take_log=False)
+                        field, NullFunc,
+                        convert_function=cf, take_log=False, units=r"Unknown")
             else:
                 mylog.info("Adding known field %s to list of fields", field)
                 self.parameter_file.field_info[field] = KnownEnzoFields[field]


http://bitbucket.org/yt_analysis/yt/changeset/629279d8b0f9/
changeset:   629279d8b0f9
branch:      deliberate_fields
user:        MatthewTurk
date:        2011-06-08 17:15:25
summary:     Merging
affected #:  23 files (-1 bytes)

--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/yt/analysis_modules/absorption_spectrum/__init__.py	Wed Jun 08 08:15:25 2011 -0700
@@ -0,0 +1,25 @@
+"""
+Import stuff for the absorption spectrum generator.
+
+Author: Britton Smith <brittons at origins.colorado.edu>
+Affiliation: CASA/University of CO, Boulder
+Homepage: http://yt.enzotools.org/
+License:
+  Copyright (C) 2008-2011 Britton Smith.  All Rights Reserved.
+
+  This file is part of yt.
+
+  yt is free software; you can redistribute it and/or modify
+  it under the terms of the GNU General Public License as published by
+  the Free Software Foundation; either version 3 of the License, or
+  (at your option) any later version.
+
+  This program is distributed in the hope that it will be useful,
+  but WITHOUT ANY WARRANTY; without even the implied warranty of
+  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+  GNU General Public License for more details.
+
+  You should have received a copy of the GNU General Public License
+  along with this program.  If not, see <http://www.gnu.org/licenses/>.
+"""
+


--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/yt/analysis_modules/absorption_spectrum/absorption_line.py	Wed Jun 08 08:15:25 2011 -0700
@@ -0,0 +1,210 @@
+"""
+Absorption line generating functions.
+
+Author: Britton Smith <brittonsmith at gmail.com>
+Affiliation: Michigan State University
+Homepage: http://yt.enzotools.org/
+License:
+  Copyright (C) 2008-2011 Britton Smith.  All Rights Reserved.
+
+  This file is part of yt.
+
+  yt is free software; you can redistribute it and/or modify
+  it under the terms of the GNU General Public License as published by
+  the Free Software Foundation; either version 3 of the License, or
+  (at your option) any later version.
+
+  This program is distributed in the hope that it will be useful,
+  but WITHOUT ANY WARRANTY; without even the implied warranty of
+  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+  GNU General Public License for more details.
+
+  You should have received a copy of the GNU General Public License
+  along with this program.  If not, see <http://www.gnu.org/licenses/>.
+"""
+
+import numpy as na
+
+def voigt(a,u):
+    """
+    NAME:
+        VOIGT 
+    PURPOSE:
+        Implementation of Voigt function 
+    CATEGORY:
+            Math
+    CALLING SEQUENCE:
+            voigt=Voigt(a,u)
+    INPUTS:
+            A = Voigt "A" parameter.
+            U = Frequency in units of the Doppler frequency.
+
+            The line profile "Phi(v)", the doppler width
+            "Delv", the voigt parameter "a", and the frequency "u"
+            are given by:
+
+            Phi(v) =  Voigt(a,u)/[ Delv * sqrt(pi) ]
+            Delv   =  Vo/c * sqrt[ 2kT/m ]
+            u      =  V - Vo / Delv
+            a      =  GAMMA / [ Delv * 4pi ]
+            Gamma  =  Gu + Gl + 2*Vcol
+            "Gu" and "Gl" are the widths of the upper and lower states
+            "Vcol" is the collisions per unit time
+            "Vo" is the line center frequency
+
+    OUTPUTS:
+            An array of the same type as u
+    RESTRICTIONS:
+            U must be an array, a should not be. Also this procedure is only valid
+            for the region a<1.0, u<4.0 or a<1.8(u+1), u>4, which should cover most
+            astrophysical conditions (see the article below for further comments).
+    PROCEDURE:
+            Follows procedure in Armstrong JQSRT 7, 85 (1967)
+            also the same as the intrinsic in the previous version of IDL
+    MODIFICATION HISTORY:
+            J. Murthy, Mar 1990 (adapted from the FORTRAN program of Armstrong)
+                      Sep 1990 (better overflow checking)
+    """
+    x = na.asarray(u).astype(na.float64)
+    y = na.asarray(a).astype(na.float64)
+
+    w = na.array([0.462243670,   0.286675505,   0.109017206, 
+                  0.0248105209,  0.00324377334, 0.000228338636, 
+                  7.80255648e-6, 1.08606937e-7, 4.39934099e-10, 
+                  2.22939365e-13])
+
+    t = na.array([0.245340708, 0.737473729, 1.23407622, 1.73853771, 
+                  2.25497400,  2.78880606,  3.34785457, 3.94476404, 
+                  4.60368245,  5.38748089])
+
+    # Hummer's Chebyshev Coefficients
+    c = ( 0.1999999999972224, -0.1840000000029998,   0.1558399999965025, 
+         -0.1216640000043988,  0.0877081599940391,  -0.0585141248086907, 
+          0.0362157301623914, -0.0208497654398036,   0.0111960116346270, 
+         -0.56231896167109e-2, 0.26487634172265e-2, -0.11732670757704e-2, 
+          0.4899519978088e-3, -0.1933630801528e-3,   0.722877446788e-4, 
+         -0.256555124979e-4,   0.86620736841e-5,    -0.27876379719e-5, 
+          0.8566873627e-6,    -0.2518433784e-6,      0.709360221e-7, 
+         -0.191732257e-7,      0.49801256e-8,       -0.12447734e-8, 
+          0.2997777e-9,       -0.696450e-10,         0.156262e-10, 
+         -0.33897e-11,         0.7116e-12,          -0.1447e-12, 
+          0.285e-13,          -0.55e-14,             0.10e-14,
+         -0.2e-15)
+
+    y2 = y * y
+
+    # limits are y<1.,  x<4 or y<1.8(x+1),  x>4 (no checking performed)
+    u1 = na.exp(-x * x + y2) * na.cos(2. * x * y)
+
+    # Clenshaw's Algorithm
+    bno1 = na.zeros(x.shape)
+    bno2 = na.zeros(x.shape)
+    x1 = na.clip((x / 5.), -na.inf, 1.)
+    coef = 4. * x1 * x1 - 2.
+    for i in range(33, -1, -1):
+        bn = coef * bno1 - bno2 + c[i]
+        bno2 = na.copy(bno1)
+        bno1 = na.copy(bn)
+
+    f = x1 * (bn - bno2)
+    dno1 = 1. - 2. * x * f
+    dno2 = f
+
+    q = x > 5
+    if q.any():
+        x14 = na.power(na.clip(x[q], -na.inf, 500.),  14)
+        x12 = na.power(na.clip(x[q], -na.inf, 1000.), 12)
+        x10 = na.power(na.clip(x[q], -na.inf, 5000.), 10)
+        x8  = na.power(na.clip(x[q], -na.inf, 50000.), 8)
+        x6  = na.power(na.clip(x[q], -na.inf, 1.e6),   6)
+        x4  = na.power(na.clip(x[q], -na.inf, 1.e9),   4)
+        x2  = na.power(na.clip(x[q], -na.inf, 1.e18),  2)
+        dno1[q] = -(0.5 / x2 + 0.75 / x4 + 1.875 / x6 + 
+                    6.5625 / x8 + 29.53125 / x10 +
+                    162.4218 / x12 + 1055.7421 / x14)
+        dno2[q] = (1. - dno1[q]) / (2. * x[q])
+
+    funct = y * dno1
+    if (y > 1.e-8).any():
+        q = 1.0
+        yn = y
+        for i in range(2, 51):
+            dn = (x * dno1 + dno2) * (-2. / i)
+            dno2 = dno1
+            dno1 = dn
+            if (i % 2) == 1:
+                q = -q
+                yn = yn * y2
+                g = dn.astype(na.float64) * yn
+                funct = funct + q * g
+                if na.max(na.abs(g / funct)) <= 1.e-8: break
+
+    k1 = u1 - 1.12837917 * funct
+    k1 = k1.astype(na.float64).clip(0)
+    return k1
+
+def tau_profile(lam0, fval, gamma, vkms, column_density, 
+                deltav=None, delta_lambda=None,
+                lambda_bins=None, n_lambda=12000, dlambda=0.01):
+    """
+    Create an optical depth vs. wavelength profile for an 
+    absorption line using a voigt profile.
+    :param lam0 (float): central wavelength (angstroms).
+    :param fval (float): f-value.
+    :param gamma (float): gamma value.
+    :param vkms (float): doppler b-parameter.
+    :param column_density (float): column density (cm^-2).
+    :param deltav (float): velocity offset from lam0 (km/s).
+    Default: None (no shift).
+    :param delta_lambda (float): wavelength offset in angstroms.
+    Default: None (no shift).
+    :param lambda_bins (array): array of wavelengths in angstroms.
+    Default: None
+    :param n_lambda (float): number of lambda bins to create
+    if lambda_bins is None.  Default: 12000
+    :param dlambda (float): lambda bin width if lambda_bins is 
+    None. Default: 0.01
+    """
+
+    ## constants
+    me = 1.6726231e-24 / 1836.        # electron mass in grams (proton mass / 1836)
+    e = 4.8032e-10                    # esu 
+    c = 2.99792456e5                  # km/s
+    ccgs = c * 1.e5                   # cm/s 
+
+    ## shift lam0 by deltav
+    if deltav is not None:
+        lam1 = lam0 * (1 + deltav / c)
+    elif delta_lambda is not None:
+        lam1 = lam0 + delta_lambda
+    else:
+        lam1 = lam0
+
+    ## conversions
+    vdop = vkms * 1.e5                # in cm/s
+    lam0cgs = lam0 / 1.e8             # rest wavelength in cm
+    lam1cgs = lam1 / 1.e8             # line wavelength in cm
+    nu1 = ccgs / lam1cgs              # line freq in Hz
+    nudop = vdop / ccgs * nu1         # doppler width in Hz
+    lamdop = vdop / ccgs * lam1       # doppler width in Ang
+
+    ## create wavelength
+    if lambda_bins is None:
+        lambda_bins = lam1 + \
+            na.arange(n_lambda, dtype=na.float) * dlambda - \
+            n_lambda * dlambda / 2    # wavelength vector (angstroms)
+    nua = ccgs / (lambda_bins / 1.e8) # frequency vector (Hz)
+
+    ## tau_0
+    tau_X = na.sqrt(na.pi) * e**2 / (me * ccgs) * \
+        column_density * fval / vdop
+    tau1 = tau_X * lam1cgs
+    tau0 = tau_X * lam0cgs
+
+    # dimensionless frequency offset in units of doppler freq
+    x = (nua - nu1) / nudop
+    a = gamma / (4 * na.pi * nudop)   # damping parameter 
+    phi = voigt(a, x)                 # profile
+    tauphi = tau0 * phi               # profile scaled with tau0
+
+    return (lambda_bins, tauphi)
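
A hedged usage sketch of tau_profile for a single Lyman-alpha absorber (the atomic data are standard literature values supplied here as assumptions; they are not part of this commit):

    from yt.analysis_modules.absorption_spectrum.absorption_line import tau_profile

    # Lyman-alpha: rest wavelength 1215.67 A, f-value 0.4164, gamma 6.265e8 s^-1
    lambda_bins, tau = tau_profile(lam0=1215.67, fval=0.4164, gamma=6.265e8,
                                   vkms=20.0,              # Doppler b-parameter in km/s
                                   column_density=1.0e14)  # HI column in cm^-2
    # tau is the optical depth on lambda_bins; the transmitted flux is exp(-tau).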


--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/yt/analysis_modules/absorption_spectrum/absorption_spectrum.py	Wed Jun 08 08:15:25 2011 -0700
@@ -0,0 +1,285 @@
+"""
+AbsorptionSpectrum class and member functions.
+
+Author: Britton Smith <brittonsmith at gmail.com>
+Affiliation: Michigan State University
+Homepage: http://yt.enzotools.org/
+License:
+  Copyright (C) 2008-2011 Britton Smith.  All Rights Reserved.
+
+  This file is part of yt.
+
+  yt is free software; you can redistribute it and/or modify
+  it under the terms of the GNU General Public License as published by
+  the Free Software Foundation; either version 3 of the License, or
+  (at your option) any later version.
+
+  This program is distributed in the hope that it will be useful,
+  but WITHOUT ANY WARRANTY; without even the implied warranty of
+  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+  GNU General Public License for more details.
+
+  You should have received a copy of the GNU General Public License
+  along with this program.  If not, see <http://www.gnu.org/licenses/>.
+"""
+
+import h5py
+import numpy as na
+
+from absorption_line import tau_profile
+
+from yt.funcs import get_pbar
+from yt.utilities.physical_constants import \
+    amu_cgs, boltzmann_constant_cgs, \
+    speed_of_light_cgs, km_per_cm
+
+speed_of_light_kms = speed_of_light_cgs * km_per_cm
+
+class AbsorptionSpectrum(object):
+    def __init__(self, lambda_min, lambda_max, n_lambda):
+        """
+        Create an absorption spectrum object.
+        :param lambda_min (float): lower wavelength bound in angstroms.
+        :param lambda_max (float): upper wavelength bound in angstroms.
+        :param n_lambda (float): number of wavelength bins.
+        """
+
+        self.n_lambda = n_lambda
+        self.tau_field = None
+        self.flux_field = None
+        self.spectrum_line_list = None
+        self.lambda_bins = na.linspace(lambda_min, lambda_max, n_lambda)
+        self.bin_width = (lambda_max - lambda_min) / float(n_lambda - 1)
+        self.line_list = []
+        self.continuum_list = []
+
+    def add_line(self, label, field_name, wavelength, 
+                 f_value, gamma, atomic_mass,
+                 label_threshold=None):
+        """
+        Add an absorption line to the list of lines included in the spectrum.
+        :param label (string): label for the line.
+        :param field_name (string): field name from ray data for column densities.
+        :param wavelength (float): line rest wavelength in angstroms.
+        :param f_value (float): line f-value.
+        :param gamma (float): line gamma value.
+        :param atomic_mass (float): mass of atom in amu.
+        :param label_threshold (float): minimum column density at which
+        the line is recorded in the output line list.  Default: None.
+        """
+
+        self.line_list.append({'label': label, 'field_name': field_name, 
+                               'wavelength': wavelength, 'f_value': f_value, 
+                               'gamma': gamma, 'atomic_mass': atomic_mass,
+                               'label_threshold': label_threshold})
+
+    def add_continuum(self, label, field_name, wavelength, 
+                      normalization, index):
+        """
+        Add a continuum feature that follows a power-law.
+        :param label (string): label for the feature.
+        :param field_name (string): field name from ray data for column densities.
+        :param wavelength (float): line rest wavelength in angstroms.
+        :param normalization (float): the column density normalization.
+        :param index (float): the power-law index for the wavelength dependence.
+        """
+
+        self.continuum_list.append({'label': label, 'field_name': field_name, 
+                                    'wavelength': wavelength, 
+                                    'normalization': normalization,
+                                    'index': index})
+
+    def make_spectrum(self, input_file, output_file='spectrum.h5',
+                      line_list_file='lines.txt', 
+                      use_peculiar_velocity=True):
+        """
+        Make spectrum from ray data using the line list.
+        :param input_file (string): path to input ray data.
+        :param output_file (string): path for output file.
+               File formats are chosen based on the filename extension.
+                    - .h5: hdf5.
+                    - .fits: fits.
+                    - anything else: ascii.
+        :param line_list_file (string): path for the file listing all of the
+        absorption lines added to the spectrum.
+        :param use_peculiar_velocity (bool): if True, include line of sight 
+        velocity for shifting lines.
+        """
+
+        input_fields = ['dl', 'redshift', 'Temperature']
+        field_data = {}
+        if use_peculiar_velocity: input_fields.append('los_velocity')
+        for feature in self.line_list + self.continuum_list:
+            if not feature['field_name'] in input_fields:
+                input_fields.append(feature['field_name'])
+
+        input = h5py.File(input_file, 'r')
+        for field in input_fields:
+            field_data[field] = input[field].value
+        input.close()
+
+        self.tau_field = na.zeros(self.lambda_bins.size)
+        self.spectrum_line_list = []
+
+        self._add_lines_to_spectrum(field_data, use_peculiar_velocity)
+        self._add_continua_to_spectrum(field_data, use_peculiar_velocity)
+
+        self.flux_field = na.exp(-self.tau_field)
+
+        if output_file.endswith('.h5'):
+            self._write_spectrum_hdf5(output_file)
+        elif output_file.endswith('.fits'):
+            self._write_spectrum_fits(output_file)
+        else:
+            self._write_spectrum_ascii(output_file)
+        self._write_spectrum_line_list(line_list_file)
+
+        del field_data
+        return (self.lambda_bins, self.flux_field)
+
+    def _add_continua_to_spectrum(self, field_data, use_peculiar_velocity):
+        """
+        Add continuum features to the spectrum.
+        """
+        # Only add continuum features down to tau of 1.e-4.
+        tau_min = 1.e-4
+
+        for continuum in self.continuum_list:
+            column_density = field_data[continuum['field_name']] * field_data['dl']
+            delta_lambda = continuum['wavelength'] * field_data['redshift']
+            if use_peculiar_velocity:
+                delta_lambda += continuum['wavelength'] * field_data['los_velocity'] / \
+                    speed_of_light_cgs
+            this_wavelength = delta_lambda + continuum['wavelength']
+            right_index = na.digitize(this_wavelength, self.lambda_bins).clip(0, self.n_lambda)
+            left_index = na.digitize((this_wavelength * 
+                                     na.power((tau_min * continuum['normalization'] / 
+                                               column_density), (1. / continuum['index']))),
+                                    self.lambda_bins).clip(0, self.n_lambda)
+
+            valid_continuua = na.where(((column_density /
+                                         continuum['normalization']) > tau_min) &
+                                       (right_index - left_index > 1))[0]
+            pbar = get_pbar("Adding continuum feature - %s [%f A]: " % \
+                                (continuum['label'], continuum['wavelength']),
+                            valid_continuua.size)
+            for i, lixel in enumerate(valid_continuua):
+                line_tau = na.power((self.lambda_bins[left_index[lixel]:right_index[lixel]] / 
+                                     this_wavelength[lixel]), continuum['index']) * \
+                                     column_density[lixel] / continuum['normalization']
+                self.tau_field[left_index[lixel]:right_index[lixel]] += line_tau
+                pbar.update(i)
+            pbar.finish()
+
+    def _add_lines_to_spectrum(self, field_data, use_peculiar_velocity):
+        """
+        Add the absorption lines to the spectrum.
+        """
+        # Only make voigt profile for slice of spectrum that is 10 times the line width.
+        spectrum_bin_ratio = 5
+
+        for line in self.line_list:
+            column_density = field_data[line['field_name']] * field_data['dl']
+            delta_lambda = line['wavelength'] * field_data['redshift']
+            if use_peculiar_velocity:
+                delta_lambda += line['wavelength'] * field_data['los_velocity'] / \
+                    speed_of_light_cgs
+            thermal_b = km_per_cm * na.sqrt((2 * boltzmann_constant_cgs * 
+                                             field_data['Temperature']) / 
+                                            (amu_cgs * line['atomic_mass']))
+            center_bins = na.digitize((delta_lambda + line['wavelength']), 
+                                      self.lambda_bins)
+
+            # ratio of line width to bin width
+            width_ratio = (line['wavelength'] + delta_lambda) * \
+                thermal_b / speed_of_light_kms / self.bin_width
+
+            # do voigt profiles for a subset of the full spectrum
+            left_index  = (center_bins - 
+                           spectrum_bin_ratio * width_ratio).astype(int).clip(0, self.n_lambda)
+            right_index = (center_bins + 
+                           spectrum_bin_ratio * width_ratio).astype(int).clip(0, self.n_lambda)
+
+            # loop over all lines wider than the bin width
+            valid_lines = na.where((width_ratio >= 1.0) & 
+                                   (right_index - left_index > 1))[0]
+            pbar = get_pbar("Adding line - %s [%f A]: " % (line['label'], line['wavelength']),
+                            valid_lines.size)
+            for i, lixel in enumerate(valid_lines):
+                    lambda_bins, line_tau = \
+                        tau_profile(line['wavelength'], line['f_value'],
+                                    line['gamma'], thermal_b[lixel], 
+                                    column_density[lixel], 
+                                    delta_lambda=delta_lambda[lixel],
+                                    lambda_bins=self.lambda_bins[left_index[lixel]:right_index[lixel]])
+                    self.tau_field[left_index[lixel]:right_index[lixel]] += line_tau
+                    if line['label_threshold'] is not None and \
+                            column_density[lixel] >= line['label_threshold']:
+                        if use_peculiar_velocity:
+                            peculiar_velocity = km_per_cm * field_data['los_velocity'][lixel]
+                        else:
+                            peculiar_velocity = 0.0
+                        self.spectrum_line_list.append({'label': line['label'],
+                                                        'wavelength': (line['wavelength'] +
+                                                                       delta_lambda[lixel]),
+                                                        'column_density': column_density[lixel],
+                                                        'b_thermal': thermal_b[lixel],
+                                                        'redshift': field_data['redshift'][lixel],
+                                                        'v_pec': peculiar_velocity})
+                    pbar.update(i)
+            pbar.finish()
+
+            del column_density, delta_lambda, thermal_b, \
+                center_bins, width_ratio, left_index, right_index
+
+    def _write_spectrum_line_list(self, filename):
+        """
+        Write out list of spectral lines.
+        """
+        print "Writing spectral line list: %s." % filename
+        self.spectrum_line_list.sort(key=lambda obj: obj['wavelength'])
+        f = open(filename, 'w')
+        f.write('#%-14s %-14s %-12s %-12s %-12s %-12s\n' % 
+                ('Wavelength', 'Line', 'N [cm^-2]', 'b [km/s]', 'z', 'v_pec [km/s]'))
+        for line in self.spectrum_line_list:
+            f.write('%-14.6f %-14ls %e %e %e %e.\n' % (line['wavelength'], line['label'],
+                                                line['column_density'], line['b_thermal'],
+                                                line['redshift'], line['v_pec']))
+        f.close()
+
+    def _write_spectrum_ascii(self, filename):
+        """
+        Write spectrum to an ascii file.
+        """
+        print "Writing spectrum to ascii file: %s." % filename
+        f = open(filename, 'w')
+        f.write("# wavelength[A] tau flux\n")
+        for i in xrange(self.lambda_bins.size):
+            f.write("%e %e %e\n" % (self.lambda_bins[i], 
+                                    self.tau_field[i], self.flux_field[i]))
+        f.close()
+
+    def _write_spectrum_fits(self, filename):
+        """
+        Write spectrum to a fits file.
+        """
+        try:
+            import pyfits
+        except ImportError:
+            print "Could not import the pyfits module.  Please install pyfits."
+            return
+
+        print "Writing spectrum to fits file: %s." % filename
+        col1 = pyfits.Column(name='wavelength', format='E', array=self.lambda_bins)
+        col2 = pyfits.Column(name='flux', format='E', array=self.flux_field)
+        cols = pyfits.ColDefs([col1, col2])
+        tbhdu = pyfits.new_table(cols)
+        tbhdu.writeto(filename, clobber=True)
+
+    def _write_spectrum_hdf5(self, filename):
+        """
+        Write spectrum to an hdf5 file.
+
+        """
+        print "Writing spectrum to hdf5 file: %s." % filename
+        output = h5py.File(filename, 'w')
+        output.create_dataset('wavelength', data=self.lambda_bins)
+        output.create_dataset('tau', data=self.tau_field)
+        output.create_dataset('flux', data=self.flux_field)
+        output.close()

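The left_index/right_index computation above is the key optimization: each Voigt
profile is deposited only onto the bins it can actually affect.  A minimal numpy
sketch of that windowing, with illustrative wavelengths and widths (none of
these numbers come from the changeset):

    import numpy as np

    # Illustrative spectrum grid and two line centers (values made up).
    lambda_bins = np.linspace(1100.0, 1300.0, 2001)   # angstroms
    bin_width = lambda_bins[1] - lambda_bins[0]
    line_centers = np.array([1215.67, 1215.9])
    line_widths = np.array([0.5, 0.05])               # thermal widths, angstroms

    center_bins = np.digitize(line_centers, lambda_bins)
    width_ratio = line_widths / bin_width
    spectrum_bin_ratio = 5   # window half-width in units of the line width

    left_index = (center_bins -
                  spectrum_bin_ratio * width_ratio).astype(int).clip(0, lambda_bins.size)
    right_index = (center_bins +
                   spectrum_bin_ratio * width_ratio).astype(int).clip(0, lambda_bins.size)
    # Only lines at least one bin wide get a deposited profile, as in the loop above.
    valid_lines = np.where((width_ratio >= 1.0) & (right_index - left_index > 1))[0]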

--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/yt/analysis_modules/absorption_spectrum/api.py	Wed Jun 08 08:15:25 2011 -0700
@@ -0,0 +1,32 @@
+"""
+API for absorption_spectrum
+
+Author: Matthew Turk <matthewturk@gmail.com>
+Affiliation: UCSD
+Author: J.S. Oishi <jsoishi@gmail.com>
+Affiliation: KIPAC/SLAC/Stanford
+Author: Britton Smith <brittonsmith@gmail.com>
+Affiliation: MSU
+Homepage: http://yt.enzotools.org/
+License:
+  Copyright (C) 2010-2011 Matthew Turk.  All Rights Reserved.
+
+  This file is part of yt.
+
+  yt is free software; you can redistribute it and/or modify
+  it under the terms of the GNU General Public License as published by
+  the Free Software Foundation; either version 3 of the License, or
+  (at your option) any later version.
+
+  This program is distributed in the hope that it will be useful,
+  but WITHOUT ANY WARRANTY; without even the implied warranty of
+  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+  GNU General Public License for more details.
+
+  You should have received a copy of the GNU General Public License
+  along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+"""
+
+from .absorption_spectrum import \
+    AbsorptionSpectrum

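A rough usage sketch for the new module; the constructor and add_line
signatures are assumptions inferred from the fields the spectrum code reads
(label, wavelength, f_value, gamma, label_threshold), the field name is an
assumption, and 'ray.h5' is a hypothetical LightRay output file:

    from yt.analysis_modules.absorption_spectrum.api import AbsorptionSpectrum

    # Wavelength bounds (in angstroms) and bin count are illustrative.
    sp = AbsorptionSpectrum(900.0, 1800.0, 10000)
    # Ly-alpha with its standard atomic data.
    sp.add_line('Lya', 'HI_NumberDensity', 1215.67, f_value=0.4164,
                gamma=6.265e8, atomic_mass=1.00794, label_threshold=1.e14)
    sp.make_spectrum('ray.h5', output_file='spectrum.h5',
                     line_list_file='lines.txt', use_peculiar_velocity=True)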

--- a/yt/analysis_modules/api.py	Fri Jun 03 18:04:40 2011 -0700
+++ b/yt/analysis_modules/api.py	Wed Jun 08 08:15:25 2011 -0700
@@ -28,6 +28,9 @@
 
 """
 
+from .absorption_spectrum.api import \
+    AbsorptionSpectrum
+
 from .coordinate_transformation.api import \
     spherical_regrid
 


--- a/yt/analysis_modules/light_cone/light_cone.py	Fri Jun 03 18:04:40 2011 -0700
+++ b/yt/analysis_modules/light_cone/light_cone.py	Wed Jun 08 08:15:25 2011 -0700
@@ -49,25 +49,29 @@
                  set_parameters=None, output_dir='LC', output_prefix='LightCone', **kwargs):
         """
         Initialize a LightCone object.
-        :param initial_redshift (float): the initial (highest) redshift for the light cone.  Default: 1.0.
-        :param final_redshift (float): the final (lowest) redshift for the light cone.  Default: 0.0.
+        :param initial_redshift (float): the initial (highest) redshift for the light cone.  
+        Default: 1.0.
+        :param final_redshift (float): the final (lowest) redshift for the light cone.  
+        Default: 0.0.
         :param observer_redshift (float): the redshift of the observer.  Default: 0.0.
-        :param field_of_view_in_arcminutes (float): the field of view of the image in units of arcminutes.  
-               Default: 600.0.
-        :param image_resolution_in_arcseconds (float): the size of each image pixel in units of arcseconds.  
-               Default: 60.0.
-                             :param use_minimum_datasets (bool): if True, the minimum number of datasets is used to connect the 
-               initial and final redshift.  If false, the light cone solution will contain as many entries 
-               as possible within the redshift interval.  Default: True.
-        :param deltaz_min (float): specifies the minimum :math:`\Delta z` between consecutive datasets in 
-               the returned list.  Default: 0.0.
-        :param minimum_coherent_box_fraction (float): used with use_minimum_datasets set to False, this 
-               parameter specifies the fraction of the total box size to be traversed before rerandomizing 
-               the projection axis and center.  This was invented to allow light cones with thin slices to 
-               sample coherent large scale structure, but in practice does not work so well.  Try setting 
-               this parameter to 1 and see what happens.  Default: 0.0.
-        :param set_parameters (dict): dictionary of parameters to attach to pf.parameters.  Default: None.
-        :param output_dir (str): the directory in which images and data files will be written.  Default: 'LC'.
+        :param field_of_view_in_arcminutes (float): the field of view of the image in units of 
+        arcminutes.  Default: 600.0.
+        :param image_resolution_in_arcseconds (float): the size of each image pixel in units of 
+        arcseconds.  Default: 60.0.
+        :param use_minimum_datasets (bool): if True, the minimum number of datasets is used to 
+        connect the initial and final redshift.  If false, the light cone solution will contain 
+        as many entries as possible within the redshift interval.  Default: True.
+        :param deltaz_min (float): specifies the minimum :math:`\Delta z` between consecutive 
+        datasets in the returned list.  Default: 0.0.
+        :param minimum_coherent_box_fraction (float): used with use_minimum_datasets set to 
+        False, this parameter specifies the fraction of the total box size to be traversed before 
+        rerandomizing the projection axis and center.  This was invented to allow light cones with 
+        thin slices to sample coherent large scale structure, but in practice does not work so 
+        well.  Try setting this parameter to 1 and see what happens.  Default: 0.0.
+        :param set_parameters (dict): dictionary of parameters to attach to pf.parameters.  
+        Default: None.
+        :param output_dir (str): the directory in which images and data files will be written.  
+        Default: 'LC'.
         :param output_prefix (str): the prefix of all images and data files.  Default: 'LightCone'.
         """
 

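For reference, a hedged instantiation sketch built from the documented
defaults; the first positional argument is assumed to be the Enzo parameter
file (as in EnzoSimulation below), and the import path and file name are
illustrative:

    from yt.analysis_modules.light_cone.light_cone import LightCone

    lc = LightCone('my_simulation.par',
                   initial_redshift=1.0, final_redshift=0.0,
                   observer_redshift=0.0,
                   field_of_view_in_arcminutes=600.0,
                   image_resolution_in_arcseconds=60.0,
                   use_minimum_datasets=True, deltaz_min=0.0,
                   output_dir='LC', output_prefix='LightCone')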

--- a/yt/analysis_modules/light_ray/light_ray.py	Fri Jun 03 18:04:40 2011 -0700
+++ b/yt/analysis_modules/light_ray/light_ray.py	Wed Jun 08 08:15:25 2011 -0700
@@ -46,13 +46,39 @@
 from yt.convenience import load
 
 class LightRay(EnzoSimulation):
-    def __init__(self, EnzoParameterFile, FinalRedshift, InitialRedshift, 
+    def __init__(self, enzo_parameter_file, final_redshift, initial_redshift, 
                  deltaz_min=0.0, use_minimum_datasets=True, 
                  minimum_coherent_box_fraction=0.0, **kwargs):
+        """
+        Create a LightRay object.  A light ray is much like a light cone, in that 
+        it stacks together multiple datasets in order to extend a redshift interval.  
+        Unlike a light cone, which does randomly oriented projections for each dataset, 
+        a light ray consists of randomly oriented single rays.  The purpose of these 
+        is to create synthetic QSO lines of sight.
 
-        EnzoSimulation.__init__(self, EnzoParameterFile, 
-                                initial_redshift=InitialRedshift,
-                                final_redshift=FinalRedshift, links=True,
+        Once the LightRay object is set up, use LightRay.make_light_ray to begin making 
+        rays.  Different randomizations can be created with a single object by providing 
+        different random seeds to make_light_ray.
+
+        :param enzo_parameter_file (string): path to simulation parameter file.
+        :param final_redshift (float): lower bound of the ray redshift interval.
+        :param initial_redshift (float): upper bound of the ray redshift interval.
+        :param deltaz_min (float): minimum delta z between consecutive datasets.
+        Default: 0.0.
+        :param use_minimum_datasets (bool): if True, the minimum number of datasets is 
+        used to connect the initial and final redshift.  If false, the light ray 
+        solution will contain as many entries as possible within the redshift interval.  
+        Default: True.
+        :param minimum_coherent_box_fraction (float): used with use_minimum_datasets set to False,
+        this parameter specifies the fraction of the total box size to be traversed before 
+        rerandomizing the projection axis and center.  This was invented to allow light cones 
+        with thin slices to sample coherent large scale structure, but in practice does not 
+        work so well.  It is not very clear what this will do to a light ray.  Default: 0.0.
+        """
+
+        EnzoSimulation.__init__(self, enzo_parameter_file, 
+                                initial_redshift=initial_redshift,
+                                final_redshift=final_redshift, links=True,
                                 enzo_parameters={'CosmologyComovingBoxSize':float}, 
                                 **kwargs)
 
@@ -85,7 +111,7 @@
 
         for q in range(len(self.light_ray_solution)):
             if (q == len(self.light_ray_solution) - 1):
-                z_next = self.FinalRedshift
+                z_next = self.final_redshift
             else:
                 z_next = self.light_ray_solution[q+1]['redshift']
 
@@ -130,16 +156,66 @@
             boxFractionUsed += self.light_ray_solution[q]['TraversalBoxFraction']
 
         if filename is not None:
-            self._write_light_ray_solution(filename, \
-                                               extra_info={'EnzoParameterFile':self.EnzoParameterFile, 
-                                                           'RandomSeed':seed,
-                                                           'InitialRedshift':self.InitialRedshift, 
-                                                           'FinalRedshift':self.FinalRedshift})
+            self._write_light_ray_solution(filename, 
+                                           extra_info={'enzo_parameter_file':self.enzo_parameter_file, 
+                                                       'RandomSeed':seed,
+                                                       'initial_redshift':self.initial_redshift, 
+                                                       'final_redshift':self.final_redshift})
 
     def make_light_ray(self, seed=None, fields=None, 
                        solution_filename=None, data_filename=None,
-                       get_nearest_galaxy=False, get_los_velocity=False, **kwargs):
-        "Create a light ray and get field values for each lixel."
+                       get_nearest_galaxy=False, get_los_velocity=False, 
+                       halo_mass_field='TotalMassMsun_200', **kwargs):
+        """
+        Create a light ray and get field values for each lixel.  A light ray consists of 
+        a list of field values for cells intersected by the ray and the path length of 
+        the ray through those cells.  Light ray data can be written out to an hdf5 file.
+
+        :param seed (int): seed for the random number generator.  Default: None.
+        :param fields (list): a list of fields for which to get data.  Default: None.
+        :param solution_filename (string): path to a text file where the trajectory of each
+        subray is written out.  Default: None.
+        :param data_filename (string): path to output file for ray data.  Default: None.
+        :param get_nearest_galaxy (bool): if True, the HaloProfiler will be used to calculate 
+        the distance and mass of the nearest halo for each point in the ray.  This option 
+        requires additional information to be included.  See below for an example.  
+        Default: False.
+        :param get_los_velocity (bool): if True, the line of sight velocity is calculated for 
+        each point in the ray.  Default: False.
+
+        GETTING THE NEAREST GALAXIES
+        The light ray tool will use the HaloProfiler to calculate the distance and mass 
+        of the nearest halo to each lixel.  In order to do this, four additional keyword 
+        arguments must be supplied to tell the HaloProfiler what to do.
+
+        :param halo_profiler_kwargs (dict): a dictionary of standard HaloProfiler keyword 
+        arguments and values to be given to the HaloProfiler.
+               EXAMPLE: halo_profiler_kwargs = {'halo_list_format': {'id': 0,
+                                                                     'center': [4, 5, 6],
+                                                                     'TotalMassMsun': 1},
+                                                'halo_list_file': 'HopAnalysis.out'}
+
+        :param halo_profiler_actions (list): a list of actions to be performed by the 
+        HaloProfiler.  Each item in the list should be a dictionary with the following 
+        entries: "function", "args", and "kwargs", for the function to be performed, 
+        the arguments supplied to that function, and the keyword arguments.
+               EXAMPLE: halo_profiler_actions = [{'function': make_profiles,
+                                                  'args': None,
+                                                  'kwargs': {'filename': 'VirializedHalos.out'}},
+                                                 {'function': add_halo_filter,
+                                                  'args': VirialFilter,
+                                                  'kwargs': {'overdensity_field': 'ActualOverdensity',
+                                                             'virial_overdensity': 200,
+                                                             'virial_filters': [['TotalMassMsun','>=','1e14']],
+                                                             'virial_quantities': ['TotalMassMsun','RadiusMpc']}}]
+
+        :param halo_list (string): 'all' to use the full halo list, or 'filtered' to use 
+        the filtered halo list created after calling make_profiles.
+               EXAMPLE: halo_list = 'filtered'
+
+        :param halo_mass_field (string): the field from the halo list to use for mass.  
+        Default: 'TotalMassMsun_200'.
+        """
 
         # Calculate solution.
         self._calculate_light_ray_solution(seed=seed, filename=solution_filename)
@@ -163,7 +239,7 @@
             mylog.info("Proc %04d: creating ray segment at z = %f." % 
                        (my_rank, segment['redshift']))
             if segment['next'] is None:
-                next_redshift = self.FinalRedshift
+                next_redshift = self.final_redshift
             else:
                 next_redshift = segment['next']['redshift']
 
@@ -221,7 +297,8 @@
             # Calculate distance to nearest object on halo list for each lixel.
             if get_nearest_galaxy:
                 sub_data['nearest_galaxy'], sub_data['nearest_galaxy_mass'] = \
-                    self._get_nearest_galaxy_distance(sub_data, halo_list)
+                    self._get_nearest_galaxy_distance(sub_data, halo_list,
+                                                      halo_mass_field=halo_mass_field)
                 sub_data['nearest_galaxy'] *= pf.units['mpccm']
 
             # Remove empty lixels.
@@ -299,7 +376,8 @@
         del hp
         return return_list
 
-    def _get_nearest_galaxy_distance(self, data, halo_list):
+    def _get_nearest_galaxy_distance(self, data, halo_list, 
+                                     halo_mass_field='TotalMassMsun_200'):
         """
         Calculate distance to nearest object in halo list for each lixel in data.
         Return list of distances and masses of nearest objects.
@@ -307,7 +385,7 @@
 
         # Create position array from halo list.
         halo_centers = na.array(map(lambda halo: halo['center'], halo_list))
-        halo_mass = na.array(map(lambda halo: halo['TotalMassMsun'], halo_list))
+        halo_mass = na.array(map(lambda halo: halo[halo_mass_field], halo_list))
 
         nearest_distance = na.zeros(data['x'].shape)
         nearest_mass = na.zeros(data['x'].shape)

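Putting the renamed arguments together, a short usage sketch grounded in the
docstrings above (the parameter file, seed, and field names are illustrative):

    from yt.analysis_modules.light_ray.light_ray import LightRay

    lr = LightRay('my_simulation.par', final_redshift=0.0, initial_redshift=1.0,
                  use_minimum_datasets=True)
    lr.make_light_ray(seed=8675309,
                      fields=['Temperature', 'Density', 'HI_NumberDensity'],
                      solution_filename='ray_solution.txt',
                      data_filename='ray.h5',
                      get_los_velocity=True)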

--- a/yt/analysis_modules/setup.py	Fri Jun 03 18:04:40 2011 -0700
+++ b/yt/analysis_modules/setup.py	Wed Jun 08 08:15:25 2011 -0700
@@ -6,6 +6,7 @@
     config = Configuration('analysis_modules',parent_package,top_path)
     config.make_config_py() # installs __config__.py
     config.make_svn_version_py()
+    config.add_subpackage("absorption_spectrum")
     config.add_subpackage("coordinate_transformation")
     config.add_subpackage("halo_finding")
     config.add_subpackage("halo_mass_function")


--- a/yt/analysis_modules/simulation_handler/enzo_simulation.py	Fri Jun 03 18:04:40 2011 -0700
+++ b/yt/analysis_modules/simulation_handler/enzo_simulation.py	Wed Jun 08 08:15:25 2011 -0700
@@ -43,7 +43,7 @@
     Super class for performing the same operation over all data dumps in 
     a simulation from one redshift to another.
     """
-    def __init__(self, EnzoParameterFile, initial_time=None, final_time=None, initial_redshift=None, final_redshift=None,
+    def __init__(self, enzo_parameter_file, initial_time=None, final_time=None, initial_redshift=None, final_redshift=None,
                  links=False, enzo_parameters=None, get_time_outputs=True, get_redshift_outputs=True, get_available_data=False,
                  get_data_by_force=False):
         """
@@ -70,14 +70,14 @@
                is loaded up to get the time and redshift manually.  This is useful with collapse simulations that use 
                OutputFirstTimeAtLevel or with simulations that make outputs based on cycle numbers.  Default: False.
         """
-        self.EnzoParameterFile = EnzoParameterFile
+        self.enzo_parameter_file = enzo_parameter_file
         self.enzoParameters = {}
         self.redshift_outputs = []
         self.allOutputs = []
         self.InitialTime = initial_time
         self.FinalTime = final_time
-        self.InitialRedshift = initial_redshift
-        self.FinalRedshift = final_redshift
+        self.initial_redshift = initial_redshift
+        self.final_redshift = final_redshift
         self.links = links
         self.get_time_outputs = get_time_outputs
         self.get_redshift_outputs = get_redshift_outputs
@@ -202,20 +202,20 @@
         """
 
         # Check for sufficient starting/ending parameters.
-        if self.InitialTime is None and self.InitialRedshift is None:
+        if self.InitialTime is None and self.initial_redshift is None:
             if self.enzoParameters['ComovingCoordinates'] and \
                'CosmologyInitialRedshift' in self.enzoParameters:
-                self.InitialRedshift = self.enzoParameters['CosmologyInitialRedshift']
+                self.initial_redshift = self.enzoParameters['CosmologyInitialRedshift']
             elif 'InitialTime' in self.enzoParameters:
                 self.InitialTime = self.enzoParameters['InitialTime']
             else:
                 mylog.error("Couldn't find parameter for initial time or redshift from parameter file.")
                 return None
 
-        if self.FinalTime is None and self.FinalRedshift is None:
+        if self.FinalTime is None and self.final_redshift is None:
             if self.enzoParameters['ComovingCoordinates'] and \
                'CosmologyFinalRedshift' in self.enzoParameters:
-                self.FinalRedshift = self.enzoParameters['CosmologyFinalRedshift']
+                self.final_redshift = self.enzoParameters['CosmologyFinalRedshift']
             elif 'StopTime' in self.enzoParameters:
                 self.FinalTime = self.enzoParameters['StopTime']
             else:
@@ -236,11 +236,11 @@
                                                 OmegaMatterNow = self.enzoParameters['CosmologyOmegaMatterNow'],
                                                 OmegaLambdaNow = self.enzoParameters['CosmologyOmegaLambdaNow'],
                                                 InitialRedshift = self.enzoParameters['CosmologyInitialRedshift'])
-            if self.InitialRedshift is not None:
-                self.InitialTime = self.enzo_cosmology.ComputeTimeFromRedshift(self.InitialRedshift) / \
+            if self.initial_redshift is not None:
+                self.InitialTime = self.enzo_cosmology.ComputeTimeFromRedshift(self.initial_redshift) / \
                     self.enzo_cosmology.TimeUnits
-            if self.FinalRedshift is not None:
-                self.FinalTime = self.enzo_cosmology.ComputeTimeFromRedshift(self.FinalRedshift) / \
+            if self.final_redshift is not None:
+                self.FinalTime = self.enzo_cosmology.ComputeTimeFromRedshift(self.final_redshift) / \
                     self.enzo_cosmology.TimeUnits
 
         # Get initial time of simulation.
@@ -286,7 +286,7 @@
 
     def _read_enzo_parameter_file(self):
         "Reads an Enzo parameter file looking for cosmology and output parameters."
-        lines = open(self.EnzoParameterFile).readlines()
+        lines = open(self.enzo_parameter_file).readlines()
         for line in lines:
             if line.find("#") >= 0: # Keep the commented lines
                 line=line[:line.find("#")]
@@ -383,8 +383,8 @@
                the lowest redshift dataset present will be used.  Default: None.
         """
 
-        if initial_redshift is None: initial_redshift = self.InitialRedshift
-        if final_redshift is None: final_redshift = self.FinalRedshift
+        if initial_redshift is None: initial_redshift = self.initial_redshift
+        if final_redshift is None: final_redshift = self.final_redshift
 
         # Calculate maximum delta z for each data dump.
         self._calculate_deltaz_max()


--- a/yt/data_objects/data_containers.py	Fri Jun 03 18:04:40 2011 -0700
+++ b/yt/data_objects/data_containers.py	Wed Jun 08 08:15:25 2011 -0700
@@ -1421,7 +1421,7 @@
 class AMRQuadTreeProjBase(AMR2DData):
     _top_node = "/Projections"
     _key_fields = AMR2DData._key_fields + ['weight_field']
-    _type_name = "quad_proj"
+    _type_name = "proj"
     _con_args = ('axis', 'field', 'weight_field')
     def __init__(self, axis, field, weight_field = None,
                  max_level = None, center = None, pf = None,
@@ -1705,7 +1705,7 @@
 class AMRProjBase(AMR2DData):
     _top_node = "/Projections"
     _key_fields = AMR2DData._key_fields + ['weight_field']
-    _type_name = "proj"
+    _type_name = "overlap_proj"
     _con_args = ('axis', 'field', 'weight_field')
     def __init__(self, axis, field, weight_field = None,
                  max_level = None, center = None, pf = None,

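With this swap the quadtree projection takes over the standard name, so
existing scripts silently get the new backend, while the old overlap
projection stays reachable under its new name.  A sketch assuming the usual
_type_name registration on the hierarchy (dataset name illustrative):

    from yt.mods import load

    pf = load('RedshiftOutput0005')
    quad = pf.h.proj(0, 'Density')           # quadtree projection, now the default
    over = pf.h.overlap_proj(0, 'Density')   # the previous overlap-based projection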

--- a/yt/data_objects/derived_quantities.py	Fri Jun 03 18:04:40 2011 -0700
+++ b/yt/data_objects/derived_quantities.py	Wed Jun 08 08:15:25 2011 -0700
@@ -109,7 +109,10 @@
         rv = []
         for my_list in self.retvals:
             data = na.array(my_list).transpose()
-            rv.append(self._mpi_catarray(data).transpose())
+            old_shape = data.shape
+            data = self._mpi_catarray(data).transpose()
+            if len(data.shape) != len(old_shape): data = data.squeeze()
+            rv.append(data)
         self.retvals = rv
         
     def _call_func_unlazy(self, args, kwargs):

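The new guard exists because transposing the concatenated array can leave a
singleton axis that callers do not expect.  A toy illustration of the reshape
(not the MPI code path itself):

    import numpy as na  # the numpy alias used throughout yt

    # One scalar per processor: stacking introduces a singleton axis.
    per_proc = [na.array([4.2]), na.array([3.1])]
    data = na.array(per_proc).transpose()   # shape (1, 2)
    old_shape = (2,)                        # shape the caller expects
    if len(data.shape) != len(old_shape):
        data = data.squeeze()               # back to shape (2,)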

--- a/yt/frontends/castro/data_structures.py	Fri Jun 03 18:04:40 2011 -0700
+++ b/yt/frontends/castro/data_structures.py	Wed Jun 08 08:15:25 2011 -0700
@@ -46,6 +46,8 @@
            StaticOutput
 from yt.utilities.definitions import \
     mpc_conversion
+from yt.utilities.amr_utils import \
+    get_box_grids_level
 
 from .definitions import \
     castro2enzoDict, \
@@ -382,8 +384,13 @@
             grid._setup_dx()
 
     def __setup_grid_tree(self):
+        mask = na.empty(self.grids.size, dtype='int32')
         for i, grid in enumerate(self.grids):
-            children = self._get_grid_children(grid)
+            get_box_grids_level(grid.LeftEdge, grid.RightEdge, grid.Level + 1,
+                                self.grid_left_edge, self.grid_right_edge,
+                                self.grid_levels, mask)
+            children = self.grids[mask.astype("bool")]
+            #assert(len(children) == len(self._get_grid_children(grid)))
             for child in children:
                 self.gridReverseTree[child.id].append(i)
                 self.gridTree[i].append(weakref.proxy(child))


--- a/yt/frontends/ramses/_ramses_reader.pyx	Fri Jun 03 18:04:40 2011 -0700
+++ b/yt/frontends/ramses/_ramses_reader.pyx	Wed Jun 08 08:15:25 2011 -0700
@@ -397,10 +397,10 @@
         # We now have to get our field names to fill our array
         self.trees = <RAMSES_tree**>\
             malloc(sizeof(RAMSES_tree*) * self.rsnap.m_header.ncpu)
+        for ii in range(self.ndomains): self.trees[ii] = NULL
         self.hydro_datas = <RAMSES_hydro_data ***>\
                        malloc(sizeof(RAMSES_hydro_data**) * self.rsnap.m_header.ncpu)
         self.ndomains = self.rsnap.m_header.ncpu
-        #for ii in range(self.ndomains): self.trees[ii] = NULL
         # Note we don't do ncpu + 1
         for idomain in range(self.rsnap.m_header.ncpu):
             # we don't delete local_tree
@@ -415,8 +415,8 @@
                     new RAMSES_hydro_data(deref(local_tree))
             self.trees[idomain] = local_tree
             # We do not delete the final snapshot, which we'll use later
-            if idomain + 1 < self.rsnap.m_header.ncpu:
-                del local_hydro_data
+            #if idomain + 1 < self.rsnap.m_header.ncpu:
+            #    del local_hydro_data
         # Only once, we read all the field names
         self.nfields = local_hydro_data.m_nvars
         cdef string *field_name
@@ -434,7 +434,6 @@
             self.field_names.append(field_name.c_str())
             self.field_ind[self.field_names[-1]] = ifield
         # This all needs to be cleaned up in the deallocator
-        del local_hydro_data
 
     def __dealloc__(self):
         import traceback; traceback.print_stack()
@@ -457,6 +456,8 @@
         if self.snapshot_name != NULL: del self.snapshot_name
         if self.rsnap != NULL: del self.rsnap
         
+    @cython.boundscheck(False)
+    @cython.wraparound(False)
     def count_zones(self):
         # We need to do simulation domains here
 
@@ -468,25 +469,23 @@
 
         # All the loop-local pointers must be declared up here
 
-        cell_count = []
+        cdef np.ndarray[np.int64_t, ndim=1] cell_count
+        cell_count = np.zeros(self.rsnap.m_header.levelmax + 1, 'int64')
         cdef int local_count = 0
-        for ilevel in range(self.rsnap.m_header.levelmax + 1):
-            cell_count.append(0)
+        cdef int tree_count
         for idomain in range(1, self.rsnap.m_header.ncpu + 1):
-            local_tree = new RAMSES_tree(deref(self.rsnap), idomain,
-                                         self.rsnap.m_header.levelmax, 0)
-            local_tree.read()
-            local_hydro_data = new RAMSES_hydro_data(deref(local_tree))
+            local_tree = self.trees[idomain - 1]
             for ilevel in range(local_tree.m_maxlevel + 1):
                 local_count = 0
+                tree_count = 0
                 local_level = &local_tree.m_AMR_levels[ilevel]
                 grid_it = local_tree.begin(ilevel)
                 grid_end = local_tree.end(ilevel)
                 while grid_it != grid_end:
                     local_count += (grid_it.get_domain() == idomain)
+                    tree_count += 1
                     grid_it.next()
                 cell_count[ilevel] += local_count
-            del local_tree, local_hydro_data
 
         return cell_count
 
@@ -545,6 +544,9 @@
 
         return header_info
 
+    @cython.cdivision(True)
+    @cython.boundscheck(False)
+    @cython.wraparound(False)
     def fill_hierarchy_arrays(self, 
                               np.ndarray[np.int32_t, ndim=1] top_grid_dims,
                               np.ndarray[np.float64_t, ndim=2] left_edges,
@@ -575,13 +577,10 @@
 
         cdef np.int32_t rr
         cdef int i
-        cell_count = []
-        level_cell_counts = {}
+        cdef np.ndarray[np.int64_t, ndim=1] level_cell_counts
+        level_cell_counts = np.zeros(self.rsnap.m_header.levelmax + 1, 'int64')
         for idomain in range(1, self.rsnap.m_header.ncpu + 1):
-            local_tree = new RAMSES_tree(deref(self.rsnap), idomain,
-                                         self.rsnap.m_header.levelmax, 0)
-            local_tree.read()
-            local_hydro_data = new RAMSES_hydro_data(deref(local_tree))
+            local_tree = self.trees[idomain - 1]
             for ilevel in range(local_tree.m_maxlevel + 1):
                 # this gets overwritten for every domain, which is okay
                 level_cell_counts[ilevel] = grid_ind 
@@ -622,7 +621,6 @@
                     grid_ind += 1
                     grid_aind += 1
                     grid_it.next()
-            del local_tree, local_hydro_data
 
     def read_oct_grid(self, char *field, int level, int domain, int grid_id):
 
@@ -711,13 +709,29 @@
                         to_fill += 1
         return to_fill
 
+#def recursive_patch_splitting(ProtoSubgrid psg,
+#        np.ndarray[np.int64_t, ndim=1] dims,
+#        np.ndarray[np.int64_t, ndim=1] inds,
+#        np.ndarray[np.int64_t, ndim=2] left_index,
+#        np.ndarray[np.int64_t, ndim=2] right_index,
+#        np.ndarray[np.int64_t, ndim=2] gdims,
+#        np.ndarray[np.int64_t, ndim=2] fl,
+#        int num_deep = 0):
+#    cdef float min_eff = 0.1
+#    if num_deep > 40:
+#        psg.efficiency = min_eff
+#        return [psg]
+#    if psg.efficiency > min_eff or psg.efficiency < 0.0:
+#        return [psg]
+#    cdef 
+#
 cdef class ProtoSubgrid:
     cdef np.int64_t *signature[3]
     cdef np.int64_t left_edge[3]
     cdef np.int64_t right_edge[3]
     cdef np.int64_t dimensions[3]
     cdef public np.float64_t efficiency
-    cdef public object sigs
+    cdef np.int64_t *sigs[3]
     cdef public object grid_file_locations
     cdef public object dd
         
@@ -727,8 +741,6 @@
                    np.ndarray[np.int64_t, ndim=1] left_index,
                    np.ndarray[np.int64_t, ndim=1] dimensions, 
                    np.ndarray[np.int64_t, ndim=2] left_edges,
-                   np.ndarray[np.int64_t, ndim=2] right_edges,
-                   np.ndarray[np.int64_t, ndim=2] grid_dimensions,
                    np.ndarray[np.int64_t, ndim=2] grid_file_locations):
         # This also includes the shrinking step.
         cdef int i, ci, ng = left_edges.shape[0]
@@ -736,23 +748,22 @@
         cdef int l0, r0, l1, r1, l2, r2, i0, i1, i2
         cdef np.int64_t temp_l[3], temp_r[3], ncells
         cdef np.float64_t efficiency
-        self.sigs = []
         for i in range(3):
             temp_l[i] = left_index[i] + dimensions[i]
             temp_r[i] = left_index[i]
             self.signature[i] = NULL
         for gi in range(ng):
             if left_edges[gi,0] > left_index[0]+dimensions[0] or \
-               right_edges[gi,0] < left_index[0] or \
+               left_edges[gi,0] + 2 < left_index[0] or \
                left_edges[gi,1] > left_index[1]+dimensions[1] or \
-               right_edges[gi,1] < left_index[1] or \
+               left_edges[gi,1] + 2 < left_index[1] or \
                left_edges[gi,2] > left_index[2]+dimensions[2] or \
-               right_edges[gi,2] < left_index[2]:
+               left_edges[gi,2] + 2 < left_index[2]:
                #print "Skipping grid", gi, "which lies outside out box"
                continue
             for i in range(3):
                 temp_l[i] = i64min(left_edges[gi,i], temp_l[i])
-                temp_r[i] = i64max(right_edges[gi,i], temp_r[i])
+                temp_r[i] = i64max(left_edges[gi,i] + 2, temp_r[i])
         for i in range(3):
             self.left_edge[i] = i64max(temp_l[i], left_index[i])
             self.right_edge[i] = i64min(temp_r[i], left_index[i] + dimensions[i])
@@ -760,13 +771,14 @@
             if self.dimensions[i] <= 0:
                 self.efficiency = -1.0
                 return
-            self.sigs.append(np.zeros(self.dimensions[i], 'int64'))
-        #print self.sigs[0].size, self.sigs[1].size, self.sigs[2].size
+            self.sigs[i] = <np.int64_t *> malloc(
+                                sizeof(np.int64_t) * self.dimensions[i])
+            for gi in range(self.dimensions[i]): self.sigs[i][gi] = 0
         
         # My guess is that this whole loop could be done more efficiently.
         # However, this is clear and straightforward, so it is a good first
         # pass.
-        cdef np.ndarray[np.int64_t, ndim=1] sig0, sig1, sig2
+        cdef np.int64_t *sig0, *sig1, *sig2
         sig0 = self.sigs[0]
         sig1 = self.sigs[1]
         sig2 = self.sigs[2]
@@ -776,15 +788,15 @@
         for gi in range(ng):
             used = 0
             nnn = 0
-            for l0 in range(grid_dimensions[gi, 0]):
+            for l0 in range(2):
                 i0 = left_edges[gi, 0] + l0
                 if i0 < self.left_edge[0]: continue
                 if i0 >= self.right_edge[0]: break
-                for l1 in range(grid_dimensions[gi, 1]):
+                for l1 in range(2):
                     i1 = left_edges[gi, 1] + l1
                     if i1 < self.left_edge[1]: continue
                     if i1 >= self.right_edge[1]: break
-                    for l2 in range(grid_dimensions[gi, 2]):
+                    for l2 in range(2):
                         i2 = left_edges[gi, 2] + l2
                         if i2 < self.left_edge[2]: continue
                         if i2 >= self.right_edge[2]: break
@@ -809,6 +821,11 @@
         #print "Efficiency is %0.3e" % (efficiency)
         self.efficiency = efficiency
 
+    def __dealloc__(self):
+        free(self.sigs[0])
+        free(self.sigs[1])
+        free(self.sigs[2])
+
     @cython.boundscheck(False)
     @cython.wraparound(False)
     def find_split(self):
@@ -817,7 +834,7 @@
         cdef np.ndarray[ndim=1, dtype=np.int64_t] axes
         cdef np.int64_t strength, zcstrength, zcp
         axes = np.argsort(self.dd)[::-1]
-        cdef np.ndarray[np.int64_t] sig
+        cdef np.int64_t *sig
         for axi in range(3):
             ax = axes[axi]
             center = self.dimensions[ax] / 2
@@ -950,3 +967,36 @@
         hilbert_indices[o] = h
     return hilbert_indices
 
+@cython.boundscheck(False)
+@cython.wraparound(False)
+def get_array_indices_lists(np.ndarray[np.int64_t, ndim=1] ind,
+                            np.ndarray[np.int64_t, ndim=1] uind):
+    cdef np.ndarray[np.int64_t, ndim=1] count = np.zeros(uind.shape[0], 'int64')
+    cdef int n, i
+    cdef np.int64_t mi, mui
+    for i in range(ind.shape[0]):
+        mi = ind[i]
+        for n in range(uind.shape[0]):
+            if uind[n] == mi:
+                count[n] += 1
+                break
+    cdef np.int64_t **inds
+    inds = <np.int64_t **> malloc(sizeof(np.int64_t *) * uind.shape[0])
+    cdef int *li = <int *> malloc(sizeof(int) * uind.shape[0])
+    cdef np.ndarray[np.int64_t, ndim=1] indices
+    all_indices = []
+    for n in range(uind.shape[0]):
+        indices = np.zeros(count[n], 'int64')
+        all_indices.append(indices)
+        inds[n] = <np.int64_t *> indices.data
+        li[n] = 0
+    for i in range(ind.shape[0]):
+        mi = ind[i]
+        for n in range(uind.shape[0]):
+            if uind[n] == mi:
+                inds[n][li[n]] = i
+                li[n] += 1
+                break
+    free(inds) # not inds[...]
+    free(li)
+    return all_indices

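What get_array_indices_lists computes, expressed in plain numpy for clarity
(the Cython version exists to avoid the Python-level pass per unique value):

    import numpy as np

    def get_array_indices_lists_py(ind, uind):
        # For each unique value, the positions in ind where it occurs.
        return [np.where(ind == u)[0] for u in uind]

    hilbert_indices = np.array([5, 3, 5, 9, 3, 5], dtype='int64')
    unique_indices = np.unique(hilbert_indices)
    all_indices = get_array_indices_lists_py(hilbert_indices, unique_indices)
    # -> [array([1, 4]), array([0, 2, 5]), array([3])]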

--- a/yt/frontends/ramses/data_structures.py	Fri Jun 03 18:04:40 2011 -0700
+++ b/yt/frontends/ramses/data_structures.py	Wed Jun 08 08:15:25 2011 -0700
@@ -38,6 +38,8 @@
 from .fields import RAMSESFieldInfo
 from yt.utilities.definitions import \
     mpc_conversion
+from yt.utilities.amr_utils import \
+    get_box_grids_level
 from yt.utilities.io_handler import \
     io_registry
 from yt.data_objects.field_info_container import \
@@ -168,7 +170,6 @@
             # left_index is integers of the index, with respect to this level
             left_index = na.rint((ogrid_left_edge[ggi,:]) * nd / DW ).astype('int64')
             # we've got octs, so it's +2
-            right_index = left_index + 2
             pbar = get_pbar("Re-gridding ", left_index.shape[0])
             dlp = [None, None, None]
             i = 0
@@ -182,39 +183,42 @@
             # Strictly speaking, we don't care about the index of any
             # individual oct at this point.  So we can then split them up.
             unique_indices = na.unique(hilbert_indices)
-            for curve_index in unique_indices:
+            print "Level % 2i has % 10i unique indices for %0.3e octs" % (
+                        level, unique_indices.size, hilbert_indices.size)
+            all_indices = _ramses_reader.get_array_indices_lists(
+                        hilbert_indices, unique_indices)
+            for curve_index, my_octs in zip(unique_indices, all_indices):
                 #print "Handling", curve_index
-                my_octs = (hilbert_indices == curve_index)
+                #my_octs = (hilbert_indices == curve_index)
                 dleft_index = left_index[my_octs,:]
-                dright_index = left_index[my_octs,:] + 2
-                ddims = (dright_index * 0) + 2
                 dfl = fl[my_octs,:]
                 initial_left = na.min(dleft_index, axis=0)
-                idims = (na.max(dright_index, axis=0) - initial_left).ravel()
-                #if level > 6: insert_ipython()
+                idims = (na.max(dleft_index, axis=0) - initial_left).ravel()+2
+                #if level > 10: insert_ipython()
                 #print initial_left, idims
                 psg = _ramses_reader.ProtoSubgrid(initial_left, idims,
-                                dleft_index, dright_index, ddims, dfl)
+                                dleft_index, dfl)
                 if psg.efficiency <= 0: continue
                 self.num_deep = 0
                 psgs.extend(self._recursive_patch_splitting(
                     psg, idims, initial_left, 
-                    dleft_index, dright_index, ddims, dfl))
+                    dleft_index, dfl))
+            print "Done with level % 2i" % (level)
             pbar.finish()
             self.proto_grids.append(psgs)
             sums = na.zeros(3, dtype='int64')
             mylog.info("Final grid count: %s", len(self.proto_grids[level]))
             if len(self.proto_grids[level]) == 1: continue
-            for g in self.proto_grids[level]:
-                sums += [s.sum() for s in g.sigs]
-            assert(na.all(sums == dims.prod(axis=1).sum()))
+            #for g in self.proto_grids[level]:
+            #    sums += [s.sum() for s in g.sigs]
+            #assert(na.all(sums == dims.prod(axis=1).sum()))
         self.num_grids = sum(len(l) for l in self.proto_grids)
 
     num_deep = 0
 
     @num_deep_inc
     def _recursive_patch_splitting(self, psg, dims, ind,
-            left_index, right_index, gdims, fl):
+            left_index, fl):
         min_eff = 0.1 # This isn't always respected.
         if self.num_deep > 40:
             # If we've recursed more than 100 times, we give up.
@@ -234,13 +238,13 @@
         li_l = ind.copy()
         if na.any(dims_l <= 0): return [psg]
         L = _ramses_reader.ProtoSubgrid(
-                li_l, dims_l, left_index, right_index, gdims, fl)
+                li_l, dims_l, left_index, fl)
         #print " " * self.num_deep + "L", tt, L.efficiency
         if L.efficiency > 1.0: raise RuntimeError
         if L.efficiency <= 0.0: L = []
         elif L.efficiency < min_eff:
             L = self._recursive_patch_splitting(L, dims_l, li_l,
-                    left_index, right_index, gdims, fl)
+                    left_index, fl)
         else:
             L = [L]
         dims_r = dims.copy()
@@ -249,13 +253,13 @@
         li_r[ax] += fp
         if na.any(dims_r <= 0): return [psg]
         R = _ramses_reader.ProtoSubgrid(
-                li_r, dims_r, left_index, right_index, gdims, fl)
+                li_r, dims_r, left_index, fl)
         #print " " * self.num_deep + "R", tt, R.efficiency
         if R.efficiency > 1.0: raise RuntimeError
         if R.efficiency <= 0.0: R = []
         elif R.efficiency < min_eff:
             R = self._recursive_patch_splitting(R, dims_r, li_r,
-                    left_index, right_index, gdims, fl)
+                    left_index, fl)
         else:
             R = [R]
         return L + R
@@ -278,18 +282,16 @@
                 gi += 1
         self.grids = na.array(grids, dtype='object')
 
-    def _get_grid_parents(self, grid, LE, RE):
-        mask = na.zeros(self.num_grids, dtype='bool')
-        grids, grid_ind = self.get_box_grids(LE, RE)
-        mask[grid_ind] = True
-        mask = na.logical_and(mask, (self.grid_levels == (grid.Level-1)).flat)
-        return self.grids[mask]
-
     def _populate_grid_objects(self):
+        mask = na.empty(self.grids.size, dtype='int32')
+        print self.grid_levels.dtype
         for gi,g in enumerate(self.grids):
-            parents = self._get_grid_parents(g,
-                            self.grid_left_edge[gi,:],
-                            self.grid_right_edge[gi,:])
+            get_box_grids_level(self.grid_left_edge[gi,:],
+                                self.grid_right_edge[gi,:],
+                                g.Level - 1,
+                                self.grid_left_edge, self.grid_right_edge,
+                                self.grid_levels, mask)
+            parents = self.grids[mask.astype("bool")]
             if len(parents) > 0:
                 g.Parent.extend(parents.tolist())
                 for p in parents: p.Children.append(g)


--- a/yt/utilities/_amr_utils/QuadTree.pyx	Fri Jun 03 18:04:40 2011 -0700
+++ b/yt/utilities/_amr_utils/QuadTree.pyx	Wed Jun 08 08:15:25 2011 -0700
@@ -41,18 +41,17 @@
     np.float64_t *val
     np.float64_t weight_val
     np.int64_t pos[2]
-    int level
-    int nvals
     QuadTreeNode *children[2][2]
 
 cdef void QTN_add_value(QuadTreeNode *self,
-        np.float64_t *val, np.float64_t weight_val):
+        np.float64_t *val, np.float64_t weight_val,
+        int nvals):
     cdef int i
-    for i in range(self.nvals):
+    for i in range(nvals):
         self.val[i] += val[i]
     self.weight_val += weight_val
 
-cdef void QTN_refine(QuadTreeNode *self):
+cdef void QTN_refine(QuadTreeNode *self, int nvals):
     cdef int i, j, i1, j1
     cdef np.int64_t npos[2]
     cdef QuadTreeNode *node
@@ -62,27 +61,22 @@
             npos[1] = self.pos[1] * 2 + j
             # We have to be careful with allocation...
             self.children[i][j] = QTN_initialize(
-                        npos,
-                        self.nvals, self.val, self.weight_val,
-                        self.level + 1)
-    for i in range(self.nvals): self.val[i] = 0.0
+                        npos, nvals, self.val, self.weight_val)
+    for i in range(nvals): self.val[i] = 0.0
     self.weight_val = 0.0
 
 cdef QuadTreeNode *QTN_initialize(np.int64_t pos[2], int nvals,
-                        np.float64_t *val, np.float64_t weight_val,
-                        int level):
+                        np.float64_t *val, np.float64_t weight_val):
     cdef QuadTreeNode *node
     cdef int i, j
     node = <QuadTreeNode *> malloc(sizeof(QuadTreeNode))
     node.pos[0] = pos[0]
     node.pos[1] = pos[1]
-    node.nvals = nvals
     node.val = <np.float64_t *> malloc(
                 nvals * sizeof(np.float64_t))
     for i in range(2):
         for j in range(2):
             node.children[i][j] = NULL
-    node.level = level
     if val != NULL:
         for i in range(nvals):
             node.val[i] = val[i]
@@ -106,6 +100,7 @@
     cdef QuadTreeNode ***root_nodes
     cdef np.int64_t top_grid_dims[2]
     cdef int merged
+    cdef int num_cells
 
     def __cinit__(self, np.ndarray[np.int64_t, ndim=1] top_grid_dims,
                   int nvals):
@@ -136,7 +131,8 @@
             for j in range(top_grid_dims[1]):
                 pos[1] = j
                 self.root_nodes[i][j] = QTN_initialize(
-                    pos, nvals, vals, weight_val, 0)
+                    pos, nvals, vals, weight_val)
+        self.num_cells = self.top_grid_dims[0] * self.top_grid_dims[1]
 
     cdef int count_total_cells(self, QuadTreeNode *root):
         cdef int total = 0
@@ -147,6 +143,8 @@
                 total += self.count_total_cells(root.children[i][j])
         return total + 1
 
+    @cython.boundscheck(False)
+    @cython.wraparound(False)
     cdef int fill_buffer(self, QuadTreeNode *root, int curpos,
                           np.ndarray[np.int32_t, ndim=1] refined,
                           np.ndarray[np.float64_t, ndim=2] values,
@@ -164,6 +162,8 @@
                                  refined, values, wval)
         return curpos
 
+    @cython.boundscheck(False)
+    @cython.wraparound(False)
     cdef int unfill_buffer(self, QuadTreeNode *root, int curpos,
                           np.ndarray[np.int32_t, ndim=1] refined,
                           np.ndarray[np.float64_t, ndim=2] values,
@@ -180,28 +180,30 @@
             for j in range(2):
                 pos[0] = root.pos[0]*2 + i
                 pos[1] = root.pos[1]*2 + j
-                child = QTN_initialize(pos, self.nvals, NULL, 0.0, root.level+1)
+                child = QTN_initialize(pos, self.nvals, NULL, 0.0)
                 root.children[i][j] = child
                 curpos = self.unfill_buffer(child, curpos, refined, values, wval)
         return curpos
 
 
+    @cython.boundscheck(False)
+    @cython.wraparound(False)
     def frombuffer(self, np.ndarray[np.int32_t, ndim=1] refined,
                          np.ndarray[np.float64_t, ndim=2] values,
                          np.ndarray[np.float64_t, ndim=1] wval):
         self.merged = 1 # Just on the safe side
         cdef int curpos = 0
         cdef QuadTreeNode *root
+        self.num_cells = wval.shape[0]
         for i in range(self.top_grid_dims[0]):
             for j in range(self.top_grid_dims[1]):
                 curpos = self.unfill_buffer(self.root_nodes[i][j], curpos,
                                  refined, values, wval)
 
+    @cython.boundscheck(False)
+    @cython.wraparound(False)
     def tobuffer(self):
-        cdef int total = 0
-        for i in range(self.top_grid_dims[0]):
-            for j in range(self.top_grid_dims[1]):
-                total += self.count_total_cells(self.root_nodes[i][j])
+        cdef int total = self.num_cells
         # We now have four buffers:
         # Refined or not (total,) int32
         # Values in each node (total, nvals) float64
@@ -226,20 +228,22 @@
                  int level, np.int64_t pos[2],
                  np.float64_t *val,
                  np.float64_t weight_val):
-        cdef int i, j
+        cdef int i, j, L
         cdef QuadTreeNode *node
         node = self.find_on_root_level(pos, level)
         cdef np.int64_t fac
         for L in range(level):
             if node.children[0][0] == NULL:
-                QTN_refine(node)
+                QTN_refine(node, self.nvals)
+                self.num_cells += 4
             # Maybe we should use bitwise operators?
             fac = self.po2[level - L - 1]
             i = (pos[0] >= fac*(2*node.pos[0]+1))
             j = (pos[1] >= fac*(2*node.pos[1]+1))
             node = node.children[i][j]
-        QTN_add_value(node, val, weight_val)
+        QTN_add_value(node, val, weight_val, self.nvals)
             
+    @cython.cdivision(True)
     cdef QuadTreeNode *find_on_root_level(self, np.int64_t pos[2], int level):
         # We need this because the root level won't just have four children
         # So we find on the root level, then we traverse the tree.
@@ -266,6 +270,7 @@
             pos[0] = pxs[p]
             pos[1] = pys[p]
             self.add_to_position(level, pos, vals, pweight_vals[p])
+        return
 
     def add_grid_to_tree(self, int level,
                          np.ndarray[np.int64_t, ndim=1] start_index,
@@ -282,7 +287,7 @@
         vals = []
         for i in range(self.top_grid_dims[0]):
             for j in range(self.top_grid_dims[1]):
-                total += self.count_at_level(self.root_nodes[i][j], level)
+                total += self.count_at_level(self.root_nodes[i][j], level, 0)
         if count_only: return total
         # Allocate our array
         cdef np.ndarray[np.int64_t, ndim=2] npos
@@ -303,14 +308,14 @@
                 for vi in range(self.nvals): vtoadd[vi] = 0.0
                 wtoadd = 0.0
                 curpos += self.fill_from_level(self.root_nodes[i][j],
-                    level, curpos, pdata, vdata, wdata, vtoadd, wtoadd)
+                    level, curpos, pdata, vdata, wdata, vtoadd, wtoadd, 0)
         return npos, nvals, nwvals
 
-    cdef int count_at_level(self, QuadTreeNode *node, int level):
+    cdef int count_at_level(self, QuadTreeNode *node, int level, int cur_level):
         cdef int i, j
         # We only really return a non-zero, calculated value if we are at the
         # level in question.
-        if node.level == level:
+        if cur_level == level:
             # We return 1 if there are no finer points at this level and zero
             # if there are
             return (node.children[0][0] == NULL)
@@ -318,7 +323,8 @@
         cdef int count = 0
         for i in range(2):
             for j in range(2):
-                count += self.count_at_level(node.children[i][j], level)
+                count += self.count_at_level(node.children[i][j], level,
+                                             cur_level + 1)
         return count
 
     cdef int fill_from_level(self, QuadTreeNode *node, int level,
@@ -327,9 +333,10 @@
                               np.float64_t *vdata,
                               np.float64_t *wdata,
                               np.float64_t *vtoadd,
-                              np.float64_t wtoadd):
+                              np.float64_t wtoadd,
+                              int cur_level):
         cdef int i, j
-        if node.level == level:
+        if cur_level == level:
             if node.children[0][0] != NULL: return 0
             for i in range(self.nvals):
                 vdata[self.nvals * curpos + i] = node.val[i] + vtoadd[i]
@@ -347,7 +354,7 @@
             for j in range(2):
                 added += self.fill_from_level(node.children[i][j],
                         level, curpos + added, pdata, vdata, wdata,
-                        vtoadd, wtoadd)
+                        vtoadd, wtoadd, cur_level + 1)
         if self.merged == 1:
             for i in range(self.nvals):
                 vtoadd[i] -= node.val[i]
@@ -362,7 +369,7 @@
             free(self.root_nodes[i])
         free(self.root_nodes)
 
-cdef void QTN_merge_nodes(QuadTreeNode *n1, QuadTreeNode *n2):
+cdef void QTN_merge_nodes(QuadTreeNode *n1, QuadTreeNode *n2, int nvals):
     # We have four choices when merging nodes.
     # 1. If both nodes have no refinement, then we add values of n2 to n1.
     # 2. If both have refinement, we call QTN_merge_nodes on all four children.
@@ -371,13 +378,13 @@
     # 4. If n1 has refinement and n2 does not, we add the value of n2 to n1.
     cdef int i, j
 
-    QTN_add_value(n1, n2.val, n2.weight_val)
+    QTN_add_value(n1, n2.val, n2.weight_val, nvals)
     if n1.children[0][0] == n2.children[0][0] == NULL:
         pass
     elif n1.children[0][0] != NULL and n2.children[0][0] != NULL:
         for i in range(2):
             for j in range(2):
-                QTN_merge_nodes(n1.children[i][j], n2.children[i][j])
+                QTN_merge_nodes(n1.children[i][j], n2.children[i][j], nvals)
     elif n1.children[0][0] == NULL and n2.children[0][0] != NULL:
         for i in range(2):
             for j in range(2):
@@ -390,8 +397,12 @@
 
 def merge_quadtrees(QuadTree qt1, QuadTree qt2):
     cdef int i, j
+    qt1.num_cells = 0
     for i in range(qt1.top_grid_dims[0]):
         for j in range(qt1.top_grid_dims[1]):
             QTN_merge_nodes(qt1.root_nodes[i][j],
-                            qt2.root_nodes[i][j])
+                            qt2.root_nodes[i][j],
+                            qt1.nvals)
+            qt1.num_cells += qt1.count_total_cells(
+                                qt1.root_nodes[i][j])
     qt1.merged = 1

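The structural change here is that nodes no longer carry their level or nvals;
both are passed down during traversal, shrinking every QuadTreeNode by two
ints.  A minimal Python sketch of the level-passing idea (illustrative only,
not the Cython code):

    class Node(object):
        def __init__(self):
            self.children = None   # level deliberately not stored on the node

    def count_at_level(node, level, cur_level=0):
        # Track depth in the recursion instead of on the node itself.
        if cur_level == level:
            return 1 if node.children is None else 0
        if node.children is None:
            return 0
        return sum(count_at_level(c, level, cur_level + 1)
                   for c in node.children)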

--- a/yt/utilities/_amr_utils/misc_utilities.pyx	Fri Jun 03 18:04:40 2011 -0700
+++ b/yt/utilities/_amr_utils/misc_utilities.pyx	Wed Jun 08 08:15:25 2011 -0700
@@ -50,3 +50,26 @@
             if v < mi: mi = v
             if v > ma: ma = v
     return (mi, ma)
+
+def get_box_grids_level(np.ndarray[np.float64_t, ndim=1] left_edge,
+                        np.ndarray[np.float64_t, ndim=1] right_edge,
+                        int level,
+                        np.ndarray[np.float64_t, ndim=2] left_edges,
+                        np.ndarray[np.float64_t, ndim=2] right_edges,
+                        np.ndarray[np.int32_t, ndim=2] levels,
+                        np.ndarray[np.int32_t, ndim=1] mask):
+    cdef int i, n
+    cdef int nx = left_edges.shape[0]
+    cdef int inside 
+    for i in range(nx):
+        if levels[i,0] != level:
+            mask[i] = 0
+            continue
+        inside = 1
+        for n in range(3):
+            if left_edge[n] > right_edges[i,n] or \
+               right_edge[n] < left_edges[i,n]:
+                inside = 0
+                break
+        if inside == 1: mask[i] = 1
+        else: mask[i] = 0

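For reference, the same selection written in vectorized numpy; the Cython loop
above avoids the temporaries, but the logic is identical (a grid is masked in
if it sits on the requested level and overlaps the box on all three axes):

    import numpy as np

    def get_box_grids_level_py(left_edge, right_edge, level,
                               left_edges, right_edges, levels):
        overlap = np.all((left_edge <= right_edges) &
                         (right_edge >= left_edges), axis=1)
        return (overlap & (levels[:, 0] == level)).astype('int32')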

--- a/yt/utilities/command_line.py	Fri Jun 03 18:04:40 2011 -0700
+++ b/yt/utilities/command_line.py	Wed Jun 08 08:15:25 2011 -0700
@@ -89,9 +89,9 @@
                    help="Desired units"),
     center  = dict(short="-c", long="--center",
                    action="store", type="float",
-                   dest="center", default=[0.5, 0.5, 0.5],
+                   dest="center", default=None,
                    nargs=3,
-                   help="Center (-1,-1,-1 for max)"),
+                   help="Center, command separated (-1 -1 -1 for max)"),
     bn      = dict(short="-b", long="--basename",
                    action="store", type="string",
                    dest="basename", default=None,
@@ -478,6 +478,8 @@
         if opts.center == (-1,-1,-1):
             mylog.info("No center fed in; seeking.")
             v, center = pf.h.find_max("Density")
+        elif opts.center is None:
+            center = 0.5*(pf.domain_left_edge + pf.domain_right_edge)
         center = na.array(center)
         pc=PlotCollection(pf, center=center)
         if opts.axis == 4:
@@ -633,12 +635,60 @@
         pp = PostInventory()
         pp.add_post(arg, desc=opts.desc)
 
+    @cmdln.option("-l", "--language", action="store",
+                  default = None, dest="language",
+                  help="Use syntax highlighter for the file in language")
+    @cmdln.option("-L", "--languages", action="store_true",
+                  default = False, dest="languages",
+                  help="Retrive a list of supported languages")
+    @cmdln.option("-e", "--encoding", action="store",
+                  default = 'utf-8', dest="encoding",
+                  help="Specify the encoding of a file (default is "
+                        "utf-8 or guessing if available)")
+    @cmdln.option("-b", "--open-browser", action="store_true",
+                  default = False, dest="open_browser",
+                  help="Open the paste in a web browser")
+    @cmdln.option("-p", "--private", action="store_true",
+                  default = False, dest="private",
+                  help="Paste as private")
+    @cmdln.option("-c", "--clipboard", action="store_true",
+                  default = False, dest="clipboard",
+                  help="File to output to; else, print.")
+    def do_pastebin(self, subcmd, opts, arg):
+        """
+        Post a script to an anonymous pastebin.
+
+        Usage: yt pastebin [options] <script>
+
+        ${cmd_option_list}
+        """
+        import yt.utilities.lodgeit as lo
+        lo.main( arg, languages=opts.languages, language=opts.language,
+                 encoding=opts.encoding, open_browser=opts.open_browser,
+                 private=opts.private, clipboard=opts.clipboard)
+
+    def do_pastebin_grab(self, subcmd, opts, arg):
+        """
+        Print an online pastebin to STDOUT for local use.  The paste ID is
+        the number at the end of the paste's URL; for example, the ID of
+        http://paste.enzotools.org/show/1688/ is 1688.
+
+        Usage: yt pastebin_grab <Paste ID> 
+        Ex: yt pastebin_grab 1688 > script.py
+
+        """
+        import yt.utilities.lodgeit as lo
+        lo.main( None, download=arg )
+
     @cmdln.option("-o", "--output", action="store",
                   default = None, dest="output_fn",
                   help="File to output to; else, print.")
-    def do_pastegrab(self, subcmd, opts, username, paste_id):
+    def do_pasteboard_grab(self, subcmd, opts, username, paste_id):
         """
         Download from your or another user's pasteboard.
+
+        ${cmd_usage} 
+        ${cmd_option_list}
         """
         from yt.utilities.pasteboard import retrieve_pastefile
         retrieve_pastefile(username, paste_id, opts.output_fn)
@@ -646,19 +696,39 @@
     def do_bugreport(self, subcmd, opts):
         """
         Report a bug in yt
+
+        ${cmd_usage} 
+        ${cmd_option_list}
         """
         print "==============================================================="
         print
         print "Hi there!  Welcome to the yt bugreport taker."
         print
         print "==============================================================="
+        print "At any time in advance of the upload of the bug, you should feel free"
+        print "to ctrl-C out and submit the bug report manually by going here:"
+        print "   http://hg.enzotools.org/yt/issues/new"
+        print 
+        print "Also, in order to submit a bug through this interface, you"
+        print "need a Bitbucket account. If you don't have one, exit this "
+        print "bugreport now and run the 'yt bootstrap_dev' command to create one."
         print
-        print "At any time in advance of the upload of the bug, you"
-        print "should feel free to ctrl-C out and submit the bug "
-        print "report manually by going here:"
-        print "   http://hg.enzotools.org/yt/issues/new"
+        print "Have you checked the existing bug reports to make"
+        print "sure your bug has not already been recorded by someone else?"
+        print "   http://hg.enzotools.org/yt/issues?status=new&status=open"
         print
-        print "First off, how about a nice, pithy summary of the bug?"
+        print "Finally, are you sure that your bug is, in fact, a bug? It might"
+        print "simply be a misunderstanding that could be cleared up by"
+        print "visiting the yt irc channel or getting advice on the email list:"
+        print "   http://yt.enzotools.org/irc.html"
+        print "   http://lists.spacepope.org/listinfo.cgi/yt-users-spacepope.org"
+        print
+        summary = raw_input("Press <enter> if you remain firm in your conviction to continue.")
+        print
+        print
+        print "Okay, sorry about that. How about a nice, pithy ( < 12 words )"
+        print "summary of the bug?  (e.g. 'Particle overlay problem with parallel "
+        print "projections')"
         print
         try:
             current_version = get_yt_version()
@@ -667,11 +737,12 @@
         summary = raw_input("Summary? ")
         bugtype = "bug"
         data = dict(title = summary, type=bugtype)
+        print
         print "Okay, now let's get a bit more information."
         print
         print "Remember that if you want to submit a traceback, you can run"
         print "any script with --paste or --detailed-paste to submit it to"
-        print "the pastebin."
+        print "the pastebin and then include the link in this bugreport."
         if "EDITOR" in os.environ:
             print
             print "Press enter to spawn your editor, %s" % os.environ["EDITOR"]
@@ -728,7 +799,8 @@
         print 
         print "==============================================================="
         print
-        print "Thanks for your bug report!  You can view it here:"
+        print "Thanks for your bug report!  Together we'll make yt totally bug free!"
+        print "You can view bug report here:"
         print "   %s" % url
         print
         print "Keep in touch!"
@@ -737,6 +809,9 @@
     def do_bootstrap_dev(self, subcmd, opts):
         """
         Bootstrap a yt development environment
+
+        ${cmd_usage} 
+        ${cmd_option_list}
         """
         from mercurial import hg, ui, commands
         import imp


--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/yt/utilities/lodgeit.py	Wed Jun 08 08:15:25 2011 -0700
@@ -0,0 +1,317 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+"""
+    LodgeIt!
+    ~~~~~~~~
+
+    A script that pastes stuff into the enzotools pastebin on
+    paste.enzotools.org.
+
+    Modified (very, very slightly) from the original script by the authors
+    below.
+
+    .lodgeitrc / _lodgeitrc
+    -----------------------
+
+    Under UNIX create a file called ``~/.lodgeitrc``, under Windows
+    create a file ``%APPDATA%/_lodgeitrc`` to override defaults::
+
+        language=default_language
+        clipboard=true/false
+        open_browser=true/false
+        encoding=fallback_charset
+
+    :authors: 2007-2008 Georg Brandl <georg at python.org>,
+              2006 Armin Ronacher <armin.ronacher at active-4.com>,
+              2006 Matt Good <matt at matt-good.net>,
+              2005 Raphael Slinckx <raphael at slinckx.net>
+"""
+import os
+import sys
+from optparse import OptionParser
+
+
+SCRIPT_NAME = os.path.basename(sys.argv[0])
+VERSION = '0.3'
+SERVICE_URL = 'http://paste.enzotools.org/'
+SETTING_KEYS = ['author', 'title', 'language', 'private', 'clipboard',
+                'open_browser']
+
+# global server proxy
+_xmlrpc_service = None
+
+
+def fail(msg, code):
+    """Bail out with an error message."""
+    print >> sys.stderr, 'ERROR: %s' % msg
+    sys.exit(code)
+
+
+def load_default_settings():
+    """Load the defaults from the lodgeitrc file."""
+    settings = {
+        'language':     None,
+        'clipboard':    True,
+        'open_browser': False,
+        'encoding':     'iso-8859-15'
+    }
+    rcfile = None
+    if os.name == 'posix':
+        rcfile = os.path.expanduser('~/.lodgeitrc')
+    elif os.name == 'nt' and 'APPDATA' in os.environ:
+        rcfile = os.path.expandvars(r'$APPDATA\_lodgeitrc')
+    if rcfile:
+        try:
+            f = open(rcfile)
+            for line in f:
+                if line.strip()[:1] in '#;':
+                    continue
+                p = line.split('=', 1)
+                if len(p) == 2:
+                    key = p[0].strip().lower()
+                    if key in settings:
+                        if key in ('clipboard', 'open_browser'):
+                            settings[key] = p[1].strip().lower() in \
+                                            ('true', '1', 'on', 'yes')
+                        else:
+                            settings[key] = p[1].strip()
+            f.close()
+        except IOError:
+            pass
+    settings['tags'] = []
+    settings['title'] = None
+    return settings
+
+
+def make_utf8(text, encoding):
+    """Convert a text to UTF-8, brute-force."""
+    try:
+        u = unicode(text, 'utf-8')
+        uenc = 'utf-8'
+    except UnicodeError:
+        try:
+            u = unicode(text, encoding)
+            uenc = encoding
+        except UnicodeError:
+            u = unicode(text, 'iso-8859-15', 'ignore')
+            uenc = 'iso-8859-15'
+    try:
+        import chardet
+    except ImportError:
+        return u.encode('utf-8')
+    d = chardet.detect(text)
+    if d['encoding'] == uenc:
+        return u.encode('utf-8')
+    return unicode(text, d['encoding'], 'ignore').encode('utf-8')
+
+
+def get_xmlrpc_service():
+    """Create the XMLRPC server proxy and cache it."""
+    global _xmlrpc_service
+    import xmlrpclib
+    if _xmlrpc_service is None:
+        try:
+            _xmlrpc_service = xmlrpclib.ServerProxy(SERVICE_URL + 'xmlrpc/',
+                                                    allow_none=True)
+        except Exception, err:
+            fail('Could not connect to Pastebin: %s' % err, -1)
+    return _xmlrpc_service
+
+
+def copy_url(url):
+    """Copy the url into the clipboard."""
+    # try windows first
+    try:
+        import win32clipboard
+    except ImportError:
+        # then give pbcopy a try.  do that before gtk because
+        # gtk might be installed on os x but nobody is interested
+        # in the X11 clipboard there.
+        from subprocess import Popen, PIPE
+        try:
+            client = Popen(['pbcopy'], stdin=PIPE)
+        except OSError:
+            try:
+                import pygtk
+                pygtk.require('2.0')
+                import gtk
+                import gobject
+            except ImportError:
+                return
+            gtk.clipboard_get(gtk.gdk.SELECTION_CLIPBOARD).set_text(url)
+            gobject.idle_add(gtk.main_quit)
+            gtk.main()
+        else:
+            client.stdin.write(url)
+            client.stdin.close()
+            client.wait()
+    else:
+        win32clipboard.OpenClipboard()
+        win32clipboard.EmptyClipboard()
+        win32clipboard.SetClipboardText(url)
+        win32clipboard.CloseClipboard()
+
+
+def open_webbrowser(url):
+    """Open a new browser window."""
+    import webbrowser
+    webbrowser.open(url)
+
+
+def language_exists(language):
+    """Check if a language alias exists."""
+    xmlrpc = get_xmlrpc_service()
+    langs = xmlrpc.pastes.getLanguages()
+    return language in langs
+
+
+def get_mimetype(data, filename):
+    """Try to get MIME type from data."""
+    try:
+        import gnomevfs
+    except ImportError:
+        from mimetypes import guess_type
+        if filename:
+            return guess_type(filename)[0]
+    else:
+        if filename:
+            return gnomevfs.get_mime_type(os.path.abspath(filename))
+        return gnomevfs.get_mime_type_for_data(data)
+
+
+def print_languages():
+    """Print a list of all supported languages, with description."""
+    xmlrpc = get_xmlrpc_service()
+    languages = xmlrpc.pastes.getLanguages().items()
+    languages.sort(lambda a, b: cmp(a[1].lower(), b[1].lower()))
+    print 'Supported Languages:'
+    for alias, name in languages:
+        print '    %-30s%s' % (alias, name)
+
+
+def download_paste(uid):
+    """Download a paste given by ID."""
+    xmlrpc = get_xmlrpc_service()
+    paste = xmlrpc.pastes.getPaste(uid)
+    if not paste:
+        fail('Paste "%s" does not exist.' % uid, 5)
+    print paste['code'].encode('utf-8')
+
+
+def create_paste(code, language, filename, mimetype, private):
+    """Create a new paste."""
+    xmlrpc = get_xmlrpc_service()
+    rv = xmlrpc.pastes.newPaste(language, code, None, filename, mimetype,
+                                private)
+    if not rv:
+        fail('Could not create paste. Something went wrong '
+             'on the server side.', 4)
+    return rv
+
+
+def compile_paste(filenames, langopt):
+    """Create a single paste out of zero, one or multiple files."""
+    def read_file(f):
+        try:
+            return f.read()
+        finally:
+            f.close()
+    mime = ''
+    lang = langopt or ''
+    if not filenames:
+        data = read_file(sys.stdin)
+        if not langopt:
+            mime = get_mimetype(data, '') or ''
+        fname = ""
+    elif len(filenames) == 1:
+        fname = filenames[0]
+        data = read_file(open(filenames[0], 'rb'))
+        if not langopt:
+            mime = get_mimetype(data, filenames[0]) or ''
+    else:
+        result = []
+        for fname in filenames:
+            data = read_file(open(fname, 'rb'))
+            if langopt:
+                result.append('### %s [%s]\n\n' % (fname, langopt))
+            else:
+                result.append('### %s\n\n' % fname)
+            result.append(data)
+            result.append('\n\n')
+        data = ''.join(result)
+        lang = 'multi'
+    return data, lang, fname, mime
+
+
+def main( filename, languages=False, language=None, encoding='utf-8', 
+          open_browser=False, private=False, clipboard=False, 
+          download=None ):
+    """Paste a given script into a pastebin using the Lodgeit tool."""
+
+#    usage = ('Usage: %%prog [options] [FILE ...]\n\n'
+#             'Read the files and paste their contents to %s.\n'
+#             'If no file is given, read from standard input.\n'
+#             'If multiple files are given, they are put into a single paste.'
+#             % SERVICE_URL)
+#    parser = OptionParser(usage=usage)
+#
+#    settings = load_default_settings()
+#
+#    parser.add_option('-v', '--version', action='store_true',
+#                      help='Print script version')
+#    parser.add_option('-L', '--languages', action='store_true', default=False,
+#                      help='Retrieve a list of supported languages')
+#    parser.add_option('-l', '--language', default=settings['language'],
+#                      help='Used syntax highlighter for the file')
+#    parser.add_option('-e', '--encoding', default=settings['encoding'],
+#                      help='Specify the encoding of a file (default is '
+#                           'utf-8 or guessing if available)')
+#    parser.add_option('-b', '--open-browser', dest='open_browser',
+#                      action='store_true',
+#                      default=settings['open_browser'],
+#                      help='Open the paste in a web browser')
+#    parser.add_option('-p', '--private', action='store_true', default=False,
+#                      help='Paste as private')
+#    parser.add_option('--no-clipboard', dest='clipboard',
+#                      action='store_false',
+#                      default=settings['clipboard'],
+#                      help="Don't copy the url into the clipboard")
+#    parser.add_option('--download', metavar='UID',
+#                      help='Download a given paste')
+#
+#    opts, args = parser.parse_args()
+#
+    if languages:
+        print_languages()
+        return
+    elif download:
+        download_paste(download)
+        return
+
+    # check language if given
+    if language and not language_exists(language):
+        print 'Language %s is not supported.' % language
+        return
+
+    # load file(s)
+    args = [ filename ] if filename is not None else []
+    try:
+        data, language, filename, mimetype = compile_paste(args, language)
+    except Exception, err:
+        fail('Error while reading the file(s): %s' % err, 2)
+    if not data:
+        fail('Aborted, no content to paste.', 4)
+
+    # create paste
+    code = make_utf8(data, encoding)
+    pid = create_paste(code, language, filename, mimetype, private)
+    url = '%sshow/%s/' % (SERVICE_URL, pid)
+    print url
+    if open_browser:
+        open_webbrowser(url)
+    if clipboard:
+        copy_url(url)
+
+
+if __name__ == '__main__':
+    sys.exit(main(None))
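
The yt command line drives this module through lodgeit's main(), mirroring
the do_pastebin and do_pastebin_grab hooks added above; roughly (the file
name here is illustrative):

    import yt.utilities.lodgeit as lo

    # Upload a script; the paste URL is printed on success.
    lo.main("my_script.py", language="python")

    # Fetch paste 1688 and print it to stdout.
    lo.main(None, download="1688")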


--- a/yt/utilities/parallel_tools/parallel_analysis_interface.py	Fri Jun 03 18:04:40 2011 -0700
+++ b/yt/utilities/parallel_tools/parallel_analysis_interface.py	Wed Jun 08 08:15:25 2011 -0700
@@ -1293,13 +1293,13 @@
         while mask < size:
             if (mask & rank) != 0:
                 target = (rank & ~mask) % size
-                print "SENDING FROM %02i to %02i" % (rank, target)
+                #print "SENDING FROM %02i to %02i" % (rank, target)
                 self._send_quadtree(target, qt, tgd, args)
                 #qt = self._recv_quadtree(target, tgd, args)
             else:
                 target = (rank | mask)
                 if target < size:
-                    print "RECEIVING FROM %02i on %02i" % (target, rank)
+                    #print "RECEIVING FROM %02i on %02i" % (target, rank)
                     qto = self._recv_quadtree(target, tgd, args)
                     merge_quadtrees(qt, qto)
                     del qto
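
The silenced prints trace a standard binary-tree reduction: in each round,
ranks with the current bit set send their quadtree to the partner with that
bit cleared and drop out, while the remaining ranks receive and merge.  A
minimal sketch of the pattern, assuming blocking send/recv and a generic
merge (all names illustrative):

    def tree_reduce(rank, size, local, send, recv, merge):
        mask = 1
        while mask < size:
            if rank & mask:
                # Hand our partial result to the partner and retire.
                send((rank & ~mask) % size, local)
                break
            target = rank | mask
            if target < size:
                local = merge(local, recv(target))
            mask <<= 1
        return local  # rank 0 ends up holding the fully merged tree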


--- a/yt/utilities/physical_constants.py	Fri Jun 03 18:04:40 2011 -0700
+++ b/yt/utilities/physical_constants.py	Wed Jun 08 08:15:25 2011 -0700
@@ -5,6 +5,7 @@
 # Masses
 mass_hydrogen_cgs = 1.67e-24 # g
 mass_electron_cgs = 9.11e-28 # g
+amu_cgs           = 1.66053886e-24 # g
 # Velocities
 speed_of_light_cgs = 2.99792458e10 # cm/s, exact
 
@@ -24,7 +25,7 @@
 mass_mean_atomic_cosmology = 1.22
 mass_mean_atomic_galactic = 2.3
 
-# Conversion Factors:  X au * mpc_per_au = Y au
+# Conversion Factors:  X au * mpc_per_au = Y mpc
 # length
 mpc_per_mpc   = 1
 mpc_per_kpc   = 1e-3
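
With the corrected comment, the factors read naturally: a length in some
unit times mpc_per_<unit> gives megaparsecs.  For example:

    x_kpc = 500.0
    x_mpc = x_kpc * mpc_per_kpc  # 500 kpc * 1e-3 = 0.5 mpc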


--- a/yt/visualization/_colormap_data.py	Fri Jun 03 18:04:40 2011 -0700
+++ b/yt/visualization/_colormap_data.py	Wed Jun 08 08:15:25 2011 -0700
@@ -974,3 +974,185 @@
         1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.]),
    )
 
+### kamae ###
+
+color_map_luts['kamae'] = \
+   (
+array([ 0.10189667,  0.11463894,  0.12755761,  0.1405963 ,  0.15373037,
+        0.16694395,  0.18022534,  0.1935652 ,  0.2069557 ,  0.22039001,
+        0.23386204,  0.24736623,  0.26089743,  0.27445076,  0.28802162,
+        0.30160555,  0.31519826,  0.32879554,  0.34239328,  0.35598743,
+        0.36957396,  0.38314889,  0.39670823,  0.41024801,  0.42376423,
+        0.43725291,  0.45071   ,  0.46413146,  0.47751319,  0.49085107,
+        0.5041409 ,  0.51737848,  0.53055952,  0.54367968,  0.55673458,
+        0.56971976,  0.58263072,  0.59546287,  0.60821158,  0.62087213,
+        0.63343975,  0.6459096 ,  0.65827678,  0.6705363 ,  0.68268311,
+        0.69471212,  0.70661814,  0.71839593,  0.73004018,  0.74154553,
+        0.75290655,  0.76411775,  0.7751736 ,  0.7860685 ,  0.79679679,
+        0.8073528 ,  0.81773078,  0.82792495,  0.83792951,  0.8477386 ,
+        0.85734636,  0.86674688,  0.87593426,  0.88490256,  0.89364586,
+        0.90215822,  0.9104337 ,  0.9184664 ,  0.92625041,  0.93377986,
+        0.94104891,  0.94805175,  0.95478263,  0.96123586,  0.9674058 ,
+        0.97328689,  0.97887365,  0.98416071,  0.98914277,  0.99381466,
+        0.99817132,  0.9998451 ,  1.        ,  1.        ,  1.        ,
+        1.        ,  1.        ,  1.        ,  1.        ,  1.        ,
+        1.        ,  1.        ,  1.        ,  1.        ,  1.        ,
+        1.        ,  1.        ,  1.        ,  1.        ,  1.        ,
+        1.        ,  1.        ,  1.        ,  1.        ,  0.99786575,
+        0.99359386,  0.98863281,  0.98330578,  0.97761561,  0.97156558,
+        0.96515943,  0.95840133,  0.95129595,  0.94384841,  0.93606432,
+        0.92794977,  0.91951136,  0.91075617,  0.90169178,  0.8923263 ,
+        0.88266832,  0.87272695,  0.86251183,  0.85203309,  0.84130138,
+        0.83032786,  0.81912419,  0.80770256,  0.79607564,  0.78425658,
+        0.77225906,  0.76009721,  0.74778563,  0.7353394 ,  0.72277404,
+        0.71010551,  0.69735019,  0.68452487,  0.67164675,  0.65873339,
+        0.6458027 ,  0.63287295,  0.6199627 ,  0.60709082,  0.59427645,
+        0.58153897,  0.56889796,  0.55637322,  0.54398469,  0.53175245,
+        0.51969665,  0.50783755,  0.4961954 ,  0.48479047,  0.47364299,
+        0.46277308,  0.45220078,  0.44194595,  0.43202827,  0.42246716,
+        0.41328178,  0.40449095,  0.39611313,  0.38816637,  0.38066827,
+        0.3736359 ,  0.36708582,  0.36103397,  0.35549565,  0.3504855 ,
+        0.34601741,  0.34210448,  0.33875902,  0.33599244,  0.33381526,
+        0.33223703,  0.33126631,  0.3309106 ,  0.33117634,  0.33206882,
+        0.33359218,  0.33574936,  0.33854206,  0.3419707 ,  0.34603442,
+        0.35073099,  0.35605684,  0.362007  ,  0.36857511,  0.37575334,
+        0.38353244,  0.39190166,  0.4008488 ,  0.41036015,  0.42042051,
+        0.43101317,  0.44211993,  0.4537211 ,  0.46579548,  0.47832042,
+        0.49127178,  0.504624  ,  0.51835009,  0.53242166,  0.54680899,
+        0.561481  ,  0.57640533,  0.59154841,  0.60687543,  0.62235047,
+        0.63793653,  0.65359557,  0.66928861,  0.68497576,  0.70061634,
+        0.71616893,  0.73159144,  0.74684123,  0.76187518,  0.77664977,
+        0.79112121,  0.80524551,  0.81897859,  0.83227639,  0.84509499,
+        0.85739069,  0.86912016,  0.88024052,  0.8907095 ,  0.9004855 ,
+        0.90952776,  0.91779647,  0.92525286,  0.93185938,  0.93757975,
+        0.94237913,  0.94622424,  0.94908343,  0.95092687,  0.9517266 ,
+        0.95145666,  0.95009324,  0.94761471,  0.9440018 ,  0.93923763,
+        0.93330785,  0.92620073,  0.9179072 ,  0.90842097,  0.89773857,
+        0.88585945,  0.87278601,  0.85852366,  0.84308084,  0.8264691 ,
+        0.80870311]),
+array([ 0.09276197,  0.12828558,  0.14148276,  0.15073629,  0.15792564,
+        0.16377875,  0.16867726,  0.17285336,  0.17646219,  0.17961453,
+        0.18239341,  0.18486353,  0.18707683,  0.18907613,  0.19089745,
+        0.19257166,  0.19412563,  0.19558306,  0.1969651 ,  0.19829085,
+        0.19957764,  0.2008414 ,  0.20209683,  0.20335757,  0.20463637,
+        0.20594518,  0.20729526,  0.20869724,  0.21016118,  0.21169665,
+        0.21331273,  0.21501809,  0.21682099,  0.2187293 ,  0.22075054,
+        0.22289189,  0.2251602 ,  0.22756199,  0.23010349,  0.23279061,
+        0.23562896,  0.23862389,  0.24178042,  0.24510329,  0.24859697,
+        0.25226561,  0.25611312,  0.26014306,  0.26435875,  0.2687632 ,
+        0.27335911,  0.27814891,  0.28313472,  0.28831836,  0.29370134,
+        0.29928488,  0.30506987,  0.31105692,  0.31724629,  0.32363796,
+        0.33023158,  0.33702647,  0.34402165,  0.35121581,  0.35860731,
+        0.3661942 ,  0.37397421,  0.38194473,  0.39010283,  0.39844526,
+        0.40696847,  0.41566854,  0.42454128,  0.43358216,  0.44278633,
+        0.45214864,  0.46166362,  0.47132551,  0.48112825,  0.49106545,
+        0.50113048,  0.51131639,  0.52161595,  0.53202168,  0.54252581,
+        0.55312033,  0.56379697,  0.57454721,  0.58536232,  0.59623332,
+        0.60715103,  0.61810606,  0.62908884,  0.64008959,  0.65109838,
+        0.66210512,  0.67309958,  0.68407136,  0.69500998,  0.70590484,
+        0.71674524,  0.7275204 ,  0.73821948,  0.74883161,  0.75934584,
+        0.76975124,  0.78003687,  0.79019179,  0.80020509,  0.81006592,
+        0.81976349,  0.82928706,  0.83862603,  0.84776989,  0.85670824,
+        0.86543087,  0.87392769,  0.88218883,  0.8902046 ,  0.89796551,
+        0.90546232,  0.91268605,  0.91962797,  0.92627962,  0.93263287,
+        0.93867988,  0.94441315,  0.94982552,  0.95491021,  0.95966079,
+        0.96407124,  0.96813594,  0.97184967,  0.97520769,  0.97820564,
+        0.98083967,  0.98310638,  0.98500283,  0.98652659,  0.98767574,
+        0.98844883,  0.98884498,  0.98886377,  0.98850537,  0.98777045,
+        0.98666024,  0.98517652,  0.98332159,  0.98109835,  0.97851023,
+        0.9755612 ,  0.97225582,  0.9685992 ,  0.96459698,  0.96025538,
+        0.95558116,  0.95058161,  0.94526459,  0.93963846,  0.93371212,
+        0.92749498,  0.92099698,  0.91422853,  0.90720053,  0.89992437,
+        0.89241189,  0.88467538,  0.87672756,  0.86858155,  0.86025091,
+        0.85174954,  0.84309173,  0.8342921 ,  0.8253656 ,  0.81632747,
+        0.80719325,  0.79797873,  0.78869994,  0.7793731 ,  0.77001465,
+        0.76064116,  0.75126936,  0.74191609,  0.73259824,  0.7233328 ,
+        0.71413676,  0.70502712,  0.69602085,  0.68713487,  0.678386  ,
+        0.66979095,  0.6613663 ,  0.65312845,  0.64509359,  0.63727769,
+        0.62969646,  0.6223653 ,  0.61529933,  0.60851328,  0.60202154,
+        0.59583809,  0.58997647,  0.58444976,  0.57927057,  0.574451  ,
+        0.5700026 ,  0.56593638,  0.56226276,  0.55899154,  0.55613191,
+        0.5536924 ,  0.55168088,  0.55010452,  0.54896977,  0.5482824 ,
+        0.54804739,  0.54826898,  0.54895066,  0.5500951 ,  0.55170422,
+        0.5537791 ,  0.55632002,  0.55932646,  0.56279704,  0.56672959,
+        0.57112109,  0.57596767,  0.58126467,  0.58700657,  0.59318702,
+        0.59979888,  0.60683417,  0.61428411,  0.62213911,  0.63038884,
+        0.63902214,  0.64802713,  0.65739119,  0.66710095,  0.67714237,
+        0.68750069,  0.69816052,  0.70910582,  0.72031994,  0.73178563,
+        0.74348511,  0.75540005,  0.76751163,  0.77980056,  0.79224712,
+        0.80483118,  0.81753226,  0.83032956,  0.84320195,  0.85612808,
+        0.86908639]),
+array([ 0.03921569,  0.14755333,  0.15484709,  0.15910372,  0.16221813,
+        0.164691  ,  0.16675074,  0.16852161,  0.17007905,  0.17147245,
+        0.17273603,  0.17389454,  0.17496647,  0.17596607,  0.17690456,
+        0.17779096,  0.17863263,  0.1794357 ,  0.1802053 ,  0.18094581,
+        0.18166101,  0.18235416,  0.18302812,  0.18368543,  0.18432833,
+        0.18495885,  0.18557879,  0.1861898 ,  0.1867934 ,  0.18739095,
+        0.18798372,  0.18857288,  0.18915953,  0.18974468,  0.19032928,
+        0.19091421,  0.19150034,  0.19208844,  0.19267928,  0.19327356,
+        0.19387199,  0.1944752 ,  0.19508383,  0.19569847,  0.19631971,
+        0.1969481 ,  0.19758419,  0.19822848,  0.1988815 ,  0.19954371,
+        0.20021562,  0.20089767,  0.20159031,  0.202294  ,  0.20300915,
+        0.2037362 ,  0.20447555,  0.20522761,  0.20599277,  0.20677142,
+        0.20756395,  0.20837073,  0.20919213,  0.21002852,  0.21088026,
+        0.21174769,  0.21263118,  0.21353106,  0.21444767,  0.21538136,
+        0.21633245,  0.21730128,  0.21828817,  0.21929344,  0.22031742,
+        0.22136043,  0.22242277,  0.22350476,  0.22460671,  0.22572894,
+        0.22687174,  0.22803542,  0.22922029,  0.23042665,  0.2316548 ,
+        0.23290505,  0.23417769,  0.23547304,  0.23679141,  0.23813309,
+        0.2394984 ,  0.24088767,  0.24230121,  0.24373936,  0.24520245,
+        0.24669084,  0.2482049 ,  0.24974502,  0.25131159,  0.25290506,
+        0.25452588,  0.25617456,  0.25785162,  0.25955767,  0.26129335,
+        0.26305936,  0.26485651,  0.26668567,  0.26854783,  0.27044409,
+        0.27237568,  0.27434399,  0.2763506 ,  0.27839725,  0.28048592,
+        0.28261885,  0.28479853,  0.28702776,  0.28930969,  0.29164781,
+        0.29404606,  0.29650878,  0.29904082,  0.30164751,  0.30433477,
+        0.30710909,  0.3099776 ,  0.31294808,  0.31602901,  0.31922957,
+        0.3225597 ,  0.32603009,  0.32965219,  0.33343823,  0.33740118,
+        0.34155475,  0.34591336,  0.35049209,  0.3553066 ,  0.36037309,
+        0.36570818,  0.37132881,  0.37725211,  0.38349527,  0.39007533,
+        0.39700904,  0.40431267,  0.41200173,  0.42009082,  0.42859331,
+        0.43752115,  0.44688456,  0.45669178,  0.46694881,  0.47765913,
+        0.48882341,  0.50043933,  0.51250128,  0.52500019,  0.53792329,
+        0.55125402,  0.56497186,  0.57905222,  0.59346649,  0.60818194,
+        0.62316185,  0.6383656 ,  0.65374881,  0.66926358,  0.68485877,
+        0.70048031,  0.71607158,  0.73157387,  0.7469268 ,  0.76206891,
+        0.77693813,  0.79147243,  0.80561039,  0.81929181,  0.83245833,
+        0.84505406,  0.85702617,  0.86832547,  0.87890699,  0.88873043,
+        0.89776066,  0.90596815,  0.91332926,  0.91982658,  0.92544909,
+        0.93019232,  0.93405839,  0.937056  ,  0.93920033,  0.94051286,
+        0.94102111,  0.94075835,  0.93976317,  0.93807909,  0.93575405,
+        0.93283987,  0.92939168,  0.92546733,  0.92112679,  0.91643153,
+        0.91144386,  0.9062264 ,  0.90084141,  0.89535028,  0.88981297,
+        0.88428753,  0.87882965,  0.87349223,  0.86832508,  0.86337457,
+        0.85868342,  0.8542905 ,  0.85023073,  0.84653492,  0.84322986,
+        0.84033823,  0.83787874,  0.83586622,  0.83431172,  0.83322277,
+        0.83260351,  0.83245497,  0.83277526,  0.83355988,  0.83480196,
+        0.83649253,  0.83862079,  0.84117439,  0.84413967,  0.84750191,
+        0.8512456 ,  0.85535461,  0.85981245,  0.86460243,  0.86970782,
+        0.87511204,  0.88079878,  0.88675211,  0.89295658,  0.89939731,
+        0.90606008,  0.91293132,  0.91999822,  0.92724869,  0.93467142,
+        0.94225589,  0.94999229,  0.9578716 ,  0.96588553,  0.97402646,
+        0.98228749]),
+array([ 1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,
+        1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,
+        1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,
+        1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,
+        1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,
+        1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,
+        1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,
+        1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,
+        1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,
+        1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,
+        1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,
+        1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,
+        1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,
+        1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,
+        1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,
+        1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,
+        1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,
+        1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,
+        1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,
+        1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.]),
+   )
+
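
Each color_map_luts entry, including the new 'kamae' map above, is a 4-tuple
of 256-element arrays: the red, green, blue, and alpha lookup tables, indexed
by normalized image value.  A sketch of applying one by hand (illustrative;
assumes the arrays above are in scope and `normalized` is an array of values
in [0, 1]):

    import numpy as np
    r, g, b, a = color_map_luts['kamae']
    idx = (np.clip(normalized, 0.0, 1.0) * 255).astype("uint8")
    rgba = np.dstack([r[idx], g[idx], b[idx], a[idx]])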


--- a/yt/visualization/image_writer.py	Fri Jun 03 18:04:40 2011 -0700
+++ b/yt/visualization/image_writer.py	Wed Jun 08 08:15:25 2011 -0700
@@ -291,7 +291,8 @@
     return mapped.copy("C")
 
 def strip_colormap_data(fn = "color_map_data.py",
-            cmaps = ("jet", "algae", "hot", "gist_stern", "RdBu")):
+            cmaps = ("jet", "algae", "hot", "gist_stern", "RdBu",
+                     "kamae")):
     import pprint
     import color_maps as rcm
     f = open(fn, "w")


--- a/yt/visualization/volume_rendering/camera.py	Fri Jun 03 18:04:40 2011 -0700
+++ b/yt/visualization/volume_rendering/camera.py	Wed Jun 08 08:15:25 2011 -0700
@@ -497,7 +497,7 @@
         --------
 
         >>> for i, snapshot in enumerate(cam.rotation(na.pi, 10)):
-        ...     iw.write_bitmap(snapshot, "rotation_%04i.png" % i)
+        ...     iw.write_bitmap(snapshot, 'rotation_%04i.png' % i)
         """
 
         dtheta = (1.0*theta)/n_steps


http://bitbucket.org/yt_analysis/yt/changeset/7a5f50ea6a76/
changeset:   7a5f50ea6a76
branch:      deliberate_fields
user:        MatthewTurk
date:        2011-06-09 00:32:51
summary:     Moving unknown field setup into the base class.  Next up is actually modifying
all the frontends to know about KnownFields.
affected #:  2 files (-1 bytes)

--- a/yt/data_objects/hierarchy.py	Wed Jun 08 08:15:25 2011 -0700
+++ b/yt/data_objects/hierarchy.py	Wed Jun 08 15:32:51 2011 -0700
@@ -126,6 +126,34 @@
             self._add_object_class(name, cname, cls, dd)
         self.object_types.sort()
 
+    def _setup_unknown_fields(self):
+        known_fields = self.parameter_file._fieldinfo_known
+        for field in self.field_list:
+            if field in self.parameter_file.field_info:
+                ff = self.parameter_file.field_info[field]
+                # By allowing a backup, we don't mandate that it's found in our
+                # current field info.  This means we'll instead simply override
+                # it.
+                self.parameter_file.field_info.pop(field, None)
+            if field not in known_fields:
+                mylog.info("Adding unknown field %s to list of fields", field)
+                cf = None
+                if self.parameter_file.has_key(field):
+                    def external_wrapper(f):
+                        def _convert_function(data):
+                            return data.convert(f)
+                        return _convert_function
+                    cf = external_wrapper(field)
+                # Note that we call add_field on the field_info directly.  This
+                # will allow the same field detection mechanism to work for 1D, 2D
+                # and 3D fields.
+                self.pf.field_info.add_field(
+                        field, NullFunc,
+                        convert_function=cf, take_log=False, units=r"Unknown")
+            else:
+                mylog.info("Adding known field %s to list of fields", field)
+                self.parameter_file.field_info[field] = known_fields[field]
+            
     # Now all the object related stuff
 
     def all_data(self, find_max=False):
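
One subtlety worth noting in the hoisted code: the external_wrapper
indirection is not redundant.  Python closures bind loop variables late, so
defining _convert_function inline would leave every unknown field converting
with the last field name in the loop.  A short illustration of the pitfall
and the fix:

    fns = [lambda: f for f in ("a", "b")]
    print [fn() for fn in fns]   # ['b', 'b'] -- both closures see the last f

    def wrap(f):
        return lambda: f         # f is bound when wrap() is called
    fns = [wrap(f) for f in ("a", "b")]
    print [fn() for fn in fns]   # ['a', 'b']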


--- a/yt/frontends/enzo/data_structures.py	Wed Jun 08 08:15:25 2011 -0700
+++ b/yt/frontends/enzo/data_structures.py	Wed Jun 08 15:32:51 2011 -0700
@@ -408,35 +408,6 @@
         self.save_data(list(field_list),"/","DataFields",passthrough=True)
         self.field_list = list(field_list)
 
-    def _setup_unknown_fields(self):
-        for field in self.field_list:
-            if field in self.parameter_file.field_info:
-                ff = self.parameter_file.field_info[field]
-                if "lambda" in str(ff._function): continue
-                # By allowing a backup, we don't mandate that it's found in our
-                # current field info.  This means we'll instead simply override
-                # it.
-                self.parameter_file.field_info.pop(field, None)
-            if field not in KnownEnzoFields:
-                mylog.info("Adding unknown field %s to list of fields", field)
-                cf = None
-                if self.parameter_file.has_key(field):
-                    def external_wrapper(f):
-                        def _convert_function(data):
-                            return data.convert(f)
-                        return _convert_function
-                    cf = external_wrapper(field)
-                # Note that we call add_field on the field_info directly.  This
-                # will allow the same field detection mechanism to work for 1D, 2D
-                # and 3D fields.
-                self.pf.field_info.add_field(
-                        field, NullFunc,
-                        convert_function=cf, take_log=False, units=r"Unknown")
-            else:
-                mylog.info("Adding known field %s to list of fields", field)
-                self.parameter_file.field_info[field] = KnownEnzoFields[field]
-            
-
     def _setup_derived_fields(self):
         self.derived_field_list = []
         for field in self.parameter_file.field_info:
@@ -643,6 +614,7 @@
     """
     _hierarchy_class = EnzoHierarchy
     _fieldinfo_fallback = EnzoFieldInfo
+    _fieldinfo_known = KnownEnzoFields
     def __init__(self, filename, data_style=None,
                  parameter_override = None,
                  conversion_override = None,


http://bitbucket.org/yt_analysis/yt/changeset/96a142ba965f/
changeset:   96a142ba965f
branch:      deliberate_fields
user:        MatthewTurk
date:        2011-06-09 19:27:13
summary:     Converting all the data_structures files to use Known fields.
affected #:  11 files (-1 bytes)

--- a/yt/frontends/art/data_structures.py	Wed Jun 08 15:32:51 2011 -0700
+++ b/yt/frontends/art/data_structures.py	Thu Jun 09 10:27:13 2011 -0700
@@ -38,9 +38,9 @@
 from yt.data_objects.static_output import \
       StaticOutput
 from yt.data_objects.field_info_container import \
-    FieldInfoContainer
-from .fields import ARTFieldInfo, add_art_field
-from .fields import add_art_field
+    FieldInfoContainer, NullFunc
+from .fields import \
+    ARTFieldInfo, add_art_field, KnownARTFields
 from yt.utilities.definitions import \
     mpc_conversion
 from yt.utilities.io_handler import \
@@ -344,20 +344,6 @@
             g._setup_dx()
         self.max_level = self.grid_levels.max()
 
-    def _setup_unknown_fields(self):
-        for field in self.field_list:
-            if field in self.parameter_file.field_info: continue
-            mylog.info("Adding %s to list of fields", field)
-            cf = None
-            if self.parameter_file.has_key(field):
-                def external_wrapper(f):
-                    def _convert_function(data):
-                        return data.convert(f)
-                    return _convert_function
-                cf = external_wrapper(field)
-            add_art_field(field, lambda a, b: None,
-                      convert_function=cf, take_log=False)
-
     def _setup_derived_fields(self):
         self.derived_field_list = []
 
@@ -371,6 +357,7 @@
 class ARTStaticOutput(StaticOutput):
     _hierarchy_class = ARTHierarchy
     _fieldinfo_fallback = ARTFieldInfo
+    _fieldinfo_known = KnownARTFields
     _handle = None
     
     def __init__(self, filename, data_style='art',


--- a/yt/frontends/castro/data_structures.py	Wed Jun 08 15:32:51 2011 -0700
+++ b/yt/frontends/castro/data_structures.py	Thu Jun 09 10:27:13 2011 -0700
@@ -58,10 +58,9 @@
     boxlib_bool_to_int
 
 from yt.data_objects.field_info_container import \
-    FieldInfoContainer
+    FieldInfoContainer, NullFunc
 from .fields import \
-    CastroFieldInfo, \
-    add_castro_field
+    CastroFieldInfo, add_castro_field, KnownCastroFields
 
 
 class CastroGrid(AMRGridPatch):
@@ -458,21 +457,6 @@
     def _detect_fields(self):
         pass
 
-    def _setup_unknown_fields(self):
-        for field in self.field_list:
-            if field in self.parameter_file.field_info: continue
-            mylog.info("Adding %s to list of fields", field)
-            cf = None
-            if self.parameter_file.has_key(field):
-                def external_wrapper(f):
-                    def _convert_function(data):
-                        return data.convert(f)
-                    return _convert_function
-                cf = external_wrapper(field)
-            add_castro_field(field, lambda a, b: None,
-                      convert_function=cf, take_log=False)
-
-
     def _setup_derived_fields(self):
         pass
 
@@ -499,6 +483,7 @@
     """
     _hierarchy_class = CastroHierarchy
     _fieldinfo_fallback = CastroFieldInfo
+    _fieldinfo_known = KnownCastroFields
 
     def __init__(self, plotname, paramFilename=None, fparamFilename=None,
                  data_style='castro_native', paranoia=False,


--- a/yt/frontends/chombo/data_structures.py	Wed Jun 08 15:32:51 2011 -0700
+++ b/yt/frontends/chombo/data_structures.py	Thu Jun 09 10:27:13 2011 -0700
@@ -56,8 +56,8 @@
      parallel_root_only
 
 from yt.data_objects.field_info_container import \
-    FieldInfoContainer
-from .fields import ChomboFieldInfo
+    FieldInfoContainer, NullFunc
+from .fields import ChomboFieldInfo, KnownChomboFields
 
 class ChomboGrid(AMRGridPatch):
     _id_offset = 0
@@ -163,9 +163,6 @@
                 g1.Parent.append(g)
         self.max_level = self.grid_levels.max()
 
-    def _setup_unknown_fields(self):
-        pass
-
     def _setup_derived_fields(self):
         self.derived_field_list = []
 
@@ -178,6 +175,7 @@
 class ChomboStaticOutput(StaticOutput):
     _hierarchy_class = ChomboHierarchy
     _fieldinfo_fallback = ChomboFieldInfo
+    _fieldinfo_known = KnownChomboFields
     
     def __init__(self, filename, data_style='chombo_hdf5',
                  storage_filename = None, ini_filename = None):


--- a/yt/frontends/flash/data_structures.py	Wed Jun 08 15:32:51 2011 -0700
+++ b/yt/frontends/flash/data_structures.py	Thu Jun 09 10:27:13 2011 -0700
@@ -41,10 +41,9 @@
     io_registry
 
 from .fields import \
-    FLASHFieldInfo, \
-    add_flash_field
+    FLASHFieldInfo, add_flash_field, KnownFLASHFields
 from yt.data_objects.field_info_container import \
-    FieldInfoContainer
+    FieldInfoContainer, NullFunc
 
 class FLASHGrid(AMRGridPatch):
     _id_offset = 1
@@ -147,22 +146,6 @@
             g._setup_dx()
         self.max_level = self.grid_levels.max()
 
-    def _setup_unknown_fields(self):
-        for field in self.field_list:
-            if field in self.parameter_file.field_info: continue
-            pfield = field.startswith("particle_")
-            mylog.info("Adding %s to list of fields", field)
-            cf = None
-            if self.parameter_file.has_key(field):
-                def external_wrapper(f):
-                    def _convert_function(data):
-                        return data.convert(f)
-                    return _convert_function
-                cf = external_wrapper(field)
-            add_field(field, lambda a, b: None,
-                      convert_function=cf, take_log=False,
-                      particle_type=pfield)
-
     def _setup_derived_fields(self):
         self.derived_field_list = []
         for field in self.parameter_file.field_info:
@@ -183,6 +166,7 @@
 class FLASHStaticOutput(StaticOutput):
     _hierarchy_class = FLASHHierarchy
     _fieldinfo_fallback = FLASHFieldInfo
+    _fieldinfo_known = KnownFLASHFields
     _handle = None
     
     def __init__(self, filename, data_style='flash_hdf5',


--- a/yt/frontends/gadget/data_structures.py	Wed Jun 08 15:32:51 2011 -0700
+++ b/yt/frontends/gadget/data_structures.py	Thu Jun 09 10:27:13 2011 -0700
@@ -37,9 +37,9 @@
 from yt.data_objects.static_output import \
     StaticOutput
 
-from .fields import GadgetFieldInfo
+from .fields import GadgetFieldInfo, KnownGadgetFields
 from yt.data_objects.field_info_container import \
-    FieldInfoContainer
+    FieldInfoContainer, NullFunc
 
 class GadgetGrid(AMRGridPatch):
     _id_offset = 0
@@ -137,16 +137,14 @@
             g._prepare_grid()
             g._setup_dx()
             
-        
-    def _setup_unknown_fields(self):
-        pass
-
     def _setup_derived_fields(self):
         self.derived_field_list = []
 
 class GadgetStaticOutput(StaticOutput):
     _hierarchy_class = GadgetHierarchy
-    _fieldinfo_fallback = GadgetFieldContainer
+    _fieldinfo_fallback = GadgetFieldInfo
+    _fieldinfo_known = KnownGadgetFields
+
     def __init__(self, filename,storage_filename=None) :
         self.storage_filename = storage_filename
         self.field_info = FieldInfoContainer.create_with_fallback(


--- a/yt/frontends/gdf/data_structures.py	Wed Jun 08 15:32:51 2011 -0700
+++ b/yt/frontends/gdf/data_structures.py	Thu Jun 09 10:27:13 2011 -0700
@@ -32,9 +32,9 @@
 from yt.data_objects.static_output import \
            StaticOutput
 
-from .fields import GDFFieldContainer
+from .fields import GDFFieldContainer, KnownGDFFields
 from yt.data_objects.field_info_container import \
-    FieldInfoContainer
+    FieldInfoContainer, NullFunc
 
 class GDFGrid(AMRGridPatch):
     _id_offset = 0
@@ -130,9 +130,6 @@
                 g1.Parent.append(g)
         self.max_level = self.grid_levels.max()
 
-    def _setup_unknown_fields(self):
-        pass
-
     def _setup_derived_fields(self):
         self.derived_field_list = []
 
@@ -145,6 +142,7 @@
 class GDFStaticOutput(StaticOutput):
     _hierarchy_class = GDFHierarchy
     _fieldinfo_fallback = GDFFieldContainer
+    _fieldinfo_known = KnownGDFFields
     
     def __init__(self, filename, data_style='grid_data_format',
                  storage_filename = None):


--- a/yt/frontends/maestro/data_structures.py	Wed Jun 08 15:32:51 2011 -0700
+++ b/yt/frontends/maestro/data_structures.py	Thu Jun 09 10:27:13 2011 -0700
@@ -55,10 +55,11 @@
     maestro_FAB_header_pattern
 
 from yt.data_objects.field_info_container import \
-    FieldInfoContainer
+    FieldInfoContainer, NullFunc
 from .fields import \
     MaestroFieldInfo, \
-    add_maestro_field
+    add_maestro_field, \
+    KnownMaestroFields
 
 
 class MaestroGrid(AMRGridPatch):
@@ -393,21 +394,6 @@
     def _detect_fields(self):
         pass
 
-    def _setup_unknown_fields(self):
-        for field in self.field_list:
-            if field in self.parameter_file.field_info: continue
-            mylog.info("Adding %s to list of fields", field)
-            cf = None
-            if self.parameter_file.has_key(field):
-                def external_wrapper(f):
-                    def _convert_function(data):
-                        return data.convert(f)
-                    return _convert_function
-                cf = external_wrapper(field)
-            add_field(field, lambda a, b: None,
-                      convert_function=cf, take_log=False)
-
-
     def _setup_derived_fields(self):
         pass
 
@@ -434,6 +420,7 @@
     """
     _hierarchy_class = MaestroHierarchy
     _fieldinfo_fallback = MaestroFieldInfo
+    _fieldinfo_known = KnownMaestroFields
 
     def __init__(self, plotname, paramFilename=None, 
                  data_style='maestro', paranoia=False,


--- a/yt/frontends/orion/data_structures.py	Wed Jun 08 15:32:51 2011 -0700
+++ b/yt/frontends/orion/data_structures.py	Thu Jun 09 10:27:13 2011 -0700
@@ -56,10 +56,11 @@
 
 from .fields import \
     OrionFieldInfo, \
-    add_orion_field
+    add_orion_field, \
+    KnownOrionFields
 
 from yt.data_objects.field_info_container import \
-    FieldInfoContainer
+    FieldInfoContainer, NullFunc
 
 
 class OrionGrid(AMRGridPatch):
@@ -402,21 +403,6 @@
     def _detect_fields(self):
         pass
 
-    def _setup_unknown_fields(self):
-        for field in self.field_list:
-            if field in self.parameter_file.field_info: continue
-            mylog.info("Adding %s to list of fields", field)
-            cf = None
-            if self.parameter_file.has_key(field):
-                def external_wrapper(f):
-                    def _convert_function(data):
-                        return data.convert(f)
-                    return _convert_function
-                cf = external_wrapper(field)
-            add_orion_field(field, lambda a, b: None,
-                      convert_function=cf, take_log=False)
-
-
     def _setup_derived_fields(self):
         pass
 
@@ -443,6 +429,7 @@
     """
     _hierarchy_class = OrionHierarchy
     _fieldinfo_fallback = OrionFieldInfo
+    _fieldinfo_known = KnownOrionFields
 
     def __init__(self, plotname, paramFilename=None, fparamFilename=None,
                  data_style='orion_native', paranoia=False,


--- a/yt/frontends/ramses/data_structures.py	Wed Jun 08 15:32:51 2011 -0700
+++ b/yt/frontends/ramses/data_structures.py	Thu Jun 09 10:27:13 2011 -0700
@@ -35,7 +35,7 @@
 from yt.data_objects.static_output import \
       StaticOutput
 import _ramses_reader
-from .fields import RAMSESFieldInfo
+from .fields import RAMSESFieldInfo, KnownRAMSESFields
 from yt.utilities.definitions import \
     mpc_conversion
 from yt.utilities.amr_utils import \
@@ -43,7 +43,7 @@
 from yt.utilities.io_handler import \
     io_registry
 from yt.data_objects.field_info_container import \
-    FieldInfoContainer
+    FieldInfoContainer, NullFunc
 
 def num_deep_inc(f):
     def wrap(self, *args, **kwargs):
@@ -299,20 +299,6 @@
             g._setup_dx()
         self.max_level = self.grid_levels.max()
 
-    def _setup_unknown_fields(self):
-        for field in self.field_list:
-            if field in self.parameter_file.field_info: continue
-            mylog.info("Adding %s to list of fields", field)
-            cf = None
-            if self.parameter_file.has_key(field):
-                def external_wrapper(f):
-                    def _convert_function(data):
-                        return data.convert(f)
-                    return _convert_function
-                cf = external_wrapper(field)
-            add_field(field, lambda a, b: None,
-                      convert_function=cf, take_log=False)
-
     def _setup_derived_fields(self):
         self.derived_field_list = []
 
@@ -322,6 +308,7 @@
 class RAMSESStaticOutput(StaticOutput):
     _hierarchy_class = RAMSESHierarchy
     _fieldinfo_fallback = RAMSESFieldInfo
+    _fieldinfo_known = KnownRAMSESFields
     _handle = None
     
     def __init__(self, filename, data_style='ramses',


--- a/yt/frontends/stream/data_structures.py	Wed Jun 08 15:32:51 2011 -0700
+++ b/yt/frontends/stream/data_structures.py	Thu Jun 09 10:27:13 2011 -0700
@@ -36,10 +36,13 @@
 from yt.data_objects.static_output import \
     StaticOutput
 from yt.utilities.logger import ytLogger as mylog
+from yt.data_objects.field_info_container import \
+    FieldInfoContainer, NullFunc
 
 from .fields import \
     StreamFieldContainer, \
-    add_stream_field
+    add_stream_field, \
+    KnownStreamFields
 
 class StreamGrid(AMRGridPatch):
     """
@@ -216,6 +219,7 @@
 class StreamStaticOutput(StaticOutput):
     _hierarchy_class = StreamHierarchy
     _fieldinfo_class = StreamFieldContainer
+    _fieldinfo_known = KnownStreamFields
     _data_style = 'stream'
 
     def __init__(self, stream_handler):


--- a/yt/frontends/tiger/data_structures.py	Wed Jun 08 15:32:51 2011 -0700
+++ b/yt/frontends/tiger/data_structures.py	Thu Jun 09 10:27:13 2011 -0700
@@ -31,7 +31,9 @@
 from yt.data_objects.static_output import \
            StaticOutput
 
-from .fields import TigerFieldInfo
+from yt.data_objects.field_info_container import \
+    FieldInfoContainer, NullFunc
+from .fields import TigerFieldInfo, KnownTigerFields
 
 class TigerGrid(AMRGridPatch):
     _id_offset = 0
@@ -125,16 +127,13 @@
     def field_list(self):
         return self.file_mapping.keys()
 
-    def _setup_unknown_fields(self):
-        for field in self.field_list:
-            add_tiger_field(field, lambda a, b: None)
-
     def _setup_derived_fields(self):
         self.derived_field_list = []
 
 class TigerStaticOutput(StaticOutput):
     _hierarchy_class = TigerHierarchy
     _fieldinfo_fallback = TigerFieldInfo
+    _fieldinfo_known = KnownTigerFields
 
     def __init__(self, rhobname, root_size, max_grid_size=128,
                  data_style='tiger', storage_filename = None):


http://bitbucket.org/yt_analysis/yt/changeset/2ea9f36a8871/
changeset:   2ea9f36a8871
branch:      deliberate_fields
user:        MatthewTurk
date:        2011-06-10 19:58:17
summary:     Continuing to convert things to two-level field detection.  Added a
TranslationFunc as well.
affected #:  4 files (-1 bytes)

--- a/yt/data_objects/field_info_container.py	Thu Jun 09 10:27:13 2011 -0700
+++ b/yt/data_objects/field_info_container.py	Fri Jun 10 10:58:17 2011 -0700
@@ -71,6 +71,11 @@
         obj.fallback = fallback
         return obj
 
+def TranslationFunc(field_name):
+    def _TranslationFunc(field, data):
+        return data[field]
+    return _TranslationFunc
+
 def NullFunc(field, data):
     return
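
TranslationFunc builds the alias fields used in the frontend conversions
below: the on-disk name is registered against NullFunc, and the yt-facing
name becomes a derived field that simply reads it back.  Schematically, with
the names from the ART hunk that follows:

    KnownARTFields.add_field("density", function=NullFunc, take_log=False)
    KnownARTFields.add_field("Density", function=TranslationFunc("density"),
                             take_log=True)
    # Afterwards data["Density"] just returns data["density"].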
 


--- a/yt/frontends/art/fields.py	Thu Jun 09 10:27:13 2011 -0700
+++ b/yt/frontends/art/fields.py	Fri Jun 10 10:58:17 2011 -0700
@@ -26,6 +26,8 @@
 from yt.data_objects.field_info_container import \
     FieldInfoContainer, \
     FieldInfo, \
+    NullFunc, \
+    TranslationFunc, \
     ValidateParameter, \
     ValidateDataField, \
     ValidateProperty, \
@@ -36,9 +38,10 @@
     boltzmann_constant_cgs, mass_hydrogen_cgs
 
 ARTFieldInfo = FieldInfoContainer.create_with_fallback(FieldInfo)
-add_art_field = ARTFieldInfo.add_field
+add_field = ARTFieldInfo.add_field
 
-add_field = add_art_field
+KnownARTFields = FieldInfoContainer()
+add_art_field = KnownARTFields.add_field
 
 translation_dict = {"Density":"density",
                     "TotalEnergy":"TotalEnergy",
@@ -51,32 +54,29 @@
                    }
 
-def _generate_translation(mine, theirs):
-    add_field(theirs, function=lambda a, b: b[mine], take_log=True)
 
 for f,v in translation_dict.items():
-    if v not in ARTFieldInfo:
-        add_field(v, function=lambda a,b: None, take_log=False,
+    add_art_field(v, function=NullFunc, take_log=False,
                   validators = [ValidateDataField(v)])
-    #print "Setting up translator from %s to %s" % (v, f)
-    _generate_translation(v, f)
+    add_art_field(f, function=TranslationFunc(v), take_log=True)
 
 #def _convertMetallicity(data):
 #    return data.convert("Metal_Density1")
-#ARTFieldInfo["Metal_Density1"]._units = r"1"
-#ARTFieldInfo["Metal_Density1"]._projected_units = r"1"
-#ARTFieldInfo["Metal_Density1"]._convert_function=_convertMetallicity
+#KnownARTFields["Metal_Density1"]._units = r"1"
+#KnownARTFields["Metal_Density1"]._projected_units = r"1"
+#KnownARTFields["Metal_Density1"]._convert_function=_convertMetallicity
 
 
 def _convertDensity(data):
     return data.convert("Density")
-ARTFieldInfo["Density"]._units = r"\rm{g}/\rm{cm}^3"
-ARTFieldInfo["Density"]._projected_units = r"\rm{g}/\rm{cm}^2"
-ARTFieldInfo["Density"]._convert_function=_convertDensity
+KnownARTFields["Density"]._units = r"\rm{g}/\rm{cm}^3"
+KnownARTFields["Density"]._projected_units = r"\rm{g}/\rm{cm}^2"
+KnownARTFields["Density"]._convert_function=_convertDensity
 
 def _convertEnergy(data):
     return data.convert("GasEnergy")
-ARTFieldInfo["GasEnergy"]._units = r"\rm{ergs}/\rm{g}"
-ARTFieldInfo["GasEnergy"]._convert_function=_convertEnergy
+KnownARTFields["GasEnergy"]._units = r"\rm{ergs}/\rm{g}"
+KnownARTFields["GasEnergy"]._convert_function=_convertEnergy
 
 def _Temperature(field, data):
     tr  = data["GasEnergy"] / data["Density"]
@@ -85,9 +85,9 @@
     return tr
 def _convertTemperature(data):
     return data.convert("Temperature")
-add_field("Temperature", function=_Temperature, units = r"\mathrm{K}")
-ARTFieldInfo["Temperature"]._units = r"\mathrm{K}"
-ARTFieldInfo["Temperature"]._convert_function=_convertTemperature
+add_art_field("Temperature", function=_Temperature, units = r"\mathrm{K}")
+KnownARTFields["Temperature"]._units = r"\mathrm{K}"
+KnownARTFields["Temperature"]._convert_function=_convertTemperature
 
 def _MetallicitySNII(field, data):
     #get the dimensionless mass fraction
@@ -95,8 +95,8 @@
     tr *= data.pf.conversion_factors["Density"]    
     return tr
     
-add_field("MetallicitySNII", function=_MetallicitySNII, units = r"\mathrm{K}")
-ARTFieldInfo["MetallicitySNII"]._units = r"\mathrm{K}"
+add_art_field("MetallicitySNII", function=_MetallicitySNII, units = r"\mathrm{K}")
+KnownARTFields["MetallicitySNII"]._units = r"\mathrm{K}"
 
 def _MetallicitySNIa(field, data):
     #get the dimensionless mass fraction
@@ -104,8 +104,8 @@
     tr *= data.pf.conversion_factors["Density"]    
     return tr
     
-add_field("MetallicitySNIa", function=_MetallicitySNIa, units = r"\mathrm{K}")
-ARTFieldInfo["MetallicitySNIa"]._units = r"\mathrm{K}"
+add_art_field("MetallicitySNIa", function=_MetallicitySNIa, units = r"\mathrm{K}")
+KnownARTFields["MetallicitySNIa"]._units = r"\mathrm{K}"
 
 def _Metallicity(field, data):
     #get the dimensionless mass fraction of the total metals
@@ -114,14 +114,14 @@
     tr *= data.pf.conversion_factors["Density"]    
     return tr
     
-add_field("Metallicity", function=_Metallicity, units = r"\mathrm{K}")
-ARTFieldInfo["Metallicity"]._units = r"\mathrm{K}"
+add_art_field("Metallicity", function=_Metallicity, units = r"\mathrm{K}")
+KnownARTFields["Metallicity"]._units = r"\mathrm{K}"
 
 def _Metal_Density(field,data):
     return data["Metal_DensitySNII"]+data["Metal_DensitySNIa"]
 def _convert_Metal_Density(data):
     return data.convert("Metal_Density")
 
-add_field("Metal_Density", function=_Metal_Density, units = r"\mathrm{K}")
-ARTFieldInfo["Metal_Density"]._units = r"\mathrm{K}"
-ARTFieldInfo["Metal_Density"]._convert_function=_convert_Metal_Density
+add_art_field("Metal_Density", function=_Metal_Density, units = r"\mathrm{K}")
+KnownARTFields["Metal_Density"]._units = r"\mathrm{K}"
+KnownARTFields["Metal_Density"]._convert_function=_convert_Metal_Density


--- a/yt/frontends/castro/fields.py	Thu Jun 09 10:27:13 2011 -0700
+++ b/yt/frontends/castro/fields.py	Fri Jun 10 10:58:17 2011 -0700
@@ -27,6 +27,8 @@
 from yt.data_objects.field_info_container import \
     FieldInfoContainer, \
     FieldInfo, \
+    NullFunc, \
+    TranslationFunc, \
     ValidateParameter, \
     ValidateDataField, \
     ValidateProperty, \
@@ -35,32 +37,31 @@
 import yt.data_objects.universal_fields
 
 CastroFieldInfo = FieldInfoContainer.create_with_fallback(FieldInfo)
-add_castro_field = CastroFieldInfo.add_field
+add_field = CastroFieldInfo.add_field
 
-
-add_field = add_castro_field
+KnownCastroFields = FieldInfoContainer()
+add_castro_field = KnownCastroFields.add_field
 
 # def _convertDensity(data):
 #     return data.convert("Density")
-add_field("density", function=lambda a, b: None, take_log=True,
-          validators = [ValidateDataField("density")],
-          units=r"\rm{g}/\rm{cm}^3")
+add_castro_field("density", function=NullFunc, take_log=True,
+          units=r"\rm{g}/\rm{cm}^3")
 CastroFieldInfo["density"]._projected_units =r"\rm{g}/\rm{cm}^2"
 #CastroFieldInfo["density"]._convert_function=_convertDensity
 
-add_field("eden", function=lambda a, b: None, take_log=True,
+add_castro_field("eden", function=NullFunc, take_log=True,
           validators = [ValidateDataField("eden")],
           units=r"\rm{erg}/\rm{cm}^3")
 
-add_field("xmom", function=lambda a, b: None, take_log=False,
+add_castro_field("xmom", function=NullFunc, take_log=False,
           validators = [ValidateDataField("xmom")],
           units=r"\rm{g}/\rm{cm^2\ s}")
 
-add_field("ymom", function=lambda a, b: None, take_log=False,
+add_castro_field("ymom", function=NullFunc, take_log=False,
           validators = [ValidateDataField("ymom")],
           units=r"\rm{gm}/\rm{cm^2\ s}")
 
-add_field("zmom", function=lambda a, b: None, take_log=False,
+add_castro_field("zmom", function=NullFunc, take_log=False,
           validators = [ValidateDataField("zmom")],
           units=r"\rm{g}/\rm{cm^2\ s}")
 
@@ -75,16 +76,11 @@
                     "z-momentum": "zmom"
                    }
 
-def _generate_translation(mine, theirs):
-    add_field(theirs, function=lambda a, b: b[mine], take_log=True)
+for f, v in translation_dict.items():
+    add_field(f, function=TranslationFunc(v),
+              take_log=KnownCastroFields[v].take_log)
 
-for f, v in translation_dict.items():
-    if v not in CastroFieldInfo:
-        add_field(v, function=lambda a, b: None, take_log=False,
-                  validators = [ValidateDataField(v)])
-    #print "Setting up translator from %s to %s" % (v, f)
-    _generate_translation(v, f)
-
+# Now fallbacks, in case these fields are not output
 def _xVelocity(field, data):
     """generate x-velocity from x-momentum and density
 
@@ -97,9 +93,6 @@
     """generate y-velocity from y-momentum and density
 
     """
-    #try:
-    #    return data["xvel"]
-    #except KeyError:
     return data["ymom"]/data["density"]
 add_field("y-velocity", function=_yVelocity, take_log=False,
           units=r'\rm{cm}/\rm{s}')
@@ -147,3 +140,27 @@
           particle_type=True, convert_function=_convertParticleMassMsun,
           particle_convert_function=_ParticleMassMsun)
 
+# Fundamental fields that are usually/always output:
+#   density
+#   xmom
+#   ymom
+#   zmom
+#   rho_E
+#   rho_e
+#   Temp
+#
+# "Derived" fields that are sometimes output:
+#   x_velocity
+#   y_velocity
+#   z_velocity
+#   magvel
+#   grav_x
+#   grav_y
+#   grav_z
+#   maggrav
+#   magvort
+#   pressure
+#   entropy
+#   divu
+#   eint_e (e as derived from the "rho e" variable)
+#   eint_E (e as derived from the "rho E" variable)


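The Castro changes above separate the two roles that add_field used to play:
KnownCastroFields holds fields read directly from disk, registered with the
NullFunc sentinel instead of a do-nothing lambda, while CastroFieldInfo holds
derived fields, with TranslationFunc bridging on-disk names to yt's universal
names.  A short sketch of the three cases, assuming TranslationFunc(name)
returns a field function that simply aliases another field:

    # 1. On-disk field: no generating function, just metadata.
    add_castro_field("density", function=NullFunc, take_log=True,
                     units=r"\rm{g}/\rm{cm}^3")

    # 2. Universal alias: yt's name reads through to the on-disk name.
    add_field("Density", function=TranslationFunc("density"),
              take_log=KnownCastroFields["density"].take_log)

    # 3. Derived field: computed from other fields on demand.
    def _xVelocity(field, data):
        return data["xmom"] / data["density"]
    add_field("x-velocity", function=_xVelocity, take_log=False,
              units=r"\rm{cm}/\rm{s}")
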
--- a/yt/frontends/enzo/fields.py	Thu Jun 09 10:27:13 2011 -0700
+++ b/yt/frontends/enzo/fields.py	Fri Jun 10 10:58:17 2011 -0700
@@ -28,6 +28,7 @@
 from yt.data_objects.field_info_container import \
     FieldInfoContainer, \
     NullFunc, \
+    TranslationFunc, \
     FieldInfo, \
     ValidateParameter, \
     ValidateDataField, \


http://bitbucket.org/yt_analysis/yt/changeset/3320e23e3a29/
changeset:   3320e23e3a29
branch:      deliberate_fields
user:        MatthewTurk
date:        2011-06-16 23:28:01
summary:     Converting some chombo fields
affected #:  1 file (-1 bytes)

--- a/yt/frontends/chombo/fields.py	Fri Jun 10 10:58:17 2011 -0700
+++ b/yt/frontends/chombo/fields.py	Thu Jun 16 14:28:01 2011 -0700
@@ -38,38 +38,38 @@
 
 add_field = add_chombo_field
 
-add_field("density", function=lambda a,b: None, take_log=True,
+add_field("density", function=NullFunc, take_log=True,
           validators = [ValidateDataField("density")],
           units=r"\rm{g}/\rm{cm}^3")
 
 ChomboFieldInfo["density"]._projected_units =r"\rm{g}/\rm{cm}^2"
 
-add_field("X-momentum", function=lambda a,b: None, take_log=False,
+add_field("X-momentum", function=NullFunc, take_log=False,
           validators = [ValidateDataField("X-Momentum")],
           units=r"",display_name=r"B_x")
 ChomboFieldInfo["X-momentum"]._projected_units=r""
 
-add_field("Y-momentum", function=lambda a,b: None, take_log=False,
+add_field("Y-momentum", function=NullFunc, take_log=False,
           validators = [ValidateDataField("Y-Momentum")],
           units=r"",display_name=r"B_y")
 ChomboFieldInfo["Y-momentum"]._projected_units=r""
 
-add_field("Z-momentum", function=lambda a,b: None, take_log=False,
+add_field("Z-momentum", function=NullFunc, take_log=False,
           validators = [ValidateDataField("Z-Momentum")],
           units=r"",display_name=r"B_z")
 ChomboFieldInfo["Z-momentum"]._projected_units=r""
 
-add_field("X-magnfield", function=lambda a,b: None, take_log=False,
+add_field("X-magnfield", function=NullFunc, take_log=False,
           validators = [ValidateDataField("X-Magnfield")],
           units=r"",display_name=r"B_x")
 ChomboFieldInfo["X-magnfield"]._projected_units=r""
 
-add_field("Y-magnfield", function=lambda a,b: None, take_log=False,
+add_field("Y-magnfield", function=NullFunc, take_log=False,
           validators = [ValidateDataField("Y-Magnfield")],
           units=r"",display_name=r"B_y")
 ChomboFieldInfo["Y-magnfield"]._projected_units=r""
 
-add_field("Z-magnfield", function=lambda a,b: None, take_log=False,
+add_field("Z-magnfield", function=NullFunc, take_log=False,
           validators = [ValidateDataField("Z-Magnfield")],
           units=r"",display_name=r"B_z")
 ChomboFieldInfo["Z-magnfield"]._projected_units=r""


http://bitbucket.org/yt_analysis/yt/changeset/b4c92eb1db78/
changeset:   b4c92eb1db78
branch:      deliberate_fields
user:        MatthewTurk
date:        2011-06-16 23:29:11
summary:     Merge
affected #:  45 files (-1 bytes)

--- a/doc/activate	Thu Jun 16 14:28:01 2011 -0700
+++ b/doc/activate	Thu Jun 16 14:29:11 2011 -0700
@@ -74,7 +74,7 @@
 export PYTHONPATH
 
 _OLD_VIRTUAL_LD_LIBRARY_PATH="$LD_LIBRARY_PATH"
-LD_LIBRARY_PATH="$VIRTUAL_ENV/lib"
+LD_LIBRARY_PATH="$VIRTUAL_ENV/lib:$LD_LIBRARY_PATH"
 export LD_LIBRARY_PATH
 ### End extra env vars for yt
 


--- a/yt/analysis_modules/halo_profiler/multi_halo_profiler.py	Thu Jun 16 14:28:01 2011 -0700
+++ b/yt/analysis_modules/halo_profiler/multi_halo_profiler.py	Thu Jun 16 14:29:11 2011 -0700
@@ -566,7 +566,7 @@
                                                     antialias=False)
                         dataset_name = "%s_%s" % (hp['field'], hp['weight_field'])
                         if save_cube:
-                            if dataset_name in output.listnames(): del output[dataset_name]
+                            if dataset_name in output: del output[dataset_name]
                             output.create_dataset(dataset_name, data=frb[hp['field']])
                         if save_images:
                             filename = "%s/Halo_%04d_%s_%s.png" % (my_output_dir, halo['id'], 

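This is the first of several hunks in this merge replacing h5py's deprecated
Group.listnames() with the mapping protocol; the same substitution recurs in
light_cone.py, hierarchy.py, and the Chombo frontend below.  The idiom in
isolation (filename and dataset name are illustrative):

    import h5py

    output = h5py.File("data.h5", "a")
    # Old style: materialize the name list, then search it.
    #   if dataset_name in output.listnames(): del output[dataset_name]
    # New style: Groups support membership tests directly.
    if "my_dataset" in output:
        del output["my_dataset"]
    output.create_dataset("my_dataset", data=[1, 2, 3])
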

--- a/yt/analysis_modules/level_sets/api.py	Thu Jun 16 14:28:01 2011 -0700
+++ b/yt/analysis_modules/level_sets/api.py	Thu Jun 16 14:29:11 2011 -0700
@@ -29,7 +29,6 @@
 """
 
 from .contour_finder import \
-    GridConsiderationQueue, \
     coalesce_join_tree, \
     identify_contours
 


--- a/yt/analysis_modules/level_sets/contour_finder.py	Thu Jun 16 14:28:01 2011 -0700
+++ b/yt/analysis_modules/level_sets/contour_finder.py	Thu Jun 16 14:29:11 2011 -0700
@@ -30,202 +30,6 @@
 import yt.utilities.data_point_utilities as data_point_utilities
 import yt.utilities.amr_utils as amr_utils
 
-class GridConsiderationQueue:
-    def __init__(self, white_list, priority_func=None):
-        """
-        This class exists to serve the contour finder.  It ensures that
-        we can create a cascading set of queue dependencies, and if
-        a grid is touched again ahead of time we can bump it to the top
-        of the queue again.  It like has few uses.
-        """
-        self.to_consider = []
-        self.considered = set()
-        self.n = 0
-        self.white_list = set(white_list)
-        self.priority_func = priority_func
-
-    def add(self, grids, force=False):
-        if not hasattr(grids,'size'):
-            grids = ensure_list(grids)
-        i = self.n
-        to_check = self.white_list.intersection(grids)
-        if not force: to_check.difference_update(self.considered)
-        for g in sorted(to_check, key=self.priority_func):
-            try:
-                # We only delete from subsequent checks
-                del self.to_consider[self.to_consider.index(g, i)]
-                self.to_consider.insert(i,g)
-                i += 1
-            except ValueError:
-                self.to_consider.append(g)
-
-    def __iter__(self):
-        return self
-
-    def next(self):
-        if self.n >= len(self.to_consider):
-            raise StopIteration
-        tr = self.to_consider[self.n]
-        self.considered.add(tr)
-        self.n += 1
-        return tr
-
-    def progress(self):
-        return self.n, len(self.to_consider)
-
-# We want an algorithm that deals with growing a given contour to *all* the
-# cells in a grid.
-
-def old_identify_contours(data_source, field, min_val, max_val, cached_fields=None):
-    """
-    Given a *data_source*, we will search for topologically connected sets
-    in *field* between *min_val* and *max_val*.
-    """
-    if cached_fields is None: cached_fields = defaultdict(lambda: dict())
-    maxn_cells = na.sum([g.ActiveDimensions.prod() for g in data_source._grids])
-    contour_ind = na.where( (data_source[field] > min_val)
-                          & (data_source[field] < max_val))[0]
-    np = contour_ind.size
-    if np == 0:
-        return {}
-    cur_max_id = maxn_cells - np
-    contour_ids = na.arange(maxn_cells, cur_max_id, -1) + 1 # Minimum of 1
-    data_source["tempContours"] = na.ones(data_source[field].shape, dtype='int32')*-1
-    mylog.info("Contouring over %s cells with %s candidates", contour_ids[0],np)
-    data_source["tempContours"][contour_ind] = contour_ids[:]
-    data_source._flush_data_to_grids("tempContours", -1, dtype='int32')
-    my_queue = GridConsiderationQueue(white_list = data_source._grids,
-                    priority_func = lambda g: -1*g["tempContours"].max())
-    my_queue.add(data_source._grids)
-    for i,grid in enumerate(my_queue):
-        mylog.info("Examining %s of %s", *my_queue.progress())
-        max_before = grid["tempContours"].max()
-        to_get = ["tempContours"]
-        if field in cached_fields[grid.id] and \
-            not na.any( (cached_fields[grid.id][field] > min_val)
-                      & (cached_fields[grid.id][field] < max_val)):
-            continue
-        for f in [field, "GridIndices"]:
-            if f not in cached_fields[grid.id]: to_get.append(f)
-        cg = grid.retrieve_ghost_zones(1,to_get)
-        for f in [field, "GridIndices"]:
-            if f in cached_fields[grid.id]:
-                cg.data[f] = cached_fields[grid.id][f]
-            else:
-                cached_fields[grid.id][f] = cg[f] 
-        local_ind = na.where( (cg[field] > min_val)
-                            & (cg[field] < max_val)
-                            & (cg["tempContours"] == -1) )
-        if local_ind[0].size > 0:
-            kk = na.arange(cur_max_id, cur_max_id-local_ind[0].size, -1)
-            cg["tempContours"][local_ind] = kk[:]
-            cur_max_id -= local_ind[0].size
-        fd = cg["tempContours"].astype('int64')
-        fd_original = fd.copy()
-        if na.all(fd > -1):
-            fd[:] = fd.max()
-        else:
-            xi_u,yi_u,zi_u = na.where(fd > -1)
-            cor_order = na.argsort(-1*fd[(xi_u,yi_u,zi_u)])
-            xi = xi_u[cor_order]
-            yi = yi_u[cor_order]
-            zi = zi_u[cor_order]
-            while data_point_utilities.FindContours(fd, xi, yi, zi) < 0: pass
-        cg["tempContours"] = fd.copy().astype('float64')
-        cg.flush_data("tempContours")
-        my_queue.add(cg._grids)
-        force_ind = na.unique(cg["GridIndices"][na.where(
-            (cg["tempContours"] > fd_original)
-          & (cg["GridIndices"] != grid.id-grid._id_offset) )])
-        if len(force_ind) > 0:
-            my_queue.add(data_source.hierarchy.grids[force_ind.astype('int32')], force=True)
-        for ax in 'xyz':
-            if not iterable(grid['d%s'%ax]):
-                grid['d%s'%ax] = grid['d%s'%ax]*na.ones(grid.ActiveDimensions)
-    del data_source.data["tempContours"] # Force a reload from the grids
-    data_source.get_data("tempContours", in_grids=True)
-    i = 0
-    contour_ind = {}
-    for contour_id in na.unique(data_source["tempContours"]):
-        if contour_id == -1: continue
-        contour_ind[i] = na.where(data_source["tempContours"] == contour_id)
-        mylog.debug("Contour id %s has %s cells", i, contour_ind[i][0].size)
-        i += 1
-    mylog.info("Identified %s contours between %0.5e and %0.5e",
-               len(contour_ind.keys()),min_val,max_val)
-    for grid in chain(data_source._grids, cg._grids):
-        grid.data.pop("tempContours", None)
-    del data_source.data["tempContours"]
-    return contour_ind
-
-def check_neighbors(data_object, field="Contours"):
-    """
-    This method is a means of error checking in the contour finder.
-    """
-    n_bad = na.zeros(1, dtype='int32')
-    for cid in na.unique(data_object[field]):
-        if cid == -1: continue
-        ids = na.where(data_object[field] == cid)
-        mx = data_object['x'][ids].copy()
-        my = data_object['y'][ids].copy()
-        mz = data_object['z'][ids].copy()
-        mdx = data_object['dx'][ids].copy()
-        mdy = data_object['dy'][ids].copy()
-        mdz = data_object['dz'][ids].copy()
-        grid_ids_m = data_object['GridIndices'][ids].copy()
-        grid_levels_m = data_object['GridLevel'][ids].copy()
-        mp = mx.size
-        ids = na.where( (data_object[field] != cid)
-                      & (data_object[field] >=  0 ))
-        nx = data_object['x'][ids].copy()
-        ny = data_object['y'][ids].copy()
-        nz = data_object['z'][ids].copy()
-        ndx = data_object['dx'][ids].copy()
-        ndy = data_object['dy'][ids].copy()
-        ndz = data_object['dz'][ids].copy()
-        grid_ids_n = data_object['GridIndices'][ids].copy()
-        grid_levels_n = data_object['GridLevel'][ids].copy()
-        np = nx.size
-        weave.inline(check_cell_distance,
-                   ['mx','my','mz','mdx','mdy','mdz','mp',
-                    'nx','ny','nz','ndx','ndy','ndz','np','n_bad',
-                    'grid_ids_m', 'grid_levels_m', 'grid_ids_n', 'grid_levels_n'],
-                    compiler='gcc', type_converters=converters.blitz,
-                    auto_downcast=0, verbose=2)
-    return n_bad[0]
-
-check_cell_distance = \
-r"""
-using namespace std;
-int i, j, k;
-long double cell_dist, rad_m, rad_n;
-k=0;
-for(i=0;i<mp;i++){
-  for(j=0;j<np;j++){
-    /*
-   cell_dist = sqrtl(pow(mx(i)-nx(j),2) +
-                     pow(my(i)-ny(j),2) +
-                     pow(mz(i)-nz(j),2));
-   rad_m = sqrtl(pow(mdx(i),2) +
-                 pow(mdy(i),2) +
-                 pow(mdz(i),2));
-   rad_n = sqrtl(pow(ndx(j),2) +
-                 pow(ndy(j),2) +
-                 pow(ndz(j),2));
-    */
-   //if(cell_dist > 1.01 * (rad_n/2.0+rad_m/2.0)) continue;
-   if(fabsl(mx(i)-nx(j))>(mdx(i)+ndx(j))/2.0) continue;
-   if(fabsl(my(i)-ny(j))>(mdy(i)+ndy(j))/2.0) continue;
-   if(fabsl(mz(i)-nz(j))>(mdz(i)+ndz(j))/2.0) continue;
-   k++;
-   break;
-   cout << cell_dist << "\t" << 1.01*(rad_n/2.0+rad_m/2.0) << "\t";
-   cout << grid_ids_m(i) << "\t" << grid_ids_n(j) << endl;
-  }
-}
-n_bad(0) += k;
-"""
-
 def coalesce_join_tree(jtree1):
     joins = defaultdict(set)
     nj = jtree1.shape[0]


--- a/yt/analysis_modules/light_cone/light_cone.py	Thu Jun 16 14:28:01 2011 -0700
+++ b/yt/analysis_modules/light_cone/light_cone.py	Thu Jun 16 14:29:11 2011 -0700
@@ -520,7 +520,7 @@
 
         output = h5py.File(filename, "a")
 
-        node_exists = field_node in output.listnames()
+        node_exists = field_node in output
 
         if node_exists:
             if over_write:


--- a/yt/analysis_modules/star_analysis/sfr_spectrum.py	Thu Jun 16 14:28:01 2011 -0700
+++ b/yt/analysis_modules/star_analysis/sfr_spectrum.py	Thu Jun 16 14:29:11 2011 -0700
@@ -328,6 +328,8 @@
             if star_metallicity_constant is not None:
                 self.star_metal = na.ones(self.star_mass.size, dtype='float64') * \
                     star_metallicity_constant
+            if star_metallicity_fraction is not None:
+                self.star_metal = star_metallicity_fraction
         else:
             # Get the data we need.
             ct = self._data_source["creation_time"]


--- a/yt/data_objects/data_containers.py	Thu Jun 16 14:28:01 2011 -0700
+++ b/yt/data_objects/data_containers.py	Thu Jun 16 14:29:11 2011 -0700
@@ -1431,14 +1431,14 @@
         This is a data object corresponding to a line integral through the
         simulation domain.
 
-        This object is typically accessed through the `quad_proj` object that
+        This object is typically accessed through the `proj` object that
         hangs off of hierarchy objects.  AMRQuadProj is a projection of a
         `field` along an `axis`.  The field can have an associated
         `weight_field`, in which case the values are multiplied by a weight
         before being summed, and then divided by the sum of that weight; the
         two fundamental modes of operating are direct line integral (no
         weighting) and average along a line of sight (weighting.)  What makes
-        `quad_proj` different from the standard projection mechanism is that it
+        `proj` different from the standard projection mechanism is that it
         utilizes a quadtree data structure, rather than the old mechanism for
         projections.  It will not run in parallel, but serial runs should be
         substantially faster.  Note also that lines of sight are integrated at
@@ -1536,7 +1536,7 @@
     def _get_tree(self, nvals):
         xd = self.pf.domain_dimensions[x_dict[self.axis]]
         yd = self.pf.domain_dimensions[y_dict[self.axis]]
-        return QuadTree(na.array([xd,yd]), nvals)
+        return QuadTree(na.array([xd,yd], dtype='int64'), nvals)
 
     def _get_dls(self, grid, fields):
         # Place holder for a time when maybe we will not be doing just
@@ -2189,6 +2189,7 @@
         for i,grid in enumerate(self._get_grids()):
             mylog.debug("Getting fields from %s", i)
             self._get_data_from_grid(grid, fields_to_get, dls)
+        mylog.info("IO completed; summing")
         for field in fields_to_get:
             self[field] = self._mpi_Allsum_double(self[field])
             conv = self.pf.units[self.pf.field_info[field].projection_conversion]


--- a/yt/data_objects/derived_quantities.py	Thu Jun 16 14:28:01 2011 -0700
+++ b/yt/data_objects/derived_quantities.py	Thu Jun 16 14:29:11 2011 -0700
@@ -109,10 +109,7 @@
         rv = []
         for my_list in self.retvals:
             data = na.array(my_list).transpose()
-            old_shape = data.shape
-            data = self._mpi_catarray(data).transpose()
-            if len(data.shape) != old_shape: data = data.squeeze()
-            rv.append(data)
+            rv.append(self._mpi_catarray(data).transpose())
         self.retvals = rv
         
     def _call_func_unlazy(self, args, kwargs):
@@ -227,6 +224,23 @@
     amz = data["SpecificAngularMomentumZ"]*data["CellMassMsun"]
     j_mag = [amx.sum(), amy.sum(), amz.sum()]
     return [j_mag]
+
+def _StarAngularMomentumVector(data):
+    """
+    This function returns the mass-weighted average angular momentum vector 
+    for stars.
+    """
+    is_star = data["creation_time"] > 0
+    star_mass = data["ParticleMassMsun"][is_star]
+    sLx = data["ParticleSpecificAngularMomentumX"][is_star]
+    sLy = data["ParticleSpecificAngularMomentumY"][is_star]
+    sLz = data["ParticleSpecificAngularMomentumZ"][is_star]
+    amx = sLx * star_mass
+    amy = sLy * star_mass
+    amz = sLz * star_mass
+    j_mag = [amx.sum(), amy.sum(), amz.sum()]
+    return [j_mag]
+
 def _combAngularMomentumVector(data, j_mag):
     if len(j_mag.shape) < 2: j_mag = na.expand_dims(j_mag, 0)
     L_vec = j_mag.sum(axis=0)
@@ -235,6 +249,9 @@
 add_quantity("AngularMomentumVector", function=_AngularMomentumVector,
              combine_function=_combAngularMomentumVector, n_ret=1)
 
+add_quantity("StarAngularMomentumVector", function=_StarAngularMomentumVector,
+             combine_function=_combAngularMomentumVector, n_ret=1)
+
 def _BaryonSpinParameter(data):
     """
     This function returns the spin parameter for the baryons, but it uses


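Like the existing derived quantities, the new StarAngularMomentumVector should
be reachable through a data object's quantities interface.  A hypothetical
invocation (dataset name and geometry are illustrative; the particle fields it
needs, such as "creation_time" and "ParticleMassMsun", must be present, as
with Enzo star particles):

    from yt.mods import load
    pf = load("DD0010/DD0010")
    sp = pf.h.sphere([0.5, 0.5, 0.5], 0.1)   # center, radius in code units
    L_star = sp.quantities["StarAngularMomentumVector"]()
    print L_star
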
--- a/yt/data_objects/hierarchy.py	Thu Jun 16 14:28:01 2011 -0700
+++ b/yt/data_objects/hierarchy.py	Thu Jun 16 14:29:11 2011 -0700
@@ -202,8 +202,12 @@
         self._barrier()
         if not writeable and not exists: return
         if writeable:
-            self._data_mode = 'a'
-            if not exists: self.__create_data_file(fn)
+            try:
+                if not exists: self.__create_data_file(fn)
+                self._data_mode = 'a'
+            except IOError:
+                self._data_mode = None
+                return
         else:
             self._data_mode = 'r'
 
@@ -228,15 +232,15 @@
 
         if self._data_mode != 'a': return
         if "ArgsError" in dir(h5py.h5):
-            exception = h5py.h5.ArgsError
+            exception = (h5py.h5.ArgsError, KeyError)
         else:
-            exception = h5py.h5.H5Error
+            exception = (h5py.h5.H5Error, KeyError)
         try:
             node_loc = self._data_file[node]
-            if name in node_loc.listnames() and force:
+            if name in node_loc and force:
                 mylog.info("Overwriting node %s/%s", node, name)
                 del self._data_file[node][name]
-            elif name in node_loc.listnames() and passthrough:
+            elif name in node_loc and passthrough:
                 return
         except exception:
             pass
@@ -296,10 +300,10 @@
         myGroup = self._data_file['/']
         for group in node.split('/'):
             if group:
-                if group not in myGroup.listnames():
+                if group not in myGroup:
                     return None
                 myGroup = myGroup[group]
-        if name not in myGroup.listnames():
+        if name not in myGroup:
             return None
 
         full_name = "%s/%s" % (node, name)


--- a/yt/data_objects/static_output.py	Thu Jun 16 14:28:01 2011 -0700
+++ b/yt/data_objects/static_output.py	Thu Jun 16 14:29:11 2011 -0700
@@ -62,12 +62,13 @@
             _cached_pfs[apath] = obj
         return _cached_pfs[apath]
 
-    def __init__(self, filename, data_style=None):
+    def __init__(self, filename, data_style=None, file_style=None):
         """
         Base class for generating new output types.  Principally consists of
         a *filename* and a *data_style* which will be passed on to children.
         """
         self.data_style = data_style
+        self.file_style = file_style
         self.parameter_filename = str(filename)
         self.basename = os.path.basename(filename)
         self.directory = os.path.expanduser(os.path.dirname(filename))


--- a/yt/frontends/art/data_structures.py	Thu Jun 16 14:28:01 2011 -0700
+++ b/yt/frontends/art/data_structures.py	Thu Jun 16 14:29:11 2011 -0700
@@ -47,7 +47,10 @@
     io_registry
 import yt.utilities.amr_utils as amr_utils
 
-import yt.frontends.ramses._ramses_reader as _ramses_reader
+try:
+    import yt.frontends.ramses._ramses_reader as _ramses_reader
+except ImportError:
+    _ramses_reader = None
 
 from yt.utilities.physical_constants import \
     mass_hydrogen_cgs
@@ -362,6 +365,9 @@
     
     def __init__(self, filename, data_style='art',
                  storage_filename = None):
+        global _ramses_reader
+        if _ramses_reader is None:
+            import yt.frontends.ramses._ramses_reader as _ramses_reader
         StaticOutput.__init__(self, filename, data_style)
         self.storage_filename = storage_filename
         


--- a/yt/frontends/castro/data_structures.py	Thu Jun 16 14:28:01 2011 -0700
+++ b/yt/frontends/castro/data_structures.py	Thu Jun 16 14:29:11 2011 -0700
@@ -353,8 +353,8 @@
             g._particle_offset = pg[2]
         self.grid_particle_count[:,0] = self.pgrid_info[:,1]
         del self.pgrid_info
-        self.grid_levels = na.concatenate([level.ngrids*[level.level] for level in self.levels])
-        self.grid_levels = self.grid_levels.reshape((self.num_grids,1))
+        gls = na.concatenate([level.ngrids*[level.level] for level in self.levels])
+        self.grid_levels[:] = gls.reshape((self.num_grids,1))
         grid_dcs = na.concatenate([level.ngrids*[self.dx[level.level]] for level in self.levels], axis=0)
         self.grid_dxs = grid_dcs[:,0].reshape((self.num_grids,1))
         self.grid_dys = grid_dcs[:,1].reshape((self.num_grids,1))
@@ -608,6 +608,7 @@
             line = a_file.readline().strip()
             a_file.close()
             self.parameters["CosmologyCurrentRedshift"] = 1/float(line) - 1
+            self.cosmological_scale_factor = float(line)
             self.current_redshift = self.parameters["CosmologyCurrentRedshift"]
         else:
             self.current_redshift = self.omega_lambda = self.omega_matter = \
@@ -656,7 +657,18 @@
         self.time_units = {}
         if len(self.parameters) == 0:
             self._parse_parameter_file()
-        self._setup_nounits_units()
+        if self.cosmological_simulation:
+            cf = 1e5*(self.cosmological_scale_factor)
+            for ax in 'xyz':
+                self.units['particle_velocity_%s' % ax] = cf
+            self.units['particle_mass'] = 1.989e33
+        mylog.warning("Setting 1.0 in code units to be 1.0 cm")
+        if not self.has_key("TimeUnits"):
+            mylog.warning("No time units.  Setting 1.0 = 1 second.")
+            self.conversion_factors["Time"] = 1.0
+        for unit in mpc_conversion.keys():
+            self.units[unit] = mpc_conversion[unit] / mpc_conversion["cm"]
+        
         self.conversion_factors = defaultdict(lambda: 1.0)
         self.time_units['1'] = 1
         self.units['1'] = 1.0
@@ -671,10 +683,3 @@
 
     def _setup_nounits_units(self):
         z = 0
-        mylog.warning("Setting 1.0 in code units to be 1.0 cm")
-        if not self.has_key("TimeUnits"):
-            mylog.warning("No time units.  Setting 1.0 = 1 second.")
-            self.conversion_factors["Time"] = 1.0
-        for unit in mpc_conversion.keys():
-            self.units[unit] = mpc_conversion[unit] / mpc_conversion["cm"]
-

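For cosmological Castro runs the new branch scales particle velocities by
cf = 1e5 * a.  Assuming the on-disk particle velocities are comoving km/s,
this yields proper cm/s, and particle masses are taken to be solar masses
(1.989e33 g).  A worked example with illustrative numbers:

    a = 0.5                    # scale factor read from the inputs file
    cf = 1e5 * a               # km/s (comoving) -> cm/s (proper): 5.0e4
    v_cgs = 100.0 * cf         # stored velocity 100 -> 5.0e6 cm/s
    m_cgs = 2.0 * 1.989e33     # stored mass 2 -> 3.978e33 g
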

--- a/yt/frontends/chombo/data_structures.py	Thu Jun 16 14:28:01 2011 -0700
+++ b/yt/frontends/chombo/data_structures.py	Thu Jun 16 14:29:11 2011 -0700
@@ -104,7 +104,7 @@
         self._fhandle = h5py.File(self.hierarchy_filename)
 
         self.float_type = self._fhandle['/level_0']['data:datatype=0'].dtype.name
-        self._levels = self._fhandle.listnames()[1:]
+        self._levels = [fn for fn in self._fhandle if fn != "Chombo_global"]
         AMRHierarchy.__init__(self,pf,data_style)
 
         self._fhandle.close()
@@ -131,7 +131,7 @@
         
         # this relies on the first Group in the H5 file being
         # 'Chombo_global'
-        levels = f.listnames()[1:]
+        levels = [fn for fn in f if fn != "Chombo_global"]
         self.grids = []
         i = 0
         for lev in levels:
@@ -302,8 +302,7 @@
     def _is_valid(self, *args, **kwargs):
         try:
             fileh = h5py.File(args[0],'r')
-            if (fileh.listnames())[0] == 'Chombo_global':
-                return True
+            return "Chombo_global" in fileh["/"]
         except:
             pass
         return False


--- a/yt/frontends/enzo/data_structures.py	Thu Jun 16 14:28:01 2011 -0700
+++ b/yt/frontends/enzo/data_structures.py	Thu Jun 16 14:29:11 2011 -0700
@@ -139,6 +139,10 @@
     def __init__(self, pf, data_style):
         
         self.data_style = data_style
+        if pf.file_style != None:
+            self._bn = pf.file_style
+        else:
+            self._bn = "%s.cpu%%04i"
         self.hierarchy_filename = os.path.abspath(
             "%s.hierarchy" % (pf.parameter_filename))
         harray_fn = self.hierarchy_filename[:-9] + "harrays"
@@ -197,7 +201,11 @@
             self.data_style = 'enzo_hdf4'
             mylog.debug("Detected HDF4")
         except:
-            list_of_sets = hdf5_light_reader.ReadListOfDatasets(test_grid, "/")
+            try:
+                list_of_sets = hdf5_light_reader.ReadListOfDatasets(test_grid, "/")
+            except:
+                print "Could not find dataset.  Defaulting to packed HDF5"
+                list_of_sets = []
             if len(list_of_sets) == 0 and rank == 3:
                 mylog.debug("Detected packed HDF5")
                 self.data_style = 'enzo_packed_3d'
@@ -288,7 +296,6 @@
             second_grid.Level = first_grid.Level
         self.grid_levels[sgi] = second_grid.Level
 
-    _bn = "%s.cpu%%04i"
     def _parse_binary_hierarchy(self):
         mylog.info("Getting the binary hierarchy")
         if not ytcfg.getboolean("yt","serialize"): return False
@@ -616,6 +623,7 @@
     _fieldinfo_fallback = EnzoFieldInfo
     _fieldinfo_known = KnownEnzoFields
     def __init__(self, filename, data_style=None,
+                 file_style = None,
                  parameter_override = None,
                  conversion_override = None,
                  storage_filename = None):
@@ -635,7 +643,7 @@
         self._conversion_override = conversion_override
         self.storage_filename = storage_filename
 
-        StaticOutput.__init__(self, filename, data_style)
+        StaticOutput.__init__(self, filename, data_style, file_style=file_style)
         if "InitialTime" not in self.parameters:
             self.current_time = 0.0
         rp = os.path.join(self.directory, "rates.out")

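The file_style keyword threaded through StaticOutput and EnzoHierarchy here
overrides the default "%s.cpu%%04i" template used to locate grid data files.
A hypothetical use, for a run whose grid files follow a nonstandard naming
scheme (like the default, the template needs a %s slot for the basename and
an escaped %%04i slot for the processor number):

    from yt.mods import load
    pf = load("DD0010/DD0010", file_style="%s.grid%%04i")
    print pf.h.grid_levels.max()
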

--- a/yt/frontends/flash/data_structures.py	Thu Jun 16 14:28:01 2011 -0700
+++ b/yt/frontends/flash/data_structures.py	Thu Jun 16 14:29:11 2011 -0700
@@ -284,9 +284,9 @@
 
         # Determine domain dimensions
         try:
-            nxb = self._find_parameter("integer", "nxb", handle = self._handle)
-            nyb = self._find_parameter("integer", "nyb", handle = self._handle)
-            nzb = self._find_parameter("integer", "nzb", handle = self._handle)
+            nxb = self._find_parameter("integer", "nxb", scalar = True, handle = self._handle)
+            nyb = self._find_parameter("integer", "nyb", scalar = True, handle = self._handle)
+            nzb = self._find_parameter("integer", "nzb", scalar = True, handle = self._handle)
         except KeyError:
             nxb, nyb, nzb = [int(self._handle["/simulation parameters"]['n%sb' % ax])
                               for ax in 'xyz']


--- a/yt/frontends/ramses/_ramses_reader.pyx	Thu Jun 16 14:28:01 2011 -0700
+++ b/yt/frontends/ramses/_ramses_reader.pyx	Thu Jun 16 14:29:11 2011 -0700
@@ -654,17 +654,17 @@
                   np.ndarray[np.float64_t, ndim=3] data,
                   np.ndarray[np.int32_t, ndim=3] filled,
                   int level, int ref_factor,
-                  component_grid_info):
+                  np.ndarray[np.int64_t, ndim=2] component_grid_info):
         cdef int varindex = self.field_ind[field]
         cdef RAMSES_tree *local_tree = NULL
         cdef RAMSES_hydro_data *local_hydro_data = NULL
 
         cdef int gi, i, j, k, domain, offset
         cdef int ir, jr, kr
+        cdef int n
         cdef int offi, offj, offk, odind
         cdef np.int64_t di, dj, dk
-        cdef np.ndarray[np.int64_t, ndim=1] ogrid_info
-        cdef np.ndarray[np.int64_t, ndim=1] og_start_index
+        cdef np.int32_t og_start_index[3]
         cdef np.float64_t temp_data
         cdef np.int64_t end_index[3]
         cdef int to_fill = 0
@@ -672,15 +672,13 @@
         #   (k*2 + j)*2 + i
         for i in range(3):
             end_index[i] = start_index[i] + grid_dims[i]
-        for gi in range(len(component_grid_info)):
-            ogrid_info = component_grid_info[gi]
-            domain = ogrid_info[0]
-            #print "Loading", domain, ogrid_info
+        for gi in range(component_grid_info.shape[0]):
+            domain = component_grid_info[gi,0]
             self.ensure_loaded(field, domain - 1)
             local_tree = self.trees[domain - 1]
             local_hydro_data = self.hydro_datas[domain - 1][varindex]
-            offset = ogrid_info[1]
-            og_start_index = ogrid_info[3:]
+            offset = component_grid_info[gi,1]
+            for n in range(3): og_start_index[n] = component_grid_info[gi,3+n]
             for i in range(2*ref_factor):
                 di = i + og_start_index[0] * ref_factor
                 if di < start_index[0] or di >= end_index[0]: continue
@@ -709,22 +707,6 @@
                         to_fill += 1
         return to_fill
 
-#def recursive_patch_splitting(ProtoSubgrid psg,
-#        np.ndarray[np.int64_t, ndim=1] dims,
-#        np.ndarray[np.int64_t, ndim=1] inds,
-#        np.ndarray[np.int64_t, ndim=2] left_index,
-#        np.ndarray[np.int64_t, ndim=2] right_index,
-#        np.ndarray[np.int64_t, ndim=2] gdims,
-#        np.ndarray[np.int64_t, ndim=2] fl,
-#        int num_deep = 0):
-#    cdef float min_eff = 0.1
-#    if num_deep > 40:
-#        psg.efficiency = min_eff
-#        return [psg]
-#    if psg.efficiency > min_eff or psg.efficiency < 0.0:
-#        return [psg]
-#    cdef 
-#
 cdef class ProtoSubgrid:
     cdef np.int64_t *signature[3]
     cdef np.int64_t left_edge[3]
@@ -784,10 +766,10 @@
         sig2 = self.sigs[2]
         efficiency = 0.0
         cdef int used
-        self.grid_file_locations = []
+        cdef np.ndarray[np.int32_t, ndim=1] mask
+        mask = np.zeros(ng, 'int32')
+        used = 0
         for gi in range(ng):
-            used = 0
-            nnn = 0
             for l0 in range(2):
                 i0 = left_edges[gi, 0] + l0
                 if i0 < self.left_edge[0]: continue
@@ -807,12 +789,20 @@
                         i = i2 - self.left_edge[2]
                         sig2[i] += 1
                         efficiency += 1
-                        used = 1
-            if used == 1:
+                        used += 1
+                        mask[gi] = 1
+        cdef np.ndarray[np.int64_t, ndim=2] gfl
+        gfl = np.zeros((used, 6), 'int64')
+        used = 0
+        self.grid_file_locations = gfl
+        for gi in range(ng):
+            if mask[gi] == 1:
                 grid_file_locations[gi,3] = left_edges[gi, 0]
                 grid_file_locations[gi,4] = left_edges[gi, 1]
                 grid_file_locations[gi,5] = left_edges[gi, 2]
-                self.grid_file_locations.append(grid_file_locations[gi,:])
+                for i in range(6):
+                    gfl[used, i] = grid_file_locations[gi,i]
+                used += 1
          
         self.dd = np.ones(3, dtype='int64')
         for i in range(3):
@@ -828,7 +817,7 @@
 
     @cython.boundscheck(False)
     @cython.wraparound(False)
-    def find_split(self):
+    cdef void find_split(self, int *tr):
         # First look for zeros
         cdef int i, center, ax
         cdef np.ndarray[ndim=1, dtype=np.int64_t] axes
@@ -842,7 +831,8 @@
             for i in range(self.dimensions[ax]):
                 if sig[i] == 0 and i > 0 and i < self.dimensions[ax] - 1:
                     #print "zero: %s (%s)" % (i, self.dimensions[ax])
-                    return 0, ax, i
+                    tr[0] = 0; tr[1] = ax; tr[2] = i
+                    return
         zcstrength = 0
         zcp = 0
         zca = -1
@@ -866,7 +856,8 @@
                         zca = ax
             free(sig2d)
         #print "zcp: %s (%s)" % (zcp, self.dimensions[ax])
-        return 1, ax, zcp
+        tr[0] = 1; tr[1] = ax; tr[2] = zcp
+        return
 
     @cython.boundscheck(False)
     @cython.wraparound(False)
@@ -970,7 +961,9 @@
 @cython.boundscheck(False)
 @cython.wraparound(False)
 def get_array_indices_lists(np.ndarray[np.int64_t, ndim=1] ind,
-                            np.ndarray[np.int64_t, ndim=1] uind):
+                            np.ndarray[np.int64_t, ndim=1] uind,
+                            np.ndarray[np.int64_t, ndim=2] lefts,
+                            np.ndarray[np.int64_t, ndim=2] files):
     cdef np.ndarray[np.int64_t, ndim=1] count = np.zeros(uind.shape[0], 'int64')
     cdef int n, i
     cdef np.int64_t mi, mui
@@ -980,23 +973,89 @@
             if uind[n] == mi:
                 count[n] += 1
                 break
-    cdef np.int64_t **inds
-    inds = <np.int64_t **> malloc(sizeof(np.int64_t *) * uind.shape[0])
+    cdef np.int64_t **alefts
+    cdef np.int64_t **afiles
+    afiles = <np.int64_t **> malloc(sizeof(np.int64_t *) * uind.shape[0])
+    alefts = <np.int64_t **> malloc(sizeof(np.int64_t *) * uind.shape[0])
     cdef int *li = <int *> malloc(sizeof(int) * uind.shape[0])
-    cdef np.ndarray[np.int64_t, ndim=1] indices
-    all_indices = []
+    cdef np.ndarray[np.int64_t, ndim=2] locations
+    cdef np.ndarray[np.int64_t, ndim=2] left
+    all_locations = []
+    all_lefts = []
     for n in range(uind.shape[0]):
-        indices = np.zeros(count[n], 'int64')
-        all_indices.append(indices)
-        inds[n] = <np.int64_t *> indices.data
+        locations = np.zeros((count[n], 6), 'int64')
+        left = np.zeros((count[n], 3), 'int64')
+        all_locations.append(locations)
+        all_lefts.append(left)
+        afiles[n] = <np.int64_t *> locations.data
+        alefts[n] = <np.int64_t *> left.data
         li[n] = 0
+    cdef int fi
     for i in range(ind.shape[0]):
         mi = ind[i]
         for n in range(uind.shape[0]):
             if uind[n] == mi:
-                inds[n][li[n]] = i
+                for fi in range(3):
+                    alefts[n][li[n] * 3 + fi] = lefts[i, fi]
+                for fi in range(6):
+                    afiles[n][li[n] * 6 + fi] = files[i, fi]
                 li[n] += 1
                 break
-    free(inds) # not inds[...]
-    free(li)
-    return all_indices
+    free(afiles)
+    free(alefts)
+    return all_locations, all_lefts
+
+def recursive_patch_splitting(ProtoSubgrid psg,
+        np.ndarray[np.int64_t, ndim=1] dims,
+        np.ndarray[np.int64_t, ndim=1] ind,
+        np.ndarray[np.int64_t, ndim=2] left_index,
+        np.ndarray[np.int64_t, ndim=2] fl,
+        int num_deep = 0):
+    cdef float min_eff = 0.1
+    cdef ProtoSubgrid L, R
+    cdef np.ndarray[np.int64_t, ndim=1] dims_l, li_l
+    cdef np.ndarray[np.int64_t, ndim=1] dims_r, li_r
+    cdef int tt, ax, fp, i, j, k, gi
+    cdef int tr[3]
+    if num_deep > 40:
+        psg.efficiency = min_eff
+        return [psg]
+    if psg.efficiency > min_eff or psg.efficiency < 0.0:
+        return [psg]
+    psg.find_split(tr)
+    tt = tr[0]
+    ax = tr[1]
+    fp = tr[2]
+    if (fp % 2) != 0:
+        if dims[ax] != fp + 1:
+            fp += 1
+        else:
+            fp -= 1
+    dims_l = dims.copy()
+    dims_l[ax] = fp
+    li_l = ind.copy()
+    for i in range(3):
+        if dims_l[i] <= 0: return [psg]
+    dims_r = dims.copy()
+    dims_r[ax] -= fp
+    li_r = ind.copy()
+    li_r[ax] += fp
+    for i in range(3):
+        if dims_r[i] <= 0: return [psg]
+    L = ProtoSubgrid(li_l, dims_l, left_index, fl)
+    if L.efficiency > 1.0: raise RuntimeError
+    if L.efficiency <= 0.0: rv_l = []
+    elif L.efficiency < min_eff:
+        rv_l = recursive_patch_splitting(L, dims_l, li_l,
+                left_index, fl, num_deep + 1)
+    else:
+        rv_l = [L]
+    R = ProtoSubgrid(li_r, dims_r, left_index, fl)
+    if R.efficiency > 1.0: raise RuntimeError
+    if R.efficiency <= 0.0: rv_r = []
+    elif R.efficiency < min_eff:
+        rv_r = recursive_patch_splitting(R, dims_r, li_r,
+                left_index, fl, num_deep + 1)
+    else:
+        rv_r = [R]
+    return rv_r + rv_l

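One subtlety carried over from the Python version removed below: the split
point returned by find_split is forced to an even index, shifting by one in
whichever direction stays in bounds, presumably so that child patches stay
aligned to the 2x2x2 oct structure.  The adjustment in isolation:

    def adjust_split(fp, dim):
        # fp: proposed split index on the chosen axis; dim: axis length
        if (fp % 2) != 0:
            if dim != fp + 1:
                fp += 1   # round up unless that consumes the whole axis
            else:
                fp -= 1   # otherwise round down
        return fp

    assert adjust_split(5, 16) == 6
    assert adjust_split(5, 6) == 4
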

--- a/yt/frontends/ramses/data_structures.py	Thu Jun 16 14:28:01 2011 -0700
+++ b/yt/frontends/ramses/data_structures.py	Thu Jun 16 14:29:11 2011 -0700
@@ -34,7 +34,10 @@
       AMRHierarchy
 from yt.data_objects.static_output import \
       StaticOutput
-import _ramses_reader
+try:
+    import _ramses_reader
+except ImportError:
+    _ramses_reader = None
 from .fields import RAMSESFieldInfo, KnownRAMSESFields
 from yt.utilities.definitions import \
     mpc_conversion
@@ -183,29 +186,24 @@
             # Strictly speaking, we don't care about the index of any
             # individual oct at this point.  So we can then split them up.
             unique_indices = na.unique(hilbert_indices)
-            print "Level % 2i has % 10i unique indices for %0.3e octs" % (
+            mylog.debug("Level % 2i has % 10i unique indices for %0.3e octs",
                         level, unique_indices.size, hilbert_indices.size)
-            all_indices = _ramses_reader.get_array_indices_lists(
-                        hilbert_indices, unique_indices)
-            for curve_index, my_octs in zip(unique_indices, all_indices):
-                #print "Handling", curve_index
-                #my_octs = (hilbert_indices == curve_index)
-                dleft_index = left_index[my_octs,:]
-                dfl = fl[my_octs,:]
+            locs, lefts = _ramses_reader.get_array_indices_lists(
+                        hilbert_indices, unique_indices, left_index, fl)
+            for dleft_index, dfl in zip(lefts, locs):
                 initial_left = na.min(dleft_index, axis=0)
                 idims = (na.max(dleft_index, axis=0) - initial_left).ravel()+2
-                #if level > 10: insert_ipython()
-                #print initial_left, idims
                 psg = _ramses_reader.ProtoSubgrid(initial_left, idims,
                                 dleft_index, dfl)
                 if psg.efficiency <= 0: continue
                 self.num_deep = 0
-                psgs.extend(self._recursive_patch_splitting(
+                psgs.extend(_ramses_reader.recursive_patch_splitting(
                     psg, idims, initial_left, 
                     dleft_index, dfl))
-            print "Done with level % 2i" % (level)
+            mylog.debug("Done with level % 2i", level)
             pbar.finish()
             self.proto_grids.append(psgs)
+            print sum(len(psg.grid_file_locations) for psg in psgs)
             sums = na.zeros(3, dtype='int64')
             mylog.info("Final grid count: %s", len(self.proto_grids[level]))
             if len(self.proto_grids[level]) == 1: continue
@@ -214,56 +212,6 @@
             #assert(na.all(sums == dims.prod(axis=1).sum()))
         self.num_grids = sum(len(l) for l in self.proto_grids)
 
-    num_deep = 0
-
-    @num_deep_inc
-    def _recursive_patch_splitting(self, psg, dims, ind,
-            left_index, fl):
-        min_eff = 0.1 # This isn't always respected.
-        if self.num_deep > 40:
-            # If we've recursed more than 100 times, we give up.
-            psg.efficiency = min_eff
-            return [psg]
-        if psg.efficiency > min_eff or psg.efficiency < 0.0:
-            return [psg]
-        tt, ax, fp = psg.find_split()
-        if (fp % 2) != 0:
-            if dims[ax] != fp + 1:
-                fp += 1
-            else:
-                fp -= 1
-        #print " " * self.num_deep + "Got ax", ax, "fp", fp
-        dims_l = dims.copy()
-        dims_l[ax] = fp
-        li_l = ind.copy()
-        if na.any(dims_l <= 0): return [psg]
-        L = _ramses_reader.ProtoSubgrid(
-                li_l, dims_l, left_index, fl)
-        #print " " * self.num_deep + "L", tt, L.efficiency
-        if L.efficiency > 1.0: raise RuntimeError
-        if L.efficiency <= 0.0: L = []
-        elif L.efficiency < min_eff:
-            L = self._recursive_patch_splitting(L, dims_l, li_l,
-                    left_index, fl)
-        else:
-            L = [L]
-        dims_r = dims.copy()
-        dims_r[ax] -= fp
-        li_r = ind.copy()
-        li_r[ax] += fp
-        if na.any(dims_r <= 0): return [psg]
-        R = _ramses_reader.ProtoSubgrid(
-                li_r, dims_r, left_index, fl)
-        #print " " * self.num_deep + "R", tt, R.efficiency
-        if R.efficiency > 1.0: raise RuntimeError
-        if R.efficiency <= 0.0: R = []
-        elif R.efficiency < min_eff:
-            R = self._recursive_patch_splitting(R, dims_r, li_r,
-                    left_index, fl)
-        else:
-            R = [R]
-        return L + R
-        
     def _parse_hierarchy(self):
         # We have important work to do
         grids = []
@@ -313,6 +261,9 @@
     
     def __init__(self, filename, data_style='ramses',
                  storage_filename = None):
+        global _ramses_reader
+        if _ramses_reader is None:
+            import _ramses_reader
         StaticOutput.__init__(self, filename, data_style)
         self.storage_filename = storage_filename
 


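The call site above also documents the new contract of
get_array_indices_lists: rather than returning index lists for the caller to
slice with, it partitions left_index and fl itself and returns the
per-unique-index (locations, lefts) pairs.  Behaviorally it is equivalent to
this NumPy sketch (the Cython version works over raw pointers for speed):

    import numpy as na

    def get_array_indices_lists(ind, uind, lefts, files):
        # Group the rows of lefts (N,3) and files (N,6) by which unique
        # Hilbert index in uind each entry of ind matches.
        all_locations, all_lefts = [], []
        for u in uind:
            sel = (ind == u)
            all_locations.append(files[sel].astype("int64"))
            all_lefts.append(lefts[sel].astype("int64"))
        return all_locations, all_lefts
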
--- a/yt/frontends/stream/data_structures.py	Thu Jun 16 14:28:01 2011 -0700
+++ b/yt/frontends/stream/data_structures.py	Thu Jun 16 14:29:11 2011 -0700
@@ -38,6 +38,8 @@
 from yt.utilities.logger import ytLogger as mylog
 from yt.data_objects.field_info_container import \
     FieldInfoContainer, NullFunc
+from yt.utilities.amr_utils import \
+    get_box_grids_level
 
 from .fields import \
     StreamFieldContainer, \
@@ -97,7 +99,7 @@
 class StreamHandler(object):
     def __init__(self, left_edges, right_edges, dimensions,
                  levels, parent_ids, particle_count, processor_ids,
-                 fields):
+                 fields, io = None):
         self.left_edges = left_edges
         self.right_edges = right_edges
         self.dimensions = dimensions
@@ -107,6 +109,7 @@
         self.processor_ids = processor_ids
         self.num_grids = self.levels.size
         self.fields = fields
+        self.io = io
 
     def get_fields(self):
         return self.fields.all_fields
@@ -157,18 +160,22 @@
         self.grid_procs = self.stream_handler.processor_ids
         self.grid_particle_count[:] = self.stream_handler.particle_count
         mylog.debug("Copying reverse tree")
-        reverse_tree = self.stream_handler.parent_ids.tolist()
-        # Initial setup:
-        mylog.debug("Reconstructing parent-child relationships")
         self.grids = []
         # We enumerate, so it's 0-indexed id and 1-indexed pid
-        self.filenames = ["-1"] * self.num_grids
-        for id,pid in enumerate(reverse_tree):
+        for id in xrange(self.num_grids):
             self.grids.append(self.grid(id, self))
-            self.grids[-1].Level = self.grid_levels[id, 0]
-            if pid >= 0:
-                self.grids[-1]._parent_id = pid
-                self.grids[pid]._children_ids.append(self.grids[-1].id)
+            self.grids[id].Level = self.grid_levels[id, 0]
+        parent_ids = self.stream_handler.parent_ids
+        if parent_ids is not None:
+            reverse_tree = self.stream_handler.parent_ids.tolist()
+            # Initial setup:
+            for id,pid in enumerate(reverse_tree):
+                if pid >= 0:
+                    self.grids[id]._parent_id = pid
+                    self.grids[pid]._children_ids.append(self.grids[id].id)
+        else:
+            mylog.debug("Reconstructing parent-child relationships")
+            self._reconstruct_parent_child()
         self.max_level = self.grid_levels.max()
         mylog.debug("Preparing grids")
         for i, grid in enumerate(self.grids):
@@ -179,6 +186,22 @@
         self.grids = na.array(self.grids, dtype='object')
         mylog.debug("Prepared")
 
+    def _reconstruct_parent_child(self):
+        mask = na.empty(len(self.grids), dtype='int32')
+        mylog.debug("First pass; identifying child grids")
+        for i, grid in enumerate(self.grids):
+            get_box_grids_level(self.grid_left_edge[i,:],
+                                self.grid_right_edge[i,:],
+                                self.grid_levels[i] + 1,
+                                self.grid_left_edge, self.grid_right_edge,
+                                self.grid_levels, mask)
+            ids = na.where(mask.astype("bool"))
+            grid._children_ids = ids[0] # where is a tuple
+        mylog.debug("Second pass; identifying parents")
+        for i, grid in enumerate(self.grids): # Second pass
+            for child in grid.Children:
+                child._parent_id = i
+
     def _initialize_grid_arrays(self):
         AMRHierarchy._initialize_grid_arrays(self)
         self.grid_procs = na.zeros((self.num_grids,1),'int32')
@@ -214,7 +237,10 @@
         self.max_level = self.grid_levels.max()
 
     def _setup_data_io(self):
-        self.io = io_registry[self.data_style](self.stream_handler)
+        if self.stream_handler.io is not None:
+            self.io = self.stream_handler.io
+        else:
+            self.io = io_registry[self.data_style](self.stream_handler)
 
 class StreamStaticOutput(StaticOutput):
     _hierarchy_class = StreamHierarchy

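When no parent_ids array is supplied, the stream hierarchy is now rebuilt
geometrically: get_box_grids_level marks, for each grid, every grid at the
next refinement level inside its bounds, and a second pass inverts the child
lists into parent pointers.  The same idea as a plain-NumPy sketch, using
simple containment and ignoring periodicity:

    import numpy as na   # the numpy alias this codebase uses

    def reconstruct_parent_child(left, right, levels):
        # left, right: (N, 3) grid edge arrays; levels: (N,) integers.
        N = len(levels)
        children = [[] for _ in range(N)]
        parents = na.empty(N, dtype="int64")
        parents.fill(-1)
        for i in range(N):
            # Candidate children: one level finer, contained in grid i.
            inside = ((levels == levels[i] + 1) &
                      na.all(left >= left[i], axis=1) &
                      na.all(right <= right[i], axis=1))
            for j in na.where(inside)[0]:
                children[i].append(j)
                parents[j] = i
        return children, parents
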

--- a/yt/funcs.py	Thu Jun 16 14:28:01 2011 -0700
+++ b/yt/funcs.py	Thu Jun 16 14:29:11 2011 -0700
@@ -435,6 +435,10 @@
 # If we recognize one of the arguments on the command line as indicating a
 # different mechanism for handling tracebacks, we attach one of those handlers
 # and remove the argument from sys.argv.
+#
+# This fallback is for Paraview:
+if not hasattr(sys, 'argv') or sys.argv is None: sys.argv = []
+# Now, we check.
 if "--paste" in sys.argv:
     sys.excepthook = paste_traceback
     del sys.argv[sys.argv.index("--paste")]


--- a/yt/gui/reason/extdirect_repl.py	Thu Jun 16 14:28:01 2011 -0700
+++ b/yt/gui/reason/extdirect_repl.py	Thu Jun 16 14:29:11 2011 -0700
@@ -139,7 +139,9 @@
         self.repl.payload_handler.add_payload(
             {'type': 'cell_results',
              'output': result,
-             'input': highlighter(code)})
+             'input': highlighter(code),
+             'raw_input': code},
+            )
 
 def deliver_image(im):
     if hasattr(im, 'read'):
@@ -384,7 +386,17 @@
         cs = cStringIO.StringIO()
         cs.write("\n######\n".join(self.executed_cell_texts))
         cs = cs.getvalue()
-        ret = p.pastes.newPaste('pytb', cs, None, '', '', True)
+        ret = p.pastes.newPaste('python', cs, None, '', '', True)
+        site = "http://paste.enzotools.org/show/%s" % ret
+        return {'status': 'SUCCESS', 'site': site}
+
+    @lockit
+    def paste_text(self, to_paste):
+        import xmlrpclib, cStringIO
+        p = xmlrpclib.ServerProxy(
+            "http://paste.enzotools.org/xmlrpc/",
+            allow_none=True)
+        ret = p.pastes.newPaste('python', to_paste, None, '', '', True)
         site = "http://paste.enzotools.org/show/%s" % ret
         return {'status': 'SUCCESS', 'site': site}
 
@@ -546,6 +558,7 @@
         """ % dict(pfname = pfname)
         funccall = "\n".join((line.strip() for line in funccall.splitlines()))
         self.execute(funccall, hide = True)
+        self.execution_thread.queue.join()
         pf = self.locals['_tpf']
         levels = pf.h.grid_levels
         left_edge = pf.h.grid_left_edge
@@ -579,6 +592,7 @@
         """ % dict(pfname = pfname)
         funccall = "\n".join((line.strip() for line in funccall.splitlines()))
         self.execute(funccall, hide = True)
+        self.execution_thread.queue.join()
         pf = self.locals['_tpf']
         corners = pf.h.grid_corners
         levels = pf.h.grid_levels


Binary file yt/gui/reason/html/images/upload.png has changed


--- a/yt/gui/reason/html/index.html	Thu Jun 16 14:28:01 2011 -0700
+++ b/yt/gui/reason/html/index.html	Thu Jun 16 14:29:11 2011 -0700
@@ -53,6 +53,9 @@
     .singledownarrow { 
         background-image:url(images/single_down_sm.png) !important;
     }
+    .upload { 
+        background-image:url(images/upload.png) !important;
+    }
     #input_line {
         font-family: monospace;
     }


--- a/yt/gui/reason/html/js/functions.js	Thu Jun 16 14:28:01 2011 -0700
+++ b/yt/gui/reason/html/js/functions.js	Thu Jun 16 14:29:11 2011 -0700
@@ -54,7 +54,7 @@
         } else if (payload['type'] == 'cell_results') {
             text = "<pre>"+payload['output']+"</pre>";
             formatted_input = payload['input']
-            cell = new_cell(formatted_input, text);
+            cell = new_cell(formatted_input, text, payload['raw_input']);
             OutputContainer.add(cell);
             OutputContainer.doLayout();
             notebook.doLayout();
@@ -154,23 +154,70 @@
     });
 }
 
-function new_cell(input, result) {
+function new_cell(input, result, raw_input) {
     var name = "cell_" + cell_count;
     var CellPanel = new Ext.Panel(
         { 
             id: name, 
             //title: "Cell " + cell_count,
             items: [
-                new Ext.Panel({
-                    id:name+"_input",
-                    html:input,
-                }),
-                new Ext.Panel({
-                    id:name+"_result",
-                    autoScroll:true,
-                    width: "100%",
-                    html:result,
-                })
+                { xtype:'panel',
+                  layout: 'hbox',
+                  id:name+"_input",
+                  items: [
+                    { xtype:'panel',
+                      html:input,
+                      flex:1,
+                      boxMinHeight: 40,
+                    },
+                    { xtype: 'button',
+                      width: 24,
+                      height: 24,
+                      iconCls: 'upload',
+                      tooltip: 'Upload to Pastebin',
+                      listeners: {
+                          click: function(f, e) {
+                            yt_rpc.ExtDirectREPL.paste_text({to_paste:raw_input},
+                              function(f, a) {
+                                if (a.result['status'] == 'SUCCESS') {
+                                    var alert_text = 'Pasted cell to:<br>' + 
+                                    a.result['site']
+                                    var alert_text_rec = 'Pasted cell to: ' + 
+                                    a.result['site']
+                                    Ext.Msg.alert('Pastebin', alert_text);
+                                    var record = new logging_store.recordType(
+                                        {record: alert_text_rec });
+                                    logging_store.add(record, number_log_records++);
+                              }
+                            });
+                          }
+                        }
+                    },
+                    { xtype: 'button',
+                      width: 24,
+                      height: 24,
+                      iconCls: 'doubleuparrow',
+                      tooltip: 'Copy into current cell',
+                      listeners: {
+                          click: function(f, e) {
+                            repl_input.get('input_line').setValue(raw_input);
+                          }
+                      },
+                    },
+                  ],
+                },
+                { xtype:'panel',
+                  layout: 'hbox',
+                  items: [
+                    { xtype:'panel',
+                      id:name+"_result",
+                      autoScroll:true,
+                      flex: 1,
+                      html:result,
+                      boxMinHeight: 40,
+                    },
+                  ],
+                },
             ]
         }
     );


--- a/yt/gui/reason/html/js/reason.js	Thu Jun 16 14:28:01 2011 -0700
+++ b/yt/gui/reason/html/js/reason.js	Thu Jun 16 14:29:11 2011 -0700
@@ -55,11 +55,11 @@
 }
 
 var repl_input = new Ext.FormPanel({
-    title: 'YT Input',
     url: 'push',
-    flex: 0.2,
     layout: 'fit',
     padding: 5,
+    height: '100%',
+    flex: 1.0,
     items: [{
         id: 'input_line',
         xtype: 'textarea',
@@ -103,14 +103,37 @@
 });
 
 
-
+var CellInputContainer = new Ext.Panel({
+    title: 'YT Input',
+    flex: 0.3,
+    layout: {type: 'hbox',
+             pack: 'start',
+             align: 'stretch',
+             },
+    items: [ repl_input,
+            { xtype: 'button',
+              width: 24,
+              height: 24,
+              iconCls: 'doubledownarrow',
+              tooltip: 'Execute Cell',
+              listeners: {
+                  click: function(f, e) {
+                    disable_input();
+                    yt_rpc.ExtDirectREPL.execute({
+                        code:repl_input.get('input_line').getValue()},
+                    handle_result);
+                  }
+              },
+            }
+           ]
+});
 
 
 var OutputContainer = new Ext.Panel({
     title: 'YT Output',
     id: 'output_container',
     autoScroll: true,
-    flex: 0.8,
+    flex: 0.7,
     items: []
 });
 
@@ -141,6 +164,11 @@
                 null, {preventDefault: true});
             }
         },
+        dblclick: {
+            fn: function(node, e) {
+                treePanel.fireEvent("contextmenu", node, e);
+            }
+        },
         contextmenu: {
             fn: function(node, event){
                 var rightclickMenu;
@@ -156,10 +184,10 @@
                 } else if (node.attributes.objdata.type == 'pf') {
                   rightClickMenu = new Ext.menu.Menu({
                       items: [
-                          {
+                          /*{
                               text: 'View Grids',
                               handler: getGridViewerHandler(node),
-                          }, {
+                          },*/ {
                               text: 'View Grid Data',
                               handler: getGridDataViewerHandler(node),
                           }, {
@@ -171,10 +199,10 @@
                           /*}, {
                               text: 'Create Sphere',
                               handler: getSphereCreator(node), */
-                          }, {
+                          }, /*{
                               text: 'View Streamlines',
                               handler: getStreamlineViewerHandler(node),
-                          }
+                          }, */
                       ]
                   });
                 }
@@ -263,7 +291,7 @@
                         closable: false,
                         autoScroll: false,
                         iconCls: 'console',
-                        items: [repl_input, OutputContainer]
+                        items: [CellInputContainer, OutputContainer]
                     }, 
                 ]
             }


--- a/yt/gui/reason/html/map_index.html	Thu Jun 16 14:28:01 2011 -0700
+++ b/yt/gui/reason/html/map_index.html	Thu Jun 16 14:29:11 2011 -0700
@@ -8,6 +8,8 @@
 <script type="text/javascript">
   $(document).ready(function() {
       // initialize the map on the "map" div with a given center and zoom 
+      $("#map").width($(window).width());
+      $("#map").height($(window).height());
       var map = new L.Map('map', {
                     center: new L.LatLng(0.0, 0.0),
                     zoom: 0,


--- a/yt/mods.py	Thu Jun 16 14:28:01 2011 -0700
+++ b/yt/mods.py	Thu Jun 16 14:29:11 2011 -0700
@@ -40,6 +40,7 @@
 from yt.utilities.performance_counters import yt_counters, time_function
 from yt.config import ytcfg
 import yt.utilities.physical_constants as physical_constants
+from yt.utilities.cookbook import Intent
 
 from yt.data_objects.api import \
     BinnedProfile1D, BinnedProfile2D, BinnedProfile3D, \


--- a/yt/utilities/_amr_utils/ContourFinding.pyx	Thu Jun 16 14:28:01 2011 -0700
+++ b/yt/utilities/_amr_utils/ContourFinding.pyx	Thu Jun 16 14:29:11 2011 -0700
@@ -26,6 +26,7 @@
 import numpy as np
 cimport numpy as np
 cimport cython
+from stdlib cimport malloc, free
 
 cdef extern from "math.h":
     double fabs(double x)
@@ -38,6 +39,50 @@
     if i0 < i1: return i0
     return i1
 
+cdef extern from "union_find.h":
+    ctypedef struct forest_node:
+        void *value
+        forest_node *parent
+        int rank
+
+    forest_node* MakeSet(void* value)
+    void Union(forest_node* node1, forest_node* node2)
+    forest_node* Find(forest_node* node)
+
+ctypedef struct CellIdentifier:
+    np.int64_t hindex
+    int level
+
+cdef class GridContourContainer:
+    cdef np.int64_t dims[3]
+    cdef np.int64_t start_indices[3]
+    cdef forest_node **join_tree
+    cdef np.int64_t ncells
+
+    def __init__(self, dimensions, indices):
+        cdef int i
+        self.ncells = 1
+        for i in range(3):
+            self.ncells *= dimensions[i]
+            self.dims[i] = dimensions[i]
+            self.start_indices[i] = indices[i]
+        self.join_tree = <forest_node **> malloc(sizeof(forest_node *)
+                                                 * self.ncells)
+        for i in range(self.ncells): self.join_tree[i] = NULL
+
+    def __dealloc__(self):
+        cdef int i
+        for i in range(self.ncells):
+            if self.join_tree[i] != NULL: free(self.join_tree[i])
+        free(self.join_tree)
+
+    #def construct_join_tree(self,
+    #            np.ndarray[np.float64_t, ndim=3] field,
+    #            np.ndarray[np.bool_t, ndim=3] mask):
+    #    # This only looks at the components of the grid that are actually
+    #    # inside this grid -- boundary conditions are handled later.
+    #    pass
+
 #@cython.boundscheck(False)
 #@cython.wraparound(False)
 def construct_boundary_relationships(


--- a/yt/utilities/_amr_utils/VolumeIntegrator.pyx	Thu Jun 16 14:28:01 2011 -0700
+++ b/yt/utilities/_amr_utils/VolumeIntegrator.pyx	Thu Jun 16 14:29:11 2011 -0700
@@ -58,6 +58,7 @@
 cdef extern from "math.h":
     double exp(double x)
     float expf(float x)
+    long double expl(long double x)
     double floor(double x)
     double ceil(double x)
     double fmod(double x, double y)
@@ -742,7 +743,7 @@
     cdef void add_stars(self, kdtree_utils.kdres *ballq,
             np.float64_t dt, np.float64_t pos[3], np.float64_t *rgba):
         cdef int i, n, ns
-        cdef double px, py, pz
+        cdef np.float64_t px, py, pz
         cdef np.float64_t gexp, gaussian
         cdef np.float64_t* colors = NULL
         ns = kdtree_utils.kd_res_size(ballq)
@@ -754,7 +755,7 @@
             gexp = (px - pos[0])*(px - pos[0]) \
                  + (py - pos[1])*(py - pos[1]) \
                  + (pz - pos[2])*(pz - pos[2])
-            gaussian = self.star_coeff * exp(-gexp/self.star_sigma_num)
+            gaussian = self.star_coeff * expl(-gexp/self.star_sigma_num)
             for i in range(3): rgba[i] += gaussian*dt*colors[i]
         kdtree_utils.kd_res_rewind(ballq)
         


--- a/yt/utilities/_amr_utils/kdtree.c	Thu Jun 16 14:28:01 2011 -0700
+++ b/yt/utilities/_amr_utils/kdtree.c	Thu Jun 16 14:29:11 2011 -0700
@@ -50,11 +50,11 @@
 
 struct kdhyperrect {
 	int dim;
-	double *min, *max;              /* minimum/maximum coords */
+	npy_float64 *min, *max;              /* minimum/maximum coords */
 };
 
 struct kdnode {
-	double *pos;
+	npy_float64 *pos;
 	int dir;
 	void *data;
 
@@ -63,7 +63,7 @@
 
 struct res_node {
 	struct kdnode *item;
-	double dist_sq;
+	npy_float64 dist_sq;
 	struct res_node *next;
 };
 
@@ -84,15 +84,15 @@
 
 
 static void clear_rec(struct kdnode *node, void (*destr)(void*));
-static int insert_rec(struct kdnode **node, const double *pos, void *data, int dir, int dim);
-static int rlist_insert(struct res_node *list, struct kdnode *item, double dist_sq);
+static int insert_rec(struct kdnode **node, const npy_float64 *pos, void *data, int dir, int dim);
+static int rlist_insert(struct res_node *list, struct kdnode *item, npy_float64 dist_sq);
 static void clear_results(struct kdres *set);
 
-static struct kdhyperrect* hyperrect_create(int dim, const double *min, const double *max);
+static struct kdhyperrect* hyperrect_create(int dim, const npy_float64 *min, const npy_float64 *max);
 static void hyperrect_free(struct kdhyperrect *rect);
 static struct kdhyperrect* hyperrect_duplicate(const struct kdhyperrect *rect);
-static void hyperrect_extend(struct kdhyperrect *rect, const double *pos);
-static double hyperrect_dist_sq(struct kdhyperrect *rect, const double *pos);
+static void hyperrect_extend(struct kdhyperrect *rect, const npy_float64 *pos);
+static npy_float64 hyperrect_dist_sq(struct kdhyperrect *rect, const npy_float64 *pos);
 
 #ifdef USE_LIST_NODE_ALLOCATOR
 static struct res_node *alloc_resnode(void);
@@ -159,7 +159,7 @@
 }
 
 
-static int insert_rec(struct kdnode **nptr, const double *pos, void *data, int dir, int dim)
+static int insert_rec(struct kdnode **nptr, const npy_float64 *pos, void *data, int dir, int dim)
 {
 	int new_dir;
 	struct kdnode *node;
@@ -188,7 +188,7 @@
 	return insert_rec(&(*nptr)->right, pos, data, new_dir, dim);
 }
 
-int kd_insert(struct kdtree *tree, const double *pos, void *data)
+int kd_insert(struct kdtree *tree, const npy_float64 *pos, void *data)
 {
 	if (insert_rec(&tree->root, pos, data, 0, tree->dim)) {
 		return -1;
@@ -205,8 +205,8 @@
 
 int kd_insertf(struct kdtree *tree, const float *pos, void *data)
 {
-	static double sbuf[16];
-	double *bptr, *buf = 0;
+	static npy_float64 sbuf[16];
+	npy_float64 *bptr, *buf = 0;
 	int res, dim = tree->dim;
 
 	if(dim > 16) {
@@ -236,9 +236,9 @@
 	return res;
 }
 
-int kd_insert3(struct kdtree *tree, double x, double y, double z, void *data)
+int kd_insert3(struct kdtree *tree, npy_float64 x, npy_float64 y, npy_float64 z, void *data)
 {
-	double buf[3];
+	npy_float64 buf[3];
 	buf[0] = x;
 	buf[1] = y;
 	buf[2] = z;
@@ -247,16 +247,16 @@
 
 int kd_insert3f(struct kdtree *tree, float x, float y, float z, void *data)
 {
-	double buf[3];
+	npy_float64 buf[3];
 	buf[0] = x;
 	buf[1] = y;
 	buf[2] = z;
 	return kd_insert(tree, buf, data);
 }
 
-static int find_nearest(struct kdnode *node, const double *pos, double range, struct res_node *list, int ordered, int dim)
+static int find_nearest(struct kdnode *node, const npy_float64 *pos, npy_float64 range, struct res_node *list, int ordered, int dim)
 {
-	double dist_sq, dx;
+	npy_float64 dist_sq, dx;
 	int i, ret, added_res = 0;
 
 	if(!node) return 0;
@@ -287,13 +287,13 @@
 	return added_res;
 }
 
-static void kd_nearest_i(struct kdnode *node, const double *pos, struct kdnode **result, double *result_dist_sq, struct kdhyperrect* rect)
+static void kd_nearest_i(struct kdnode *node, const npy_float64 *pos, struct kdnode **result, npy_float64 *result_dist_sq, struct kdhyperrect* rect)
 {
 	int dir = node->dir;
 	int i, side;
-	double dummy, dist_sq;
+	npy_float64 dummy, dist_sq;
 	struct kdnode *nearer_subtree, *farther_subtree;
-	double *nearer_hyperrect_coord, *farther_hyperrect_coord;
+	npy_float64 *nearer_hyperrect_coord, *farther_hyperrect_coord;
 
 	/* Decide whether to go left or right in the tree */
 	dummy = pos[dir] - node->pos[dir];
@@ -348,12 +348,12 @@
 	}
 }
 
-struct kdres *kd_nearest(struct kdtree *kd, const double *pos)
+struct kdres *kd_nearest(struct kdtree *kd, const npy_float64 *pos)
 {
 	struct kdhyperrect *rect;
 	struct kdnode *result;
 	struct kdres *rset;
-	double dist_sq;
+	npy_float64 dist_sq;
 	int i;
 
 	if (!kd) return 0;
@@ -405,8 +405,8 @@
 
 struct kdres *kd_nearestf(struct kdtree *tree, const float *pos)
 {
-	static double sbuf[16];
-	double *bptr, *buf = 0;
+	static npy_float64 sbuf[16];
+	npy_float64 *bptr, *buf = 0;
 	int dim = tree->dim;
 	struct kdres *res;
 
@@ -437,9 +437,9 @@
 	return res;
 }
 
-struct kdres *kd_nearest3(struct kdtree *tree, double x, double y, double z)
+struct kdres *kd_nearest3(struct kdtree *tree, npy_float64 x, npy_float64 y, npy_float64 z)
 {
-	double pos[3];
+	npy_float64 pos[3];
 	pos[0] = x;
 	pos[1] = y;
 	pos[2] = z;
@@ -448,14 +448,14 @@
 
 struct kdres *kd_nearest3f(struct kdtree *tree, float x, float y, float z)
 {
-	double pos[3];
+	npy_float64 pos[3];
 	pos[0] = x;
 	pos[1] = y;
 	pos[2] = z;
 	return kd_nearest(tree, pos);
 }
 
-struct kdres *kd_nearest_range(struct kdtree *kd, const double *pos, double range)
+struct kdres *kd_nearest_range(struct kdtree *kd, const npy_float64 *pos, npy_float64 range)
 {
 	int ret;
 	struct kdres *rset;
@@ -481,8 +481,8 @@
 
 struct kdres *kd_nearest_rangef(struct kdtree *kd, const float *pos, float range)
 {
-	static double sbuf[16];
-	double *bptr, *buf = 0;
+	static npy_float64 sbuf[16];
+	npy_float64 *bptr, *buf = 0;
 	int dim = kd->dim;
 	struct kdres *res;
 
@@ -513,9 +513,9 @@
 	return res;
 }
 
-struct kdres *kd_nearest_range3(struct kdtree *tree, double x, double y, double z, double range)
+struct kdres *kd_nearest_range3(struct kdtree *tree, npy_float64 x, npy_float64 y, npy_float64 z, npy_float64 range)
 {
-	double buf[3];
+	npy_float64 buf[3];
 	buf[0] = x;
 	buf[1] = y;
 	buf[2] = z;
@@ -524,7 +524,7 @@
 
 struct kdres *kd_nearest_range3f(struct kdtree *tree, float x, float y, float z, float range)
 {
-	double buf[3];
+	npy_float64 buf[3];
 	buf[0] = x;
 	buf[1] = y;
 	buf[2] = z;
@@ -559,7 +559,7 @@
 	return rset->riter != 0;
 }
 
-void *kd_res_item(struct kdres *rset, double *pos)
+void *kd_res_item(struct kdres *rset, npy_float64 *pos)
 {
 	if(rset->riter) {
 		if(pos) {
@@ -584,7 +584,7 @@
 	return 0;
 }
 
-void *kd_res_item3(struct kdres *rset, double *x, double *y, double *z)
+void *kd_res_item3(struct kdres *rset, npy_float64 *x, npy_float64 *y, npy_float64 *z)
 {
 	if(rset->riter) {
 		if(*x) *x = rset->riter->item->pos[0];
@@ -610,9 +610,9 @@
 }
 
 /* ---- hyperrectangle helpers ---- */
-static struct kdhyperrect* hyperrect_create(int dim, const double *min, const double *max)
+static struct kdhyperrect* hyperrect_create(int dim, const npy_float64 *min, const npy_float64 *max)
 {
-	size_t size = dim * sizeof(double);
+	size_t size = dim * sizeof(npy_float64);
 	struct kdhyperrect* rect = 0;
 
 	if (!(rect = malloc(sizeof(struct kdhyperrect)))) {
@@ -647,7 +647,7 @@
 	return hyperrect_create(rect->dim, rect->min, rect->max);
 }
 
-static void hyperrect_extend(struct kdhyperrect *rect, const double *pos)
+static void hyperrect_extend(struct kdhyperrect *rect, const npy_float64 *pos)
 {
 	int i;
 
@@ -661,10 +661,10 @@
 	}
 }
 
-static double hyperrect_dist_sq(struct kdhyperrect *rect, const double *pos)
+static npy_float64 hyperrect_dist_sq(struct kdhyperrect *rect, const npy_float64 *pos)
 {
 	int i;
-	double result = 0;
+	npy_float64 result = 0;
 
 	for (i=0; i < rect->dim; i++) {
 		if (pos[i] < rect->min[i]) {
@@ -727,7 +727,7 @@
 
 
 /* inserts the item. if dist_sq is >= 0, then do an ordered insert */
-static int rlist_insert(struct res_node *list, struct kdnode *item, double dist_sq)
+static int rlist_insert(struct res_node *list, struct kdnode *item, npy_float64 dist_sq)
 {
 	struct res_node *rnode;
 


--- a/yt/utilities/_amr_utils/kdtree.h	Thu Jun 16 14:28:01 2011 -0700
+++ b/yt/utilities/_amr_utils/kdtree.h	Thu Jun 16 14:29:11 2011 -0700
@@ -26,6 +26,8 @@
 */
 #ifndef _KDTREE_H_
 #define _KDTREE_H_
+#include "Python.h"
+#include "numpy/ndarrayobject.h"
 
 #ifdef __cplusplus
 extern "C" {
@@ -51,18 +53,18 @@
 void kd_data_destructor(struct kdtree *tree, void (*destr)(void*));
 
 /* insert a node, specifying its position, and optional data */
-int kd_insert(struct kdtree *tree, const double *pos, void *data);
+int kd_insert(struct kdtree *tree, const npy_float64 *pos, void *data);
 int kd_insertf(struct kdtree *tree, const float *pos, void *data);
-int kd_insert3(struct kdtree *tree, double x, double y, double z, void *data);
+int kd_insert3(struct kdtree *tree, npy_float64 x, npy_float64 y, npy_float64 z, void *data);
 int kd_insert3f(struct kdtree *tree, float x, float y, float z, void *data);
 
 /* Find one of the nearest nodes from the specified point.
  *
  * This function returns a pointer to a result set with at most one element.
  */
-struct kdres *kd_nearest(struct kdtree *tree, const double *pos);
+struct kdres *kd_nearest(struct kdtree *tree, const npy_float64 *pos);
 struct kdres *kd_nearestf(struct kdtree *tree, const float *pos);
-struct kdres *kd_nearest3(struct kdtree *tree, double x, double y, double z);
+struct kdres *kd_nearest3(struct kdtree *tree, npy_float64 x, npy_float64 y, npy_float64 z);
 struct kdres *kd_nearest3f(struct kdtree *tree, float x, float y, float z);
 
 /* Find any nearest nodes from the specified point within a range.
@@ -73,9 +75,9 @@
  * a valid result set is always returned which may contain 0 or more elements.
  * The result set must be deallocated with kd_res_free, after use.
  */
-struct kdres *kd_nearest_range(struct kdtree *tree, const double *pos, double range);
+struct kdres *kd_nearest_range(struct kdtree *tree, const npy_float64 *pos, npy_float64 range);
 struct kdres *kd_nearest_rangef(struct kdtree *tree, const float *pos, float range);
-struct kdres *kd_nearest_range3(struct kdtree *tree, double x, double y, double z, double range);
+struct kdres *kd_nearest_range3(struct kdtree *tree, npy_float64 x, npy_float64 y, npy_float64 z, npy_float64 range);
 struct kdres *kd_nearest_range3f(struct kdtree *tree, float x, float y, float z, float range);
 
 /* frees a result set returned by kd_nearest_range() */
@@ -98,9 +100,9 @@
 /* returns the data pointer (can be null) of the current result set item
  * and optionally sets its position to the pointers(s) if not null.
  */
-void *kd_res_item(struct kdres *set, double *pos);
+void *kd_res_item(struct kdres *set, npy_float64 *pos);
 void *kd_res_itemf(struct kdres *set, float *pos);
-void *kd_res_item3(struct kdres *set, double *x, double *y, double *z);
+void *kd_res_item3(struct kdres *set, npy_float64 *x, npy_float64 *y, npy_float64 *z);
 void *kd_res_item3f(struct kdres *set, float *x, float *y, float *z);
 
 /* equivalent to kd_res_item(set, 0) */


--- a/yt/utilities/_amr_utils/kdtree_utils.pxd	Thu Jun 16 14:28:01 2011 -0700
+++ b/yt/utilities/_amr_utils/kdtree_utils.pxd	Thu Jun 16 14:29:11 2011 -0700
@@ -34,18 +34,18 @@
     kdtree *kd_create(int k)
     void kd_free(kdtree *tree)
     
-    int kd_insert3(kdtree *tree, double x, double y, double z, void *data)
-    kdres *kd_nearest3(kdtree *tree, double x, double y, double z)
+    int kd_insert3(kdtree *tree, np.float64_t x, np.float64_t y, np.float64_t z, void *data)
+    kdres *kd_nearest3(kdtree *tree, np.float64_t x, np.float64_t y, np.float64_t z)
 
-    kdres *kd_nearest_range3(kdtree *tree, double x, double y, double z,
-                             double range)
+    kdres *kd_nearest_range3(kdtree *tree, np.float64_t x, np.float64_t y, np.float64_t z,
+                             np.float64_t range)
 
     void kd_res_free(kdres *set)
     int kd_res_size(kdres *set)
     int kd_res_next(kdres *set)
     void kd_res_rewind(kdres *set)
 
-    void kd_res_item3(kdres *set, double *x, double *y, double *z)
+    void kd_res_item3(kdres *set, np.float64_t *x, np.float64_t *y, np.float64_t *z)
     void *kd_res_item_data(kdres *set)
 
     void kd_data_destructor(kdtree *tree, void (*destr)(void*))


--- a/yt/utilities/_amr_utils/misc_utilities.pyx	Thu Jun 16 14:28:01 2011 -0700
+++ b/yt/utilities/_amr_utils/misc_utilities.pyx	Thu Jun 16 14:29:11 2011 -0700
@@ -67,8 +67,8 @@
             continue
         inside = 1
         for n in range(3):
-            if left_edge[n] > right_edges[i,n] or \
-               right_edge[n] < left_edges[i,n]:
+            if left_edge[n] >= right_edges[i,n] or \
+               right_edge[n] <= left_edges[i,n]:
                 inside = 0
                 break
         if inside == 1: mask[i] = 1
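
The hunk above tightens the grid-overlap test from strict to inclusive
inequalities, so grids that merely share a face or an edge no longer count
as overlapping. A minimal Python sketch of the updated predicate (the
function and argument names are illustrative, not from the source):

    def grids_overlap(left_edge, right_edge, left_edges, right_edges, i):
        # Mirrors the updated Cython test: touching at a boundary
        # (left_edge[n] >= right_edges[i][n]) now disqualifies the pair.
        for n in range(3):
            if left_edge[n] >= right_edges[i][n] or \
               right_edge[n] <= left_edges[i][n]:
                return False
        return True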


--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/yt/utilities/_amr_utils/union_find.c	Thu Jun 16 14:29:11 2011 -0700
@@ -0,0 +1,66 @@
+/* Copyright (c) 2011 the authors listed at the following URL, and/or
+the authors of referenced articles or incorporated external code:
+http://en.literateprograms.org/Disjoint_set_data_structure_(C)?action=history&offset=20080516180553
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+Retrieved from: http://en.literateprograms.org/Disjoint_set_data_structure_(C)?oldid=13366
+*/
+
+#include <stdlib.h>
+
+#include "union_find.h"
+
+forest_node* MakeSet(void* value) {
+    forest_node* node = malloc(sizeof(forest_node));
+    node->value = value;
+    node->parent = NULL;
+    node->rank = 0;
+    return node;
+}
+
+void Union(forest_node* node1, forest_node* node2) {
+    if (node1->rank > node2->rank) {
+        node2->parent = node1;
+    } else if (node2->rank > node1->rank) {
+        node1->parent = node2;
+    } else { /* they are equal */
+        node2->parent = node1;
+        node1->rank++;
+    }
+}
+
+forest_node* Find(forest_node* node) {
+    forest_node* temp;
+    /* Find the root */
+    forest_node* root = node;
+    while (root->parent != NULL) {
+        root = root->parent;
+    }
+    /* Update the parent pointers */
+    while (node->parent != NULL) {
+        temp = node->parent;
+        node->parent = root;
+        node = temp;
+    }
+    return root;
+}
+
+


--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/yt/utilities/_amr_utils/union_find.h	Thu Jun 16 14:29:11 2011 -0700
@@ -0,0 +1,41 @@
+/* Copyright (c) 2011 the authors listed at the following URL, and/or
+the authors of referenced articles or incorporated external code:
+http://en.literateprograms.org/Disjoint_set_data_structure_(C)?action=history&offset=20080516180553
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+Retrieved from: http://en.literateprograms.org/Disjoint_set_data_structure_(C)?oldid=13366
+*/
+
+#ifndef _UNION_FIND_H_
+#define _UNION_FIND_H_
+
+typedef struct forest_node_t {
+    void* value;
+    struct forest_node_t* parent;
+    int rank;
+} forest_node;
+
+forest_node* MakeSet(void* value);
+void Union(forest_node* node1, forest_node* node2);
+forest_node* Find(forest_node* node);
+
+#endif /* #ifndef _UNION_FIND_H_ */
+
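
For orientation: the two files above are the textbook disjoint-set
(union-find) structure, with union by rank and path compression, that
ContourFinding.pyx declares through its extern block earlier in this
changeset. A rough Python equivalent of MakeSet/Find/Union (note that,
as in the C code, Union assumes it is handed set representatives):

    class ForestNode(object):
        # MakeSet: a fresh node is its own singleton set.
        def __init__(self, value):
            self.value = value
            self.parent = None
            self.rank = 0

    def find(node):
        # Locate the root, then compress the path behind us.
        root = node
        while root.parent is not None:
            root = root.parent
        while node.parent is not None:
            node.parent, node = root, node.parent
        return root

    def union(root1, root2):
        # Union by rank; callers pass roots, e.g. union(find(a), find(b)).
        if root1.rank > root2.rank:
            root2.parent = root1
        elif root2.rank > root1.rank:
            root1.parent = root2
        else:
            root2.parent = root1
            root1.rank += 1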


--- a/yt/utilities/command_line.py	Thu Jun 16 14:28:01 2011 -0700
+++ b/yt/utilities/command_line.py	Thu Jun 16 14:29:11 2011 -0700
@@ -556,7 +556,8 @@
         """
         pf = _fix_pf(arg)
         pf.h.print_stats()
-        v, c = pf.h.find_max("Density")
-        print "Maximum density: %0.5e at %s" % (v, c)
+        if "Density" in pf.h.field_list:
+            v, c = pf.h.find_max("Density")
+            print "Maximum density: %0.5e at %s" % (v, c)
         if opts.output is not None:
             t = pf.current_time * pf['years']
@@ -1246,6 +1247,24 @@
         while 1:
             time.sleep(1)
 
+    def do_intents(self, subcmd, opts, *intents):
+        """
+        ${cmd_name}: What are your ... intentions?
+
+        ${cmd_usage}
+        ${cmd_option_list}
+        """
+        from yt.utilities.cookbook import Intent
+        if len(intents) == 0:
+            Intent.list_intents()
+        else:
+            intent = Intent.select_intent(intents[0])
+            if intent is None:
+                print "Could not find %s" % intents[0]
+                return 1
+            intent_inst = intent(intents[1:])
+            intent_inst.run()
+
 def run_main():
     for co in ["--parallel", "--paste"]:
         if co in sys.argv: del sys.argv[sys.argv.index(co)]
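
The new "intents" subcommand is a thin shell around the cookbook module
added below. A hedged sketch of the equivalent direct calls, with
"my_recipe" standing in for a real intent name:

    from yt.utilities.cookbook import Intent

    Intent.list_intents()                       # no arguments: enumerate recipes
    intent = Intent.select_intent("my_recipe")  # hypothetical recipe name
    if intent is None:
        print "Could not find my_recipe"
    else:
        intent(["arg1", "arg2"]).run()          # remaining CLI args go to __init__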


--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/yt/utilities/cookbook.py	Thu Jun 16 14:29:11 2011 -0700
@@ -0,0 +1,109 @@
+"""
+A way to find and utilize recipes
+
+Author: Matthew Turk <matthewturk at gmail.com>
+Affiliation: KIPAC/SLAC/Stanford
+Homepage: http://yt.enzotools.org/
+License:
+  Copyright (C) 2011 Matthew Turk.  All Rights Reserved.
+
+  This file is part of yt.
+
+  yt is free software; you can redistribute it and/or modify
+  it under the terms of the GNU General Public License as published by
+  the Free Software Foundation; either version 3 of the License, or
+  (at your option) any later version.
+
+  This program is distributed in the hope that it will be useful,
+  but WITHOUT ANY WARRANTY; without even the implied warranty of
+  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+  GNU General Public License for more details.
+
+  You should have received a copy of the GNU General Public License
+  along with this program.  If not, see <http://www.gnu.org/licenses/>.
+"""
+
+# See also:
+#  http://en.wikipedia.org/wiki/Me_(mythology)
+
+import os
+import argparse
+import abc
+import glob
+import imp
+import types
+import sys
+import pprint
+
+def _load_intent(intent_path):
+    mname = os.path.basename(intent_path[:-3])
+    f, filename, desc = imp.find_module(mname,
+            [os.path.dirname(intent_path)])
+    intent = imp.load_module(mname, f, filename, desc)
+    for i in sorted(dir(intent)):
+        obj = getattr(intent, i)
+        if isinstance(obj, type) and issubclass(obj, Intent) and \
+           isinstance(obj.desc, types.StringTypes):
+            return obj
+    return None
+
+def _find_cookbook_dir():
+    yt_dest = os.environ.get("YT_DEST", None)
+    if yt_dest is None:
+        print "YT_DEST is not set!  Set it and try again."
+        return False
+    cookbook_dir = os.path.join(yt_dest,
+        "src/yt-supplemental/yt-cookbook/intents")
+    if not os.path.isdir(cookbook_dir):
+        print "Cookbook does not contain 'intents' directory."
+        print "Update with 'yt instinfo -u' and try again."
+        print "(%s)" % cookbook_dir
+        return False
+    return cookbook_dir
+
+class Intent(object):
+    __metaclass__ = abc.ABCMeta
+
+    def __init__(self, args):
+        self.args = args
+        if "help" in self.args:
+            print
+            print "The arguments to supply, in order:"
+            print
+            print self.help
+            print
+            sys.exit()
+
+    @abc.abstractmethod
+    def run(self):
+        pass
+
+    @abc.abstractproperty
+    def desc(self): pass
+
+    @abc.abstractproperty
+    def help(self): pass
+
+    @classmethod
+    def list_intents(cls):
+        intents = []
+        cookbook_dir = _find_cookbook_dir()
+        if cookbook_dir is False: return 1
+        for fn in sorted(glob.glob(os.path.join(cookbook_dir, "*"))):
+            # We skim them, looking for the 'Intent' subclass
+            if any(("(Intent):" in line for line in open(fn))):
+                intents.append((os.path.basename(fn)[:-3],
+                                _load_intent(fn)))
+        print
+        print "Found these Intents:"
+        print "\n".join(("% 15s: %s" % (a, b.desc) for a, b in intents))
+        print
+
+    @classmethod
+    def select_intent(cls, intent_name):
+        cookbook_dir = _find_cookbook_dir()
+        intent = None
+        for fn in glob.glob(os.path.join(cookbook_dir, "*")):
+            if os.path.basename(fn)[:-3] == intent_name:
+                intent = _load_intent(fn)
+        return intent
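
Since Intent is an abstract base class, a cookbook recipe must provide
run() plus string desc and help attributes before _load_intent will pick
it up. A minimal, hypothetical subclass (not part of this changeset)
might look like:

    class PrintStatsIntent(Intent):
        desc = "Print hierarchy statistics for a parameter file."
        help = "path to the parameter file"

        def run(self):
            from yt.mods import load
            pf = load(self.args[0])
            pf.h.print_stats()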


--- a/yt/utilities/data_point_utilities.c	Thu Jun 16 14:28:01 2011 -0700
+++ b/yt/utilities/data_point_utilities.c	Thu Jun 16 14:29:11 2011 -0700
@@ -880,7 +880,7 @@
     npy_int64 gxs, gys, gzs, gxe, gye, gze;
     npy_int64 cxs, cys, czs, cxe, cye, cze;
     npy_int64 ixs, iys, izs, ixe, iye, ize;
-    npy_int64 gxi, gyi, gzi, cxi, cyi, czi;
+    int gxi, gyi, gzi, cxi, cyi, czi;
     npy_int64 cdx, cdy, cdz;
     npy_int64 dw[3];
     int i;
@@ -1014,17 +1014,17 @@
         ci = (cxi % dw[0]);
         ci = (ci < 0) ? ci + dw[0] : ci;
         if ( ci < gxs*refratio || ci >= gxe*refratio) continue;
-        gxi = floor(ci / refratio) - gxs;
+        gxi = ((int) (ci / refratio)) - gxs;
         for(cyi=cys;cyi<=cye;cyi++) {
             cj = cyi % dw[1];
             cj = (cj < 0) ? cj + dw[1] : cj;
             if ( cj < gys*refratio || cj >= gye*refratio) continue;
-            gyi = floor(cj / refratio) - gys;
+            gyi = ((int) (cj / refratio)) - gys;
             for(czi=czs;czi<=cze;czi++) {
                 ck = czi % dw[2];
                 ck = (ck < 0) ? ck + dw[2] : ck;
                 if ( ck < gzs*refratio || ck >= gze*refratio) continue;
-                gzi = floor(ck / refratio) - gzs;
+                gzi = ((int) (ck / refratio)) - gzs;
                     if ((ll) || (*(npy_int32*)PyArray_GETPTR3(mask, gxi,gyi,gzi) > 0)) 
                 {
                 for(n=0;n<n_fields;n++){
@@ -1214,43 +1214,75 @@
     cye = (cys + cdy - 1);
     cze = (czs + cdz - 1);
 
-    /* It turns out that C89 doesn't define a mechanism for choosing the sign
-       of the remainder.
-    */
     int x_loc, y_loc; // For access into the buffer
-    for(cxi=cxs;cxi<=cxe;cxi++) {
+
+    /* We check here if the domain is important or not.
+       If it's not, then, well, we get to use the fast version. */
+    if (dw[0] == 0 && dw[1] == 0 && dw[2] == 0) {
+      for(gxi=gxs,cxi=gxs*refratio;gxi<gxe;gxi++,cxi+=refratio) {
+        for(gyi=gys,cyi=gys*refratio;gyi<gye;gyi++,cyi+=refratio) {
+          for(gzi=gzs,czi=gzs*refratio;gzi<gze;gzi++,czi+=refratio) {
+            if ((refratio!=1) &&
+                (*(npy_int32*)PyArray_GETPTR3(mask, gxi,gyi,gzi)==0)) continue;
+            switch (axis) {
+              case 0: x_loc = cyi-cys; y_loc = czi-czs; break;
+              case 1: x_loc = cxi-cxs; y_loc = czi-czs; break;
+              case 2: x_loc = cxi-cys; y_loc = cyi-cys; break;
+            }
+            //fprintf(stderr, "%d %d %d %d %d\n", x_loc, y_loc, gxi, gyi, gzi);
+            for(ri=0;ri<refratio;ri++){
+              for(rj=0;rj<refratio;rj++){
+                for(n=0;n<n_fields;n++){
+                  *(npy_float64*) PyArray_GETPTR2(c_data[n], x_loc+ri, y_loc+rj)
+                    +=  *(npy_float64*) PyArray_GETPTR3(g_data[n],
+                        gxi-gxs, gyi-gys, gzi-gzs) * dls[n];
+                }
+              }
+            }
+            total+=1;
+          }
+        }
+      }
+    } else {
+      /* Gotta go the slow route. */
+      for(cxi=gxs*refratio;cxi<=cxe;cxi++) {
+        /* It turns out that C89 doesn't define a mechanism for choosing the sign
+           of the remainder.
+         */
         ci = (cxi % dw[0]);
         ci = (ci < 0) ? ci + dw[0] : ci;
-        if ( ci < gxs*refratio || ci >= gxe*refratio) continue;
+        if ( ci >= gxe*refratio) break;
         gxi = floor(ci / refratio) - gxs;
-        for(cyi=cys;cyi<=cye;cyi++) {
-            cj = cyi % dw[1];
-            cj = (cj < 0) ? cj + dw[1] : cj;
-            if ( cj < gys*refratio || cj >= gye*refratio) continue;
-            gyi = floor(cj / refratio) - gys;
-            for(czi=czs;czi<=cze;czi++) {
-                ck = czi % dw[2];
-                ck = (ck < 0) ? ck + dw[2] : ck;
-                if ( ck < gzs*refratio || ck >= gze*refratio) continue;
-                gzi = floor(ck / refratio) - gzs;
-                    if (refratio == 1 || *(npy_int32*)PyArray_GETPTR3(mask, gxi,gyi,gzi) > 0)
-                {
-                switch (axis) {
-                  case 0: x_loc = cyi-cys; y_loc = czi-czs; break;
-                  case 1: x_loc = cxi-cxs; y_loc = czi-czs; break;
-                  case 2: x_loc = cxi-cys; y_loc = cyi-cys; break;
-                }
-                for(n=0;n<n_fields;n++){
-                    *(npy_float64*) PyArray_GETPTR2(c_data[n], x_loc, y_loc)
-                    +=  *(npy_float64*) PyArray_GETPTR3(g_data[n], gxi, gyi, gzi) 
-                        * dls[n] / refratio;
-                }
-                total += 1;
-                }
+        for(cyi=gys*refratio;cyi<=cye;cyi++) {
+          cj = cyi % dw[1];
+          cj = (cj < 0) ? cj + dw[1] : cj;
+          if ( cj >= gye*refratio) break;
+          gyi = floor(cj / refratio) - gys;
+          for(czi=gzs*refratio;czi<=cze;czi++) {
+            ck = czi % dw[2];
+            ck = (ck < 0) ? ck + dw[2] : ck;
+            if ( ck >= gze*refratio) break;
+            gzi = floor(ck / refratio) - gzs;
+            if (refratio == 1 || *(npy_int32*)PyArray_GETPTR3(mask, gxi,gyi,gzi) > 0)
+            {
+              switch (axis) {
+                case 0: x_loc = cyi-cys; y_loc = czi-czs; break;
+                case 1: x_loc = cxi-cxs; y_loc = czi-czs; break;
+                case 2: x_loc = cxi-cys; y_loc = cyi-cys; break;
+              }
+              for(n=0;n<n_fields;n++){
+                *(npy_float64*) PyArray_GETPTR2(c_data[n], x_loc, y_loc)
+                  +=  *(npy_float64*) PyArray_GETPTR3(g_data[n], gxi, gyi, gzi) 
+                  * dls[n] / refratio;
+              }
+              total += 1;
             }
+          }
         }
+      }
     }
-
     Py_DECREF(g_start);
     Py_DECREF(c_start);
     Py_DECREF(g_dims);
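
The C89 remainder comment (moved into the slow path above) is the key to
both loops: C's % operator may return a negative remainder for negative
operands, hence the fold-back line after each modulo. A small Python
sketch of that correction (Python's own % already returns values in
[0, m) for positive m):

    import math

    def c89_mod(a, m):
        # math.fmod reproduces C's sign convention: the result follows a.
        r = math.fmod(a, m)
        return r + m if r < 0 else r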


--- a/yt/utilities/setup.py	Thu Jun 16 14:28:01 2011 -0700
+++ b/yt/utilities/setup.py	Thu Jun 16 14:29:11 2011 -0700
@@ -161,7 +161,8 @@
     config.add_extension("amr_utils", 
         ["yt/utilities/amr_utils.pyx",
          "yt/utilities/_amr_utils/FixedInterpolator.c",
-         "yt/utilities/_amr_utils/kdtree.c"] +
+         "yt/utilities/_amr_utils/kdtree.c",
+         "yt/utilities/_amr_utils/union_find.c"] +
          glob.glob("yt/utilities/_amr_utils/healpix_*.c"), 
         define_macros=[("PNG_SETJMP_NOT_SUPPORTED", True)],
         include_dirs=["yt/utilities/_amr_utils/", png_inc,
@@ -172,6 +173,10 @@
                 glob.glob("yt/utilities/_amr_utils/*.h") +
                 glob.glob("yt/utilities/_amr_utils/*.c"),
         )
+    #config.add_extension("voropp",
+    #    ["yt/utilities/voropp.pyx"],
+    #    language="c++",
+    #    include_dirs=["yt/utilities/voro++"])
     config.add_extension("libconfig_wrapper", 
         ["yt/utilities/libconfig_wrapper.pyx"] +
          glob.glob("yt/utilities/_libconfig/*.c"), 


--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/yt/utilities/voropp.pyx	Thu Jun 16 14:29:11 2011 -0700
@@ -0,0 +1,69 @@
+"""
+Wrapping code for voro++
+
+Author: Matthew Turk <matthewturk at gmail.com>
+Affiliation: Columbia University
+Homepage: http://yt.enzotools.org/
+License:
+  Copyright (C) 2011 Matthew Turk.  All Rights Reserved.
+
+  This file is part of yt.
+
+  yt is free software; you can redistribute it and/or modify
+  it under the terms of the GNU General Public License as published by
+  the Free Software Foundation; either version 3 of the License, or
+  (at your option) any later version.
+
+  This program is distributed in the hope that it will be useful,
+  but WITHOUT ANY WARRANTY; without even the implied warranty of
+  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+  GNU General Public License for more details.
+
+  You should have received a copy of the GNU General Public License
+  along with this program.  If not, see <http://www.gnu.org/licenses/>.
+"""
+
+from cython.operator cimport dereference as deref, preincrement as inc
+from libc.stdlib cimport malloc, free, abs, calloc, labs
+cimport libcpp
+
+import numpy as np
+cimport numpy as np
+cimport cython
+
+cdef extern from "voro++.cc":
+    cdef cppclass container:
+        container(double xmin, double xmax, double ymin, double ymax,
+                  double zmin, double zmax, int nx, int ny, int nz,
+                  libcpp.bool xper, libcpp.bool yper, libcpp.bool zper, int alloc)
+        void put(int n, double x, double y, double z)
+        void store_cell_volumes(double *vols)
+
+cdef class VoronoiVolume:
+    cdef container *my_con
+    cdef int npart
+    def __init__(self, xi, yi, zi):
+        self.my_con = new container(0.0, 1.0, 0.0, 1.0, 0.0, 1.0,
+                                    xi, yi, zi, False, False, False, 8)
+        self.npart = 0
+
+    def __dealloc__(self):
+        del self.my_con
+
+    @cython.boundscheck(False)
+    @cython.wraparound(False)
+    def add_array(self, np.ndarray[np.float64_t, ndim=1] xpos,
+                        np.ndarray[np.float64_t, ndim=1] ypos,
+                        np.ndarray[np.float64_t, ndim=1] zpos):
+        cdef int i
+        for i in range(xpos.shape[0]):
+            self.my_con.put(self.npart, xpos[i], ypos[i], zpos[i])
+            self.npart += 1
+
+    @cython.boundscheck(False)
+    @cython.wraparound(False)
+    def get_volumes(self):
+        cdef np.ndarray vol = np.zeros(self.npart, 'double')
+        cdef double *vdouble = <double *> vol.data
+        self.my_con.store_cell_volumes(vdouble)
+        return vol
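
The wrapper's surface is small. A hedged usage sketch, assuming the
extension is actually built (the setup.py hunk above still has it
commented out) and that the import path below is where it would land:

    import numpy as np
    from yt.utilities.voropp import VoronoiVolume  # hypothetical until built

    pos = np.random.random((1000, 3))   # particles in the unit cube
    vv = VoronoiVolume(8, 8, 8)         # 8x8x8 internal blocks for voro++
    vv.add_array(pos[:, 0].copy(), pos[:, 1].copy(), pos[:, 2].copy())
    vols = vv.get_volumes()             # one Voronoi cell volume per particle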


--- a/yt/visualization/_MPL.c	Thu Jun 16 14:28:01 2011 -0700
+++ b/yt/visualization/_MPL.c	Thu Jun 16 14:29:11 2011 -0700
@@ -52,7 +52,9 @@
 static PyObject* Py_Pixelize(PyObject *obj, PyObject *args) {
 
   PyObject *xp, *yp, *dxp, *dyp, *dp;
+  PyArrayObject *x, *y, *dx, *dy, *d;
   xp = yp = dxp = dyp = dp = NULL;
+  x = y = dx = dy = d = NULL;
   unsigned int rows, cols;
   int antialias = 1;
   double x_min, x_max, y_min, y_max;
@@ -78,34 +80,34 @@
       PyErr_Format( _pixelizeError, "Cannot scale to zero size.");
 
   // Get numeric arrays
-  PyArrayObject *x = (PyArrayObject *) PyArray_FromAny(xp,
+  x = (PyArrayObject *) PyArray_FromAny(xp,
             PyArray_DescrFromType(NPY_FLOAT64), 1, 1, 0, NULL);
   if (x == NULL) {
       PyErr_Format( _pixelizeError, "x is of incorrect type (wanted 1D float)");
       goto _fail;
   }
 
-  PyArrayObject *y = (PyArrayObject *) PyArray_FromAny(yp,
+  y = (PyArrayObject *) PyArray_FromAny(yp,
             PyArray_DescrFromType(NPY_FLOAT64), 1, 1, 0, NULL);
   if ((y == NULL) || (PyArray_SIZE(y) != PyArray_SIZE(x))) {
       PyErr_Format( _pixelizeError, "y is of incorrect type (wanted 1D float)");
       goto _fail;
   }
 
-  PyArrayObject *d = (PyArrayObject *) PyArray_FromAny(dp,
+  d = (PyArrayObject *) PyArray_FromAny(dp,
             PyArray_DescrFromType(NPY_FLOAT64), 1, 1, 0, NULL);
   if ((d == NULL) || (PyArray_SIZE(d) != PyArray_SIZE(x))) {
       PyErr_Format( _pixelizeError, "data is of incorrect type (wanted 1D float)");
       goto _fail;
   }
 
-  PyArrayObject *dx = (PyArrayObject *) PyArray_FromAny(dxp,
+  dx = (PyArrayObject *) PyArray_FromAny(dxp,
             PyArray_DescrFromType(NPY_FLOAT64), 1, 1, 0, NULL);
   if ((dx == NULL) || (PyArray_SIZE(dx) != PyArray_SIZE(x))) {
       PyErr_Format( _pixelizeError, "dx is of incorrect type (wanted 1D float)");
       goto _fail;
   }
-  PyArrayObject *dy = (PyArrayObject *) PyArray_FromAny(dyp,
+  dy = (PyArrayObject *) PyArray_FromAny(dyp,
             PyArray_DescrFromType(NPY_FLOAT64), 1, 1, 0, NULL);
   if ((dy == NULL) || (PyArray_SIZE(dy) != PyArray_SIZE(x))) {
       PyErr_Format( _pixelizeError, "dy is of incorrect type (wanted 1D float)");


--- a/yt/visualization/eps_writer.py	Thu Jun 16 14:28:01 2011 -0700
+++ b/yt/visualization/eps_writer.py	Thu Jun 16 14:29:11 2011 -0700
@@ -279,7 +279,7 @@
         if isinstance(plot, VMPlot):
             if units == None:
                 # Determine the best units
-                astro_units = ['cm', 'rsun', 'au', 'pc', 'kpc', 'Mpc']
+                astro_units = ['cm', 'rsun', 'au', 'pc', 'kpc', 'mpc']
                 best_fit = 0
                 while plot.width*plot.pf[astro_units[best_fit]] > 1e3 and \
                           best_fit < len(astro_units):


--- a/yt/visualization/image_writer.py	Thu Jun 16 14:28:01 2011 -0700
+++ b/yt/visualization/image_writer.py	Thu Jun 16 14:29:11 2011 -0700
@@ -29,7 +29,7 @@
 import _colormap_data as cmd
 import yt.utilities.amr_utils as au
 
-def scale_image(image):
+def scale_image(image, mi=None, ma=None):
     r"""Scale an image ([NxNxM] where M = 1-4) to be uint8 and values scaled 
     from [0,255].
 
@@ -41,13 +41,17 @@
     --------
 
         >>> image = scale_image(image)
+
+        >>> image = scale_image(image, mi=0, ma=1000)
     """
     if isinstance(image, na.ndarray) and image.dtype == na.uint8:
         return image
     if isinstance(image, (types.TupleType, types.ListType)):
         image, mi, ma = image
-    else:
-        mi, ma = image.min(), image.max()
+    if mi is None:
+        mi = image.min()
+    if ma is None:
+        ma = image.max()
     image = (na.clip((image-mi)/(ma-mi) * 255, 0, 255)).astype('uint8')
     return image
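
With the new keywords the caller can pin the scaling range rather than
letting the image's own extrema choose it; anything outside [mi, ma]
saturates. A small sketch (the array shape is chosen only for
illustration):

    import numpy as na   # numpy, under yt's customary alias

    img = na.linspace(-200.0, 1200.0, 12).reshape(4, 3, 1)
    out = scale_image(img, mi=0.0, ma=1000.0)
    # out is uint8; entries below mi clip to 0, entries above ma to 255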
 


--- a/yt/visualization/volume_rendering/camera.py	Thu Jun 16 14:28:01 2011 -0700
+++ b/yt/visualization/volume_rendering/camera.py	Thu Jun 16 14:29:11 2011 -0700
@@ -31,12 +31,14 @@
 from .transfer_functions import ProjectionTransferFunction
 
 from yt.utilities.amr_utils import TransferFunctionProxy, VectorPlane, \
-    arr_vec2pix_nest, arr_pix2vec_nest, AdaptiveRaySource
+    arr_vec2pix_nest, arr_pix2vec_nest, AdaptiveRaySource, \
+    arr_ang2pix_nest
 from yt.visualization.image_writer import write_bitmap
 from yt.data_objects.data_containers import data_object_registry
 from yt.utilities.parallel_tools.parallel_analysis_interface import \
     ParallelAnalysisInterface
 from yt.utilities.amr_kdtree.api import AMRKDTree
+from numpy import pi
 
 class Camera(ParallelAnalysisInterface):
     def __init__(self, center, normal_vector, width,
@@ -318,7 +320,7 @@
                                       self.unit_vectors[1])
         return vector_plane
 
-    def snapshot(self, fn = None):
+    def snapshot(self, fn = None, clip_ratio = None):
         r"""Ray-cast the camera.
 
         This method instructs the camera to take a snapshot -- i.e., call the ray
@@ -329,6 +331,9 @@
         fn : string, optional
             If supplied, the image will be saved out to this before being
             returned.  Scaling will be to the maximum value.
+        clip_ratio : float, optional
+            If supplied, the 'max_val' argument to write_bitmap will be handed
+            clip_ratio * image.std()
 
         Returns
         -------
@@ -352,7 +357,10 @@
         pbar.finish()
 
         if self._mpi_get_rank() is 0 and fn is not None:
-            write_bitmap(image, fn)
+            if clip_ratio is not None:
+                write_bitmap(image, fn, clip_ratio*image.std())
+            else:
+                write_bitmap(image, fn)
 
         return image
 
@@ -598,6 +606,24 @@
             pbar.update(total_cells)
         pbar.finish()
 
+        if self._mpi_get_rank() is 0 and fn is not None:
+            # This assumes Density; this is a relatively safe assumption.
+            import matplotlib.figure
+            import matplotlib.backends.backend_agg
+            phi, theta = na.mgrid[0.0:2*pi:800j, 0:pi:800j]
+            pixi = arr_ang2pix_nest(self.nside, theta.ravel(), phi.ravel())
+            image *= self.radius * self.pf['cm']
+            img = na.log10(image[:,0,0][pixi]).reshape((800,800))
+
+            fig = matplotlib.figure.Figure((10, 5))
+            ax = fig.add_subplot(1,1,1,projection='mollweide')
+            implot = ax.imshow(img, extent=(-pi,pi,-pi/2,pi/2), clip_on=False, aspect=0.5)
+            cb = fig.colorbar(implot, orientation='horizontal')
+            cb.set_label(r"$\mathrm{Column}\/\mathrm{Density}\/[\mathrm{g}/\mathrm{cm}^2]$")
+            ax.xaxis.set_ticks(())
+            ax.yaxis.set_ticks(())
+            canvas = matplotlib.backends.backend_agg.FigureCanvasAgg(fig)
+            canvas.print_figure(fn)
         return image
 
 


--- a/yt/visualization/volume_rendering/grid_partitioner.py	Thu Jun 16 14:28:01 2011 -0700
+++ b/yt/visualization/volume_rendering/grid_partitioner.py	Thu Jun 16 14:29:11 2011 -0700
@@ -403,7 +403,7 @@
 
 def import_partitioned_grids(fn, int_type=na.int64, float_type=na.float64):
     f = h5py.File(fn, "r")
-    n_groups = len(f.listnames())
+    n_groups = len(f)
     grid_list = []
     dims = f["/PGrids/Dims"][:].astype(int_type)
     left_edges = f["/PGrids/LeftEdges"][:].astype(float_type)
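
h5py File objects implement the mapping protocol, so len(f) counts the
file's top-level members directly and the deprecated listnames() call is
no longer needed. For instance:

    import h5py

    f = h5py.File("partitioned_grids.h5", "r")  # hypothetical file name
    n_groups = len(f)   # number of top-level groups/datasets
    f.close()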


--- a/yt/visualization/volume_rendering/transfer_functions.py	Thu Jun 16 14:28:01 2011 -0700
+++ b/yt/visualization/volume_rendering/transfer_functions.py	Thu Jun 16 14:29:11 2011 -0700
@@ -547,7 +547,7 @@
             if mi is None: mi = col_bounds[0] + dist/(10.0*N)
             if ma is None: ma = col_bounds[1] - dist/(10.0*N)
         if w is None: w = 0.001 * (ma-mi)/N
-        if alpha is None: alpha = na.logspace(-2.0, 0.0, N)
+        if alpha is None: alpha = na.logspace(-3.0, 0.0, N)
         for v, a in zip(na.mgrid[mi:ma:N*1j], alpha):
             self.sample_colormap(v, w, a, colormap=colormap, col_bounds=col_bounds)
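
For reference, the new default opacity ramp spans three decades instead
of two; for N = 10 layers it produces:

    import numpy as na
    alpha = na.logspace(-3.0, 0.0, 10)   # 0.001 ... 1.0, log-spaced layers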
 


http://bitbucket.org/yt_analysis/yt/changeset/5a866887c58a/
changeset:   5a866887c58a
branch:      deliberate_fields
user:        caseywstark
date:        2011-06-20 21:53:34
summary:     Readability cleanup: PEP 8 and PEP 257.
affected #:  1 file (-1 bytes)

--- a/yt/analysis_modules/halo_finding/halo_objects.py	Thu Jun 16 14:29:11 2011 -0700
+++ b/yt/analysis_modules/halo_finding/halo_objects.py	Mon Jun 20 12:53:34 2011 -0700
@@ -60,6 +60,7 @@
     """
     A data source that returns particle information about the members of a
     HOP-identified halo.
+
     """
     __metaclass__ = ParallelDummy # This will proxy up our methods
     _distributed = False
@@ -70,16 +71,18 @@
     extra_wrap = ["__getitem__"]
 
     def __init__(self, halo_list, id, indices = None, size=None, CoM=None,
-        max_dens_point=None, group_total_mass=None, max_radius=None, bulk_vel=None,
-        tasks=None, rms_vel=None):
+                 max_dens_point=None, group_total_mass=None, max_radius=None,
+                 bulk_vel=None, tasks=None, rms_vel=None):
         self._max_dens = halo_list._max_dens
         self.id = id
         self.data = halo_list._data_source
         self.pf = self.data.pf
+
         if indices is not None:
             self.indices = halo_list._base_indices[indices]
         else:
             self.indices = None
+
         # We assume that if indices = None, the instantiator has OTHER plans
         # for us -- i.e., setting it somehow else
         self.size = size
@@ -94,13 +97,10 @@
         self.overdensity = None
 
     def center_of_mass(self):
-        r"""Calculate and return the center of mass.
+        """Calculate and return the center of mass.
 
         The center of mass of the halo is directly calculated and returned.
-        
-        Examples
-        --------
-        >>> com = halos[0].center_of_mass()
+
         """
         c_vec = self.maximum_density_location() - na.array([0.5,0.5,0.5])
         pm = self["ParticleMassMsun"]
@@ -119,6 +119,7 @@
         Examples
         --------
         >>> max_dens = halos[0].maximum_density()
+
         """
         return self._max_dens[self.id][0]
 
@@ -131,6 +132,7 @@
         Examples
         --------
         >>> max_dens_loc = halos[0].maximum_density_location()
+
         """
         return na.array([
                 self._max_dens[self.id][1],
@@ -146,6 +148,7 @@
         Examples
         --------
         >>> halos[0].total_mass()
+
         """
         return self["ParticleMassMsun"].sum()
 
@@ -158,6 +161,7 @@
         Examples
         --------
         >>> bv = halos[0].bulk_velocity()
+
         """
         pm = self["ParticleMassMsun"]
         vx = (self["particle_velocity_x"] * pm).sum()
@@ -176,6 +180,7 @@
         Examples
         --------
         >>> rms_vel = halos[0].rms_velocity()
+
         """
         bv = self.bulk_velocity()
         pm = self["ParticleMassMsun"]
@@ -366,8 +371,7 @@
         h = self.pf.hubble_constant
         Om_matter = self.pf.omega_matter
         z = self.pf.current_redshift
-        period = self.pf.domain_right_edge - \
-            self.pf.domain_left_edge
+        period = self.pf.domain_right_edge - self.pf.domain_left_edge
         cm = self.pf["cm"]
         thissize = max(self.size, self.indices.size)
         rho_crit_now = 1.8788e-29 * h**2.0 * Om_matter # g cm^-3


http://bitbucket.org/yt_analysis/yt/changeset/9681ce79965f/
changeset:   9681ce79965f
branch:      deliberate_fields
user:        caseywstark
date:        2011-06-20 22:28:28
summary:     Cleaning up halo_objects module. Decided to pause after parallelHOPHalo (why not ParallelHOPHalo?). There's a lot to do here.
affected #:  1 file (-1 bytes)

--- a/yt/analysis_modules/halo_finding/halo_objects.py	Mon Jun 20 12:53:34 2011 -0700
+++ b/yt/analysis_modules/halo_finding/halo_objects.py	Mon Jun 20 13:28:28 2011 -0700
@@ -97,107 +97,83 @@
         self.overdensity = None
 
     def center_of_mass(self):
-        """Calculate and return the center of mass.
-
-        The center of mass of the halo is directly calculated and returned.
-
-        """
-        c_vec = self.maximum_density_location() - na.array([0.5,0.5,0.5])
+        """ Calculate and return this halo's center of mass. """
+        c_vec = self.maximum_density_location() - na.array([0.5, 0.5, 0.5])
         pm = self["ParticleMassMsun"]
         cx = (self["particle_position_x"] - c_vec[0])
         cy = (self["particle_position_y"] - c_vec[1])
         cz = (self["particle_position_z"] - c_vec[2])
-        com = na.array([v-na.floor(v) for v in [cx,cy,cz]])
+        com = na.array([v - na.floor(v) for v in [cx, cy, cz]])
         return (com*pm).sum(axis=1)/pm.sum() + c_vec
 
     def maximum_density(self):
-        r"""Return the HOP-identified maximum density. Not applicable to
-        FOF halos.
-
-        Return the HOP-identified maximum density. Not applicable to FOF halos.
-
-        Examples
-        --------
-        >>> max_dens = halos[0].maximum_density()
+        """
+        Return the HOP-identified maximum density. Not applicable to FOF halos,
+        yet.
 
         """
         return self._max_dens[self.id][0]
 
     def maximum_density_location(self):
-        r"""Return the location HOP identified as maximally dense. Not
-        applicable to FOF halos.
-
-        Return the location HOP identified as maximally dense.  
-
-        Examples
-        --------
-        >>> max_dens_loc = halos[0].maximum_density_location()
+        """
+        Return the location HOP identified as maximally dense. Not applicable
+        to FOF halos, yet.
 
         """
-        return na.array([
-                self._max_dens[self.id][1],
-                self._max_dens[self.id][2],
-                self._max_dens[self.id][3]])
+        return na.array([self._max_dens[self.id][1],
+                         self._max_dens[self.id][2],
+                         self._max_dens[self.id][3]])
 
     def total_mass(self):
-        r"""Returns the total mass in solar masses of the halo.
-        
+        """
         Returns the total mass in solar masses of just the particles in the
         halo.
 
-        Examples
-        --------
-        >>> halos[0].total_mass()
-
         """
         return self["ParticleMassMsun"].sum()
 
     def bulk_velocity(self):
-        r"""Returns the mass-weighted average velocity in cm/s.
+        """
+        Returns the mass-weighted average velocity in cm/s.
 
         This calculates and returns the mass-weighted average velocity of just
         the particles in the halo in cm/s.
-        
-        Examples
-        --------
-        >>> bv = halos[0].bulk_velocity()
 
         """
         pm = self["ParticleMassMsun"]
         vx = (self["particle_velocity_x"] * pm).sum()
         vy = (self["particle_velocity_y"] * pm).sum()
         vz = (self["particle_velocity_z"] * pm).sum()
-        return na.array([vx,vy,vz])/pm.sum()
+        return na.array([vx, vy, vz]) / pm.sum()
 
     def rms_velocity(self):
-        r"""Returns the mass-weighted RMS velocity for the halo
-        particles in cgs units.
+        """
+        Returns the mass-weighted RMS velocity for the halo particles in cgs
+        units.
 
         Calculate and return the mass-weighted RMS velocity for just the
-        particles in the halo.  The bulk velocity of the halo is subtracted
+        particles in the halo. The bulk velocity of the halo is subtracted
         before computation.
-        
-        Examples
-        --------
-        >>> rms_vel = halos[0].rms_velocity()
 
         """
         bv = self.bulk_velocity()
         pm = self["ParticleMassMsun"]
         sm = pm.sum()
+
         vx = (self["particle_velocity_x"] - bv[0]) * pm/sm
         vy = (self["particle_velocity_y"] - bv[1]) * pm/sm
         vz = (self["particle_velocity_z"] - bv[2]) * pm/sm
-        s = vx**2. + vy**2. + vz**2.
+
+        s = vx**2.0 + vy**2.0 + vz**2.0
         ms = na.mean(s)
         return na.sqrt(ms) * pm.size
 
     def maximum_radius(self, center_of_mass=True):
-        r"""Returns the maximum radius in the halo for all particles,
-        either from the point of maximum density or from the
-        center of mass.
+        r"""
+        Returns the maximum radius in the halo for all particles, either from
+        the point of maximum density or from the center of mass.
 
-        The maximum radius from the most dense point is calculated.  This
+        The maximum radius from the most dense point is calculated. This
         accounts for periodicity.
         
         Parameters
@@ -207,20 +183,19 @@
             False chooses from the maximum density location for HOP halos
             (it has no effect for FOF halos).
             Default = True.
-        
-        Examples
-        --------
-        >>> radius = halos[0].maximum_radius()
+
         """
         if center_of_mass: center = self.center_of_mass()
         else: center = self.maximum_density_location()
+
         rx = na.abs(self["particle_position_x"]-center[0])
         ry = na.abs(self["particle_position_y"]-center[1])
         rz = na.abs(self["particle_position_z"]-center[2])
+
         DW = self.data.pf.domain_right_edge - self.data.pf.domain_left_edge
         r = na.sqrt(na.minimum(rx, DW[0]-rx)**2.0
-                +   na.minimum(ry, DW[1]-ry)**2.0
-                +   na.minimum(rz, DW[2]-rz)**2.0)
+                    + na.minimum(ry, DW[1]-ry)**2.0
+                    + na.minimum(rz, DW[2]-rz)**2.0)
         return r.max()
 
     def __getitem__(self, key):
@@ -230,7 +205,7 @@
             return self.data[key][self.indices]
 
     def get_sphere(self, center_of_mass=True):
-        r"""Returns a sphere source.
+        r""" Returns a sphere source.
 
         This will generate a new, empty sphere source centered on this halo,
         with the maximum radius of the halo.
@@ -248,16 +223,13 @@
         sphere : `yt.data_objects.api.AMRSphereBase`
             The empty data source.
 
-        Examples
-        --------
-        >>> sp = halos[0].get_sphere()
         """
         if center_of_mass: center = self.center_of_mass()
         else: center = self.maximum_density_location()
+
         radius = self.maximum_radius()
-        # A bit of a long-reach here...
-        sphere = self.data.hierarchy.sphere(
-                        center, radius=radius)
+        # A bit of a long-reach here...
+        sphere = self.data.hierarchy.sphere(center, radius=radius)
+
         return sphere
 
     def get_size(self):
@@ -267,24 +239,27 @@
         self._processing = True
         gn = "Halo%08i" % (self.id)
         handle.create_group("/%s" % gn)
-        for field in ["particle_position_%s" % ax for ax in 'xyz'] \
-                   + ["particle_velocity_%s" % ax for ax in 'xyz'] \
-                   + ["particle_index"] + ["ParticleMassMsun"]:
+
+        for field in (["particle_position_%s" % ax for ax in 'xyz']
+                      + ["particle_velocity_%s" % ax for ax in 'xyz']
+                      + ["particle_index"] + ["ParticleMassMsun"]):
             handle.create_dataset("/%s/%s" % (gn, field), data=self[field])
+
         if 'creation_time' in self.data.pf.h.field_list:
             handle.create_dataset("/%s/creation_time" % gn,
-                data=self['creation_time'])
+                                  data=self['creation_time'])
         n = handle["/%s" % gn]
         # set attributes on n
         self._processing = False
 
     def virial_mass(self, virial_overdensity=200., bins=300):
-        r"""Return the virial mass of the halo in Msun, using only the particles
-        in the halo (no baryonic information used). 
+        r"""
+        Return the virial mass of the halo in Msun, using only the particles in
+        the halo (no baryonic information used). 
 
         The virial mass is calculated, using the built in `Halo.virial_info`
         functionality.  The mass is then returned.
-        
+
         Parameters
         ----------
         virial_overdensity : float
@@ -299,21 +274,19 @@
         mass : float
             The virial mass in solar masses of the particles in the halo.  -1
             if not virialized.
-        
-        Examples
-        --------
-        >>> vm = halos[0].virial_mass()
+
         """
         self.virial_info(bins=bins)
-        vir_bin = self.virial_bin(virial_overdensity=virial_overdensity, bins=bins)
+        vir_bin = self.virial_bin(virial_overdensity=virial_overdensity,
+                                  bins=bins)
+
         if vir_bin != -1:
             return self.mass_bins[vir_bin]
         else:
-            return -1
-        
-    
+            return -1        
+
     def virial_radius(self, virial_overdensity=200., bins=300):
-        r"""Return the virial radius of the halo in code units.
+        r""" Return the virial radius of the halo in code units.
         
         The virial radius of the halo is calculated, using only the particles
         in the halo (no baryonic information used). Returns -1 if the halo is
@@ -333,25 +306,27 @@
         radius : float
             The virial radius in code units of the particles in the halo.  -1
             if not virialized.
-        
-        Examples
-        --------
-        >>> vr = halos[0].virial_radius()
+
         """
         self.virial_info(bins=bins)
-        vir_bin = self.virial_bin(virial_overdensity=virial_overdensity, bins=bins)
+        vir_bin = self.virial_bin(virial_overdensity=virial_overdensity,
+                                  bins=bins)
+
         if vir_bin != -1:
             return self.radial_bins[vir_bin]
         else:
             return -1
 
     def virial_bin(self, virial_overdensity=200., bins=300):
-        r"""Returns the bin index of the virial radius of the halo. Generally,
-        it is better to call virial_radius instead, which calls this function
+        """
+        Returns the bin index of the virial radius of the halo. Generally, it is
+        better to call virial_radius instead, which calls this function
         automatically.
+
         """
         self.virial_info(bins=bins)
         over = (self.overdensity > virial_overdensity)
+
         if (over == True).any():
             vir_bin = max(na.arange(bins+1)[over])
             return vir_bin
@@ -359,54 +334,63 @@
             return -1
     
     def virial_info(self, bins=300):
-        r"""Calculates the virial information for the halo. Generally, it is
-        better to call virial_radius or virial_mass instead, which calls this
-        function automatically.
+        """
+        Calculates the virial information for the halo. Generally, it is better
+        to call virial_radius or virial_mass instead, which call this function
+        automatically.
+
         """
         # Skip if we've already calculated for this number of bins.
         if self.bin_count == bins and self.overdensity is not None:
             return None
         self.bin_count = bins
+
         # Cosmology
         h = self.pf.hubble_constant
         Om_matter = self.pf.omega_matter
         z = self.pf.current_redshift
         period = self.pf.domain_right_edge - self.pf.domain_left_edge
         cm = self.pf["cm"]
+
         thissize = max(self.size, self.indices.size)
         rho_crit_now = 1.8788e-29 * h**2.0 * Om_matter # g cm^-3
         Msun2g = 1.989e33
         rho_crit = rho_crit_now * ((1.0 + z)**3.0)
+
         # Get some pertinent information about the halo.
         self.mass_bins = na.zeros(self.bin_count+1, dtype='float64')
         dist = na.empty(thissize, dtype='float64')
         cen = self.center_of_mass()
         mark = 0
+
         # Find the distances to the particles. I don't like this much, but I
         # can't see a way to eliminate a loop like this, either here or in
         # yt.math.
         for pos in itertools.izip(self["particle_position_x"],
-                self["particle_position_y"], self["particle_position_z"]):
+                                  self["particle_position_y"],
+                                  self["particle_position_z"]):
             dist[mark] = periodic_dist(cen, pos, period)
             mark += 1
+
         # Set up the radial bins.
         # Multiply min and max to prevent issues with digitize below.
         self.radial_bins = na.logspace(math.log10(min(dist)*.99 + TINY), 
             math.log10(max(dist)*1.01 + 2*TINY), num=self.bin_count+1)
+
         # Find out which bin each particle goes into, and add the particle
         # mass to that bin.
         inds = na.digitize(dist, self.radial_bins) - 1
         if self["particle_position_x"].size > 1:
             for index in na.unique(inds):
                 self.mass_bins[index] += sum(self["ParticleMassMsun"][inds==index])
+
         # Now forward sum the masses in the bins.
         for i in xrange(self.bin_count):
             self.mass_bins[i+1] += self.mass_bins[i]
+
         # Calculate the over densities in the bins.
-        self.overdensity = self.mass_bins * Msun2g / \
-        (4./3. * math.pi * rho_crit * \
-        (self.radial_bins * cm)**3.0)
-        
+        self.overdensity = self.mass_bins * Msun2g / (4./3. * math.pi * rho_crit
+                           * (self.radial_bins * cm)**3.0)
 
 class HOPHalo(Halo):
     pass
@@ -419,58 +403,40 @@
         "rms_velocity"]
 
     def maximum_density(self):
-        r"""Return the HOP-identified maximum density.
-
-        Return the HOP-identified maximum density.
-
-        Examples
-        --------
-        >>> max_dens = halos[0].maximum_density()
-        """
+        """ Return the HOP-identified maximum density. """
         if self.max_dens_point is not None:
             return self.max_dens_point[0]
+
         max = self._mpi_allmax(self._max_dens[self.id][0])
         return max
 
     def maximum_density_location(self):
-        r"""Return the location HOP identified as maximally dense.
-        
-        Return the location HOP identified as maximally dense.
-
-        Examples
-        --------
-        >>> max_dens_loc = halos[0].maximum_density_location()
-        """
+        """ Return the location HOP identified as maximally dense. """
         if self.max_dens_point is not None:
             return self.max_dens_point[1:]
+
         # If I own the maximum density, my location is globally correct.
         max_dens = self.maximum_density()
         if self._max_dens[self.id][0] == max_dens:
-            value = na.array([
-                self._max_dens[self.id][1],
-                self._max_dens[self.id][2],
-                self._max_dens[self.id][3]])
+            value = na.array([self._max_dens[self.id][1],
+                              self._max_dens[self.id][2],
+                              self._max_dens[self.id][3]])
         else:
             value = na.array([0,0,0])
+
+        ### TODO: fix this MPI thing
         # This works, and isn't appropriate but for now will be fine...
         value = self._mpi_allsum(value)
         return value
 
     def center_of_mass(self):
-        r"""Calculate and return the center of mass.
-
-        The center of mass of the halo is directly calculated and returned.
-        
-        Examples
-        --------
-        >>> com = halos[0].center_of_mass()
-        """
+        """ Calculate and return the center of mass. """
         # If it's precomputed, we save time!
         if self.CoM is not None:
             return self.CoM
-        # This need to be called by all tasks, but not all will end up using
-        # it.
-        c_vec = self.maximum_density_location() - na.array([0.5,0.5,0.5])
+
+        # This needs to be called by all tasks, but not all will end up using it.
+        c_vec = self.maximum_density_location() - na.array([0.5, 0.5, 0.5])
         if self.indices is not None:
             pm = self["ParticleMassMsun"]
             cx = (self["particle_position_x"] - c_vec[0])
@@ -482,41 +448,37 @@
         else:
             my_mass = 0.
             my_com = na.array([0.,0.,0.])
+
         global_mass = self._mpi_allsum(my_mass)
         global_com = self._mpi_allsum(my_com)
         return global_com / global_mass
 
     def total_mass(self):
-        r"""Returns the total mass in solar masses of the halo.
-        
+        """
         Returns the total mass in solar masses of just the particles in the
         halo.
 
-        Examples
-        --------
-        >>> halos[0].total_mass()
         """
-        if self.group_total_mass is not None:
+        if self.group_total_mass is not None:   # already computed
             return self.group_total_mass
+
         if self.indices is not None:
             my_mass = self["ParticleMassMsun"].sum()
         else:
             my_mass = 0.
+
         global_mass = self._mpi_allsum(float(my_mass))
         return global_mass
 
     def bulk_velocity(self):
-        r"""Returns the mass-weighted average velocity in cm/s.
-
+        """
         This calculates and returns the mass-weighted average velocity of just
         the particles in the halo in cm/s.
-        
-        Examples
-        --------
-        >>> bv = halos[0].bulk_velocity()
+
         """
         if self.bulk_vel is not None:
             return self.bulk_vel
+
         # Unf. this cannot be reasonably computed inside of parallelHOP because
         # we don't pass velocities in.
         if self.indices is not None:
@@ -530,24 +492,21 @@
             vx = 0.
             vy = 0.
             vz = 0.
+
         bv = na.array([vx,vy,vz,pm])
         global_bv = self._mpi_allsum(bv)
         return global_bv[:3]/global_bv[3]
 
     def rms_velocity(self):
-        r"""Returns the mass-weighted RMS velocity for the halo
-        particles in cgs units.
+        """
+        Calculate and return the mass-weighted RMS velocity for just the
+        particles in the halo. The bulk velocity of the halo is subtracted
+        before computation.
 
-        Calculate and return the mass-weighted RMS velocity for just the
-        particles in the halo.  The bulk velocity of the halo is subtracted
-        before computation.
-        
-        Examples
-        --------
-        >>> rms_vel = halos[0].rms_velocity()
         """
-        if self.rms_vel is not None:
+        if self.rms_vel is not None:    # already computed
             return self.rms_vel
+
         bv = self.bulk_velocity()
         pm = self["ParticleMassMsun"]
         sm = pm.sum()
@@ -560,15 +519,16 @@
             size = vx.size
             ss = na.array([s, float(size)])
         else:
-            ss = na.array([0.,0.])
+            ss = na.array([0.0, 0.0])
+
         global_ss = self._mpi_allsum(ss)
         ms = global_ss[0] / global_ss[1]
         return na.sqrt(ms) * global_ss[1]
 
     def maximum_radius(self, center_of_mass=True):
-        r"""Returns the maximum radius in the halo for all particles,
-        either from the point of maximum density or from the
-        center of mass.
+        r"""
+        Returns the maximum radius in the halo for all particles, either from
+        the point of maximum density or from the center of mass.
 
         The maximum radius from the most dense point is calculated.  This
         accounts for periodicity.
@@ -580,27 +540,26 @@
             False chooses from the maximum density location for HOP halos
             (it has no effect for FOF halos).
             Default = True.
-        
-        Examples
-        --------
-        >>> radius = halos[0].maximum_radius()
+
         """
-        if self.max_radius is not None:
+        if self.max_radius is not None:     # already computed
             return self.max_radius
+
         if center_of_mass: center = self.center_of_mass()
         else: center = self.maximum_density_location()
+
         DW = self.data.pf.domain_right_edge - self.data.pf.domain_left_edge
         if self.indices is not None:
             rx = na.abs(self["particle_position_x"]-center[0])
             ry = na.abs(self["particle_position_y"]-center[1])
             rz = na.abs(self["particle_position_z"]-center[2])
-            r = na.sqrt(na.minimum(rx, DW[0]-rx)**2.0
-                    +   na.minimum(ry, DW[1]-ry)**2.0
-                    +   na.minimum(rz, DW[2]-rz)**2.0)
+            r = na.sqrt(na.minimum(rx, DW[0] - rx)**2.0
+                        + na.minimum(ry, DW[1] - ry)**2.0
+                        + na.minimum(rz, DW[2] - rz)**2.0)
             my_max = r.max()
-            
         else:
             my_max = 0.
+
         return self._mpi_allmax(my_max)
 
     def get_size(self):
@@ -640,10 +599,7 @@
         mass : float
             The virial mass in solar masses of the particles in the halo.  -1
             if not virialized.
-        
-        Examples
-        --------
-        >>> vm = halos[0].virial_mass()
+
         """
         self.virial_info(bins=bins)
         vir_bin = self.virial_bin(virial_overdensity=virial_overdensity, bins=bins)
@@ -651,8 +607,7 @@
             return self.mass_bins[vir_bin]
         else:
             return -1
-        
-    
+
     def virial_radius(self, virial_overdensity=200., bins=300):
         r"""Return the virial radius of the halo in code units.
         
@@ -674,22 +629,21 @@
         radius : float
             The virial radius in code units of the particles in the halo.  -1
             if not virialized.
-        
-        Examples
-        --------
-        >>> vr = halos[0].virial_radius()
+
         """
         self.virial_info(bins=bins)
-        vir_bin = self.virial_bin(virial_overdensity=virial_overdensity, bins=bins)
+        vir_bin = self.virial_bin(virial_overdensity=virial_overdensity,
+                                  bins=bins)
         if vir_bin != -1:
             return self.radial_bins[vir_bin]
-        else:
-            return -1
+        return -1
 
     def virial_bin(self, virial_overdensity=200., bins=300):
-        r"""Returns the bin index of the virial radius of the halo. Generally,
+        """
+        Returns the bin index of the virial radius of the halo. Generally,
         it is better to call virial_radius instead, which calls this function
         automatically.
+
         """
         self.virial_info(bins=bins)
         over = (self.overdensity > virial_overdensity)
@@ -699,10 +653,13 @@
         else:
             return -1
 
+    ### TODO: fix copy pasta with halo.virial_info
     def virial_info(self, bins=300):
-        r"""Calculates the virial information for the halo. Generally, it is
+        """
+        Calculates the virial information for the halo. Generally, it is
         better to call virial_radius or virial_mass instead, which call this
         function automatically.
+
         """
         # Skip if we've already calculated for this number of bins.
         if self.bin_count == bins and self.overdensity is not None:
@@ -761,7 +718,6 @@
         (4./3. * math.pi * rho_crit * \
         (self.radial_bins * self.data.pf["cm"])**3.0)
 
-
 class FOFHalo(Halo):
 
     def center_of_mass(self):
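
For reference, the overdensity profile that Halo.virial_info above builds, as a
standalone sketch: log-spaced radial bins, per-bin mass sums, a forward sum for
the enclosed mass, then the ratio to the critical-density mass within each
radius. It assumes NumPy arrays of particle distances (already in cm) and
masses (in Msun); the function name and the padding default are illustrative,
and plain np stands in for the codebase's na alias.

    import math
    import numpy as np

    def overdensity_profile(dist_cm, mass_msun, rho_crit, bins=300, tiny=1e-10):
        # Log-spaced radial bins, padded slightly so digitize keeps the
        # innermost and outermost particles in range.
        radial_bins = np.logspace(math.log10(dist_cm.min() * 0.99 + tiny),
                                  math.log10(dist_cm.max() * 1.01 + 2 * tiny),
                                  num=bins + 1)
        # Drop each particle's mass into its radial bin...
        inds = np.digitize(dist_cm, radial_bins) - 1
        mass_bins = np.zeros(bins + 1)
        for i in np.unique(inds):
            mass_bins[i] += mass_msun[inds == i].sum()
        # ...then forward-sum so each bin holds the enclosed mass.
        enclosed = np.cumsum(mass_bins)
        msun2g = 1.989e33
        # Overdensity: enclosed mass over the critical-density mass
        # inside the same radius.
        return enclosed * msun2g / (4. / 3. * math.pi * rho_crit
                                    * radial_bins**3.0)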


http://bitbucket.org/yt_analysis/yt/changeset/190b29714a72/
changeset:   190b29714a72
branch:      deliberate_fields
user:        caseywstark
date:        2011-06-20 23:13:40
summary:     Adding bitbucket.org to authorized fingerprints. Removes the cert warnings.
affected #:  1 file (-1 bytes)

--- a/yt/utilities/command_line.py	Mon Jun 20 13:28:28 2011 -0700
+++ b/yt/utilities/command_line.py	Mon Jun 20 14:13:40 2011 -0700
@@ -1027,6 +1027,19 @@
             print
             loki = raw_input("Press enter to go on, Ctrl-C to exit.")
             cedit.config.setoption(uu, hgrc_path, "bb.username=%s" % bbusername)
+        bb_fp = "81:2b:08:90:dc:d3:71:ee:e0:7c:b4:75:ce:9b:6c:48:94:56:a1:fe"
+        if uu.config("hostfingerprints", "bitbucket.org", None) is None:
+            print "Let's also add bitbucket.org to the known hosts, so hg"
+            print "doesn't warn us about bitbucket."
+            print "We will add this:"
+            print
+            print "   [hostfingerprints]"
+            print "   bitbucket.org = %s" % (bb_fp)
+            print
+            loki = raw_input("Press enter to go on, Ctrl-C to exit.")
+            cedit.config.setoption(uu, hgrc_path,
+                                   "hostfingerprints.bitbucket.org=%s" % bb_fp)
+
         # We now reload the UI's config file so that it catches the [bb]
         # section changes.
         uu.readconfig(hgrc_path[0])
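
For reference, once accepted this leaves the following stanza in the user's
hgrc, with the fingerprint taken from bb_fp above:

    [hostfingerprints]
    bitbucket.org = 81:2b:08:90:dc:d3:71:ee:e0:7c:b4:75:ce:9b:6c:48:94:56:a1:fe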


http://bitbucket.org/yt_analysis/yt/changeset/28e15137bc5c/
changeset:   28e15137bc5c
branch:      deliberate_fields
user:        caseywstark
date:        2011-06-20 23:14:35
summary:     Some cleanup before I get to it.
affected #:  1 file (-1 bytes)

--- a/yt/frontends/castro/fields.py	Mon Jun 20 14:13:40 2011 -0700
+++ b/yt/frontends/castro/fields.py	Mon Jun 20 14:14:35 2011 -0700
@@ -21,7 +21,9 @@
 
   You should have received a copy of the GNU General Public License
   along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
 """
+
 from yt.utilities.physical_constants import \
     mh, kboltz
 from yt.data_objects.field_info_container import \
@@ -45,25 +47,26 @@
 # def _convertDensity(data):
 #     return data.convert("Density")
 add_castro_field("density", function=NullFunc, take_log=True,
-          units=r"\rm{g}/\rm{cm}^3",
-CastroFieldInfo["density"]._projected_units =r"\rm{g}/\rm{cm}^2"
+                 units=r"\rm{g}/\rm{cm}^3")
+
+CastroFieldInfo["density"]._projected_units = r"\rm{g}/\rm{cm}^2"
 #CastroFieldInfo["density"]._convert_function=_convertDensity
 
 add_castro_field("eden", function=NullFunc, take_log=True,
-          validators = [ValidateDataField("eden")],
-          units=r"\rm{erg}/\rm{cm}^3")
+                 validators = [ValidateDataField("eden")],
+                 units=r"\rm{erg}/\rm{cm}^3")
 
 add_castro_field("xmom", function=NullFunc, take_log=False,
-          validators = [ValidateDataField("xmom")],
-          units=r"\rm{g}/\rm{cm^2\ s}")
+                 validators = [ValidateDataField("xmom")],
+                 units=r"\rm{g}/\rm{cm^2\ s}")
 
 add_castro_field("ymom", function=NullFunc, take_log=False,
-          validators = [ValidateDataField("ymom")],
-          units=r"\rm{gm}/\rm{cm^2\ s}")
+                 validators = [ValidateDataField("ymom")],
+                 units=r"\rm{g}/\rm{cm^2\ s}")
 
 add_castro_field("zmom", function=NullFunc, take_log=False,
-          validators = [ValidateDataField("zmom")],
-          units=r"\rm{g}/\rm{cm^2\ s}")
+                 validators = [ValidateDataField("zmom")],
+                 units=r"\rm{g}/\rm{cm^2\ s}")
 
 translation_dict = {"x-velocity": "xvel",
                     "y-velocity": "yvel",
@@ -82,33 +85,32 @@
 
 # Now fallbacks, in case these fields are not output
 def _xVelocity(field, data):
-    """generate x-velocity from x-momentum and density
+    """ Generate x-velocity from x-momentum and density. """
+    return data["xmom"] / data["density"]
 
-    """
-    return data["xmom"]/data["density"]
 add_field("x-velocity", function=_xVelocity, take_log=False,
           units=r'\rm{cm}/\rm{s}')
 
 def _yVelocity(field, data):
-    """generate y-velocity from y-momentum and density
+    """ Generate y-velocity from y-momentum and density. """
+    return data["ymom"] / data["density"]
 
-    """
-    return data["ymom"]/data["density"]
 add_field("y-velocity", function=_yVelocity, take_log=False,
           units=r'\rm{cm}/\rm{s}')
 
 def _zVelocity(field, data):
-    """generate z-velocity from z-momentum and density
+    """ Generate z-velocity from z-momentum and density. """
+    return data["zmom"] / data["density"]
 
-    """
-    return data["zmom"]/data["density"]
 add_field("z-velocity", function=_zVelocity, take_log=False,
           units=r'\rm{cm}/\rm{s}')
 
 def _ThermalEnergy(field, data):
-    """generate thermal (gas energy). Dual Energy Formalism was
-        implemented by Stella, but this isn't how it's called, so I'll
-        leave that commented out for now.
+    """
+    Generate thermal (gas) energy. Dual Energy Formalism was implemented by
+    Stella, but this isn't how it's called, so I'll leave that commented out for
+    now.
+
     """
     #if data.pf["DualEnergyFormalism"]:
     #    return data["Gas_Energy"]
@@ -117,24 +119,33 @@
         data["x-velocity"]**2.0
         + data["y-velocity"]**2.0
         + data["z-velocity"]**2.0 )
+
 add_field("ThermalEnergy", function=_ThermalEnergy,
           units=r"\rm{ergs}/\rm{cm^3}")
 
 def _Pressure(field, data):
-    """M{(Gamma-1.0)*e, where e is thermal energy density
-       NB: this will need to be modified for radiation
     """
-    return (data.pf["Gamma"] - 1.0)*data["ThermalEnergy"]
+    M{(Gamma-1.0)*e, where e is thermal energy density
+    
+    NB: this will need to be modified for radiation
+
+    """
+    return (data.pf["Gamma"] - 1.0) * data["ThermalEnergy"]
+
 add_field("Pressure", function=_Pressure, units=r"\rm{dyne}/\rm{cm}^{2}")
 
 def _Temperature(field, data):
-    return (data.pf["Gamma"]-1.0)*data.pf["mu"]*mh*data["ThermalEnergy"]/(kboltz*data["Density"])
-add_field("Temperature", function=_Temperature, units=r"\rm{Kelvin}", take_log=False)
+    return ((data.pf["Gamma"] - 1.0) * data.pf["mu"] * mh *
+            data["ThermalEnergy"] / (kboltz * data["Density"]))
+
+add_field("Temperature", function=_Temperature, units=r"\rm{Kelvin}",
+          take_log=False)
 
 def _convertParticleMassMsun(data):
-    return 1.0/1.989e33
+    return 1.0 / 1.989e33
 def _ParticleMassMsun(field, data):
     return data["particle_mass"]
+
 add_field("ParticleMassMsun",
           function=_ParticleMassMsun, validators=[ValidateSpatial(0)],
           particle_type=True, convert_function=_convertParticleMassMsun,
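
The derived-field pattern these hunks settle on, in isolation: a plain function
of (field, data) registered through add_field. A minimal sketch against the
same registry; the SoundSpeed field itself is illustrative, not part of this
commit:

    def _SoundSpeed(field, data):
        # c_s = sqrt(Gamma * P / rho), built from the fields defined above.
        return (data.pf["Gamma"] * data["Pressure"] / data["density"])**0.5

    add_field("SoundSpeed", function=_SoundSpeed, take_log=False,
              units=r"\rm{cm}/\rm{s}")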


http://bitbucket.org/yt_analysis/yt/changeset/bc95d852ee69/
changeset:   bc95d852ee69
branch:      deliberate_fields
user:        caseywstark
date:        2011-06-21 20:08:51
summary:     Reorganizing. I will get rid of these dunder methods later.
affected #:  1 file (-1 bytes)

--- a/yt/frontends/castro/data_structures.py	Mon Jun 20 14:14:35 2011 -0700
+++ b/yt/frontends/castro/data_structures.py	Tue Jun 21 11:08:51 2011 -0700
@@ -27,16 +27,12 @@
 import os
 import weakref
 import itertools
+from collections import defaultdict
+from string import strip, rstrip
+from stat import ST_CTIME
+
 import numpy as na
 
-from collections import \
-    defaultdict
-from string import \
-    strip, \
-    rstrip
-from stat import \
-    ST_CTIME
-
 from yt.funcs import *
 from yt.data_objects.grid_patch import \
            AMRGridPatch
@@ -65,7 +61,8 @@
 
 class CastroGrid(AMRGridPatch):
     _id_offset = 0
-    def __init__(self, LeftEdge, RightEdge, index, level, filename, offset, dimensions, start, stop, paranoia=False,**kwargs):
+    def __init__(self, LeftEdge, RightEdge, index, level, filename, offset,
+                 dimensions, start, stop, paranoia=False, **kwargs):
         AMRGridPatch.__init__(self, index,**kwargs)
         self.filename = filename
         self._offset = offset
@@ -84,12 +81,11 @@
         return self.start_index
 
     def _prepare_grid(self):
-        """
-        Copies all the appropriate attributes from the hierarchy
-        """
+        """ Copies all the appropriate attributes from the hierarchy. """
         # This is definitely the slowest part of generating the hierarchy
         # Now we give it pointers to all of its attributes
         # Note that to keep in line with Enzo, we have broken PEP-8
+
         h = self.hierarchy # cache it
         #self.StartIndices = h.gridStartIndices[self.id]
         #self.EndIndices = h.gridEndIndices[self.id]
@@ -101,6 +97,7 @@
         self.field_indexes = h.field_indexes
         self.Children = h.gridTree[self.id]
         pIDs = h.gridReverseTree[self.id]
+
         if len(pIDs) > 0:
             self.Parent = [weakref.proxy(h.grids[pID]) for pID in pIDs]
         else:
@@ -135,7 +132,8 @@
         #self._setup_classes()
 
         # This also sets up the grid objects
-        self.read_global_header(header_filename, self.parameter_file.paranoid_read) 
+        self.read_global_header(header_filename,
+                                self.parameter_file.paranoid_read) 
         self.read_particle_header()
         self.__cache_endianness(self.levels[-1].grids[-1])
         AMRHierarchy.__init__(self, pf, self.data_style)
@@ -144,18 +142,16 @@
         self._populate_hierarchy()
         
     def read_global_header(self, filename, paranoid_read):
-        """
-        read the global header file for an Castro plotfile output.
-        """
+        """ Read the global header file for a Castro plotfile output. """
         counter = 0
-        header_file = open(filename,'r')
+        header_file = open(filename, 'r')
         self.__global_header_lines = header_file.readlines()
 
         # parse the file
         self.castro_version = self.__global_header_lines[0].rstrip()
-        self.n_fields      = int(self.__global_header_lines[1])
+        self.n_fields = int(self.__global_header_lines[1])
 
-        counter = self.n_fields+2
+        counter = self.n_fields + 2
         self.field_list = []
         for i, line in enumerate(self.__global_header_lines[2:counter]):
             self.field_list.append(line.rstrip())
@@ -167,12 +163,14 @@
         self.dimension = int(self.__global_header_lines[counter])
         if self.dimension != 3:
             raise RunTimeError("Castro must be in 3D to use yt.")
+
         counter += 1
         self.Time = float(self.__global_header_lines[counter])
         counter += 1
         self.finest_grid_level = int(self.__global_header_lines[counter])
         self.n_levels = self.finest_grid_level + 1
         counter += 1
+
         # quantities with _unnecessary are also stored in the inputs
         # file and are not needed.  they are read in and stored in
         # case in the future we want to enable a "backwards" way of
@@ -182,14 +180,16 @@
         counter += 1
         self.domainRightEdge_unnecessary = na.array(map(float, self.__global_header_lines[counter].split()))
         counter += 1
-        self.refinementFactor_unnecessary = self.__global_header_lines[counter].split() #na.array(map(int, self.__global_header_lines[counter].split()))
+        self.refinementFactor_unnecessary = self.__global_header_lines[counter].split()
+        #na.array(map(int, self.__global_header_lines[counter].split()))
         counter += 1
         self.globalIndexSpace_unnecessary = self.__global_header_lines[counter]
         #domain_re.search(self.__global_header_lines[counter]).groups()
         counter += 1
         self.timestepsPerLevel_unnecessary = self.__global_header_lines[counter]
         counter += 1
-        self.dx = na.zeros((self.n_levels,3))
+
+        self.dx = na.zeros((self.n_levels, 3))
         for i, line in enumerate(self.__global_header_lines[counter:counter+self.n_levels]):
             self.dx[i] = na.array(map(float, line.split()))
         counter += self.n_levels
@@ -201,10 +201,11 @@
         # this is just to debug. eventually it should go away.
         linebreak = int(self.__global_header_lines[counter])
         if linebreak != 0:
-            raise RunTimeError("INTERNAL ERROR! This should be a zero.")
+            raise RunTimeError("INTERNAL ERROR! Header is unexpected size")
         counter += 1
 
-        # each level is one group with ngrids on it. each grid has 3 lines of 2 reals
+        # Each level is one group with ngrids on it. Each grid has 3 lines of 2 reals
+        # BoxLib madness
         self.levels = []
         grid_counter = 0
         file_finder_pattern = r"FabOnDisk: (\w+_D_[0-9]{4}) (\d+)\n"
@@ -216,25 +217,26 @@
 
         for level in range(0, self.n_levels):
             tmp = self.__global_header_lines[counter].split()
-            # should this be grid_time or level_time??
+            # Should this be grid_time or level_time??
             lev, ngrids, grid_time = int(tmp[0]), int(tmp[1]), float(tmp[2])
             counter += 1
             nsteps = int(self.__global_header_lines[counter])
             counter += 1
             self.levels.append(CastroLevel(lev, ngrids))
-            # open level header, extract file names and offsets for
-            # each grid
-            # read slightly out of order here: at the end of the lo, hi
-            # pairs for x, y, z is a *list* of files types in the Level
-            # directory. each type has Header and a number of data
-            # files (one per processor)
+            # Open level header, extract file names and offsets for each grid.
+            # Read slightly out of order here: at the end of the lo, hi pairs
+            # for x, y, z is a *list* of file types in the Level directory.
+            # Each type has Header and a number of data files
+            # (one per processor)
             tmp_offset = counter + 3*ngrids
             nfiles = 0
             key_off = 0
             files =   {} # dict(map(lambda a: (a,[]), self.field_list))
             offsets = {} # dict(map(lambda a: (a,[]), self.field_list))
-            while nfiles+tmp_offset < len(self.__global_header_lines) and data_files_finder.match(self.__global_header_lines[nfiles+tmp_offset]):
-                filen = os.path.join(self.parameter_file.fullplotdir, \
+
+            while (nfiles + tmp_offset < len(self.__global_header_lines) and
+                   data_files_finder.match(self.__global_header_lines[nfiles+tmp_offset])):
+                filen = os.path.join(self.parameter_file.fullplotdir,
                                      self.__global_header_lines[nfiles+tmp_offset].strip())
                 # open each "_H" header file, and get the number of
                 # components within it
@@ -242,18 +244,22 @@
                 start_stop_index = re_dim_finder.findall(level_header_file) # just take the last one
                 grid_file_offset = re_file_finder.findall(level_header_file)
                 ncomp_this_file = int(level_header_file.split('\n')[2])
+
                 for i in range(ncomp_this_file):
                     key = self.field_list[i+key_off]
                     f, o = zip(*grid_file_offset)
                     files[key] = f
                     offsets[key] = o
                     self.field_indexes[key] = i
+
                 key_off += ncomp_this_file
                 nfiles += 1
+
             # convert dict of lists to list of dicts
             fn = []
             off = []
-            lead_path = os.path.join(self.parameter_file.fullplotdir,'Level_%i'%level)
+            lead_path = os.path.join(self.parameter_file.fullplotdir,
+                                     'Level_%i' % level)
             for i in range(ngrids):
                 fi = [os.path.join(lead_path, files[key][i]) for key in self.field_list]
                 of = [int(offsets[key][i]) for key in self.field_list]
@@ -264,20 +270,20 @@
                 gfn = fn[grid]  # filename of file containing this grid
                 gfo = off[grid] # offset within that file
                 xlo, xhi = map(float, self.__global_header_lines[counter].split())
-                counter+=1
+                counter += 1
                 ylo, yhi = map(float, self.__global_header_lines[counter].split())
-                counter+=1
+                counter += 1
                 zlo, zhi = map(float, self.__global_header_lines[counter].split())
-                counter+=1
+                counter += 1
                 lo = na.array([xlo, ylo, zlo])
                 hi = na.array([xhi, yhi, zhi])
                 dims, start, stop = self.__calculate_grid_dimensions(start_stop_index[grid])
                 self.levels[-1].grids.append(self.grid(lo, hi, grid_counter, level, gfn, gfo, dims, start, stop, paranoia=paranoid_read, hierarchy=self))
-                grid_counter += 1 # this is global, and shouldn't be reset
-                                  # for each level
+                grid_counter += 1   # this is global, and shouldn't be reset
+                                    # for each level
 
             # already read the filenames above...
-            counter+=nfiles
+            counter += nfiles
             self.num_grids = grid_counter
             self.float_type = 'float64'
 
@@ -290,50 +296,53 @@
         if not self.parameter_file.use_particles:
             self.pgrid_info = na.zeros((self.num_grids, 3), dtype='int64')
             return
+
         self.field_list += castro_particle_field_names[:]
-        header = open(os.path.join(self.parameter_file.fullplotdir,
-                        "DM", "Header"))
+        header = open(os.path.join(self.parameter_file.fullplotdir, "DM",
+                                   "Header"))
         version = header.readline()
         ndim = header.readline()
         nfields = header.readline()
         ntotalpart = int(header.readline())
         dummy = header.readline() # nextid
         maxlevel = int(header.readline()) # max level
+
         # Skip over how many grids on each level; this is degenerate
         for i in range(maxlevel+1): dummy = header.readline()
         grid_info = na.fromiter((int(i)
-                    for line in header.readlines()
-                    for i in line.split()
-                    ),
-            dtype='int64', count=3*self.num_grids).reshape((self.num_grids, 3))
+                                 for line in header.readlines()
+                                 for i in line.split()),
+                                dtype='int64',
+                                count=3*self.num_grids).reshape((self.num_grids, 3))
         self.pgrid_info = grid_info
 
     def __cache_endianness(self, test_grid):
         """
-        Cache the endianness and bytes perreal of the grids by using a
-        test grid and assuming that all grids have the same
-        endianness. This is a pretty safe assumption since Castro uses
-        one file per processor, and if you're running on a cluster
-        with different endian processors, then you're on your own!
+        Cache the endianness and bytes per real of the grids by using a test grid
+        and assuming that all grids have the same endianness. This is a pretty
+        safe assumption since Castro uses one file per processor, and if you're
+        running on a cluster with different endian processors, then you're on
+        your own!
+
         """
-        # open the test file & grab the header
-        inFile = open(os.path.expanduser(test_grid.filename[self.field_list[0]]),'rb')
-        header = inFile.readline()
-        inFile.close()
+        # open the test file and grab the header
+        in_file = open(os.path.expanduser(test_grid.filename[self.field_list[0]]), 'rb')
+        header = in_file.readline()
+        in_file.close()
         header.strip()
         
-        # parse it. the patter is in CastroDefs.py
-        headerRe = re.compile(castro_FAB_header_pattern)
-        bytesPerReal, endian, start, stop, centerType, nComponents = headerRe.search(header).groups()
-        self._bytesPerReal = int(bytesPerReal)
-        if self._bytesPerReal == int(endian[0]):
+        # Parse it. The pattern is in castro.definitions.py
+        header_re = re.compile(castro_FAB_header_pattern)
+        bytes_per_real, endian, start, stop, centerType, n_components = header_re.search(header).groups()
+        self._bytes_per_real = int(bytes_per_real)
+        if self._bytes_per_real == int(endian[0]):
             dtype = '<'
-        elif self._bytesPerReal == int(endian[-1]):
+        elif self._bytes_per_real == int(endian[-1]):
             dtype = '>'
         else:
             raise ValueError("FAB header is neither big nor little endian. Perhaps the file is corrupt?")
 
-        dtype += ('f%i' % self._bytesPerReal) # always a floating point
+        dtype += ('f%i' % self._bytes_per_real) # always a floating point
         self._dtype = dtype
 
     def __calculate_grid_dimensions(self, start_stop):
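
Two notes on this file in passing. First, RunTimeError is not a Python
built-in (the built-in is RuntimeError), so those raise statements would
actually surface as NameError. Second, the endianness check in
__cache_endianness reduces to a small rule: the FAB header's byte-order
descriptor carries the real size at one end or the other, and whichever end
matches picks the numpy byte-order prefix. A sketch assuming the regex groups
are already parsed out; the sample descriptor is illustrative:

    def fab_dtype(bytes_per_real, endian):
        # The real size sits at one end of the byte-order descriptor;
        # which end it matches determines the byte order.
        if bytes_per_real == int(endian[0]):
            prefix = '<'    # little endian
        elif bytes_per_real == int(endian[-1]):
            prefix = '>'    # big endian
        else:
            raise ValueError("FAB header is neither big nor little endian.")
        return prefix + 'f%i' % bytes_per_real  # always a floating point

    # e.g. fab_dtype(8, "87654321") -> '<f8'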


http://bitbucket.org/yt_analysis/yt/changeset/d24fb3dc0f0c/
changeset:   d24fb3dc0f0c
branch:      deliberate_fields
user:        caseywstark
date:        2011-06-21 21:40:46
summary:     Proper superclass init calls
affected #:  2 files (-1 bytes)

--- a/doc/coding_styleguide.txt	Tue Jun 21 11:08:51 2011 -0700
+++ b/doc/coding_styleguide.txt	Tue Jun 21 12:40:46 2011 -0700
@@ -21,6 +21,9 @@
    be "na.multiply(a, 3, a)".
  * In general, avoid all double-underscore method names: __something is usually
    unnecessary.
+ * When writing a subclass, use the super built-in to access the superclass,
+   rather than naming the parent explicitly. Ex: "super(SpecialGrid, self).__init__()"
+   rather than "BaseClass.__init__(self)".
  * Doc strings should describe input, output, behavior, and any state changes
    that occur on an object.  See the file `doc/docstring_example.txt` for a
    fiducial example of a docstring.


--- a/yt/frontends/castro/data_structures.py	Tue Jun 21 11:08:51 2011 -0700
+++ b/yt/frontends/castro/data_structures.py	Tue Jun 21 12:40:46 2011 -0700
@@ -63,7 +63,7 @@
     _id_offset = 0
     def __init__(self, LeftEdge, RightEdge, index, level, filename, offset,
                  dimensions, start, stop, paranoia=False, **kwargs):
-        AMRGridPatch.__init__(self, index,**kwargs)
+        super(CastroGrid, self).__init__(self, index, **kwargs)
         self.filename = filename
         self._offset = offset
         self._paranoid = paranoia
@@ -123,20 +123,20 @@
 class CastroHierarchy(AMRHierarchy):
     grid = CastroGrid
     def __init__(self, pf, data_style='castro_native'):
+        super(CastroHierarchy, self).__init__(self, pf, self.data_style)
+
         self.field_info = CastroFieldContainer()
         self.field_indexes = {}
         self.parameter_file = weakref.proxy(pf)
         header_filename = os.path.join(pf.fullplotdir, 'Header')
         self.directory = pf.fullpath
         self.data_style = data_style
-        #self._setup_classes()
 
         # This also sets up the grid objects
         self.read_global_header(header_filename,
                                 self.parameter_file.paranoid_read) 
         self.read_particle_header()
         self.__cache_endianness(self.levels[-1].grids[-1])
-        AMRHierarchy.__init__(self, pf, self.data_style)
         self._setup_data_io()
         self._setup_field_list()
         self._populate_hierarchy()
@@ -487,8 +487,9 @@
 
 class CastroStaticOutput(StaticOutput):
     """
-    This class is a stripped down class that simply reads and parses
-    *filename*, without looking at the Castro hierarchy.
+    This class is a stripped down class that simply reads and parses *filename*,
+    without looking at the Castro hierarchy.
+
     """
     _hierarchy_class = CastroHierarchy
     _fieldinfo_fallback = CastroFieldInfo
@@ -497,7 +498,8 @@
     def __init__(self, plotname, paramFilename=None, fparamFilename=None,
                  data_style='castro_native', paranoia=False,
                  storage_filename = None):
-        """need to override for Castro file structure.
+        """
+        Need to override for Castro file structure.
 
         The paramfile is usually called "inputs"
         and there may be a Fortran inputs file usually called "probin"
@@ -508,6 +510,8 @@
          * ASCII (not implemented in yt)
 
         """
+        super(CastroStaticOutput, self).__init__(self, plotname.rstrip("/"),
+                                                 data_style='castro_native')
         self.storage_filename = storage_filename
         self.paranoid_read = paranoia
         self.parameter_filename = paramFilename
@@ -516,10 +520,8 @@
 
         self.fparameters = {}
 
-        StaticOutput.__init__(self, plotname.rstrip("/"),
-                              data_style='castro_native')
         self.field_info = FieldInfoContainer.create_with_fallback(
-                            self._fieldinfo_fallback)
+                              self._fieldinfo_fallback)
 
         # These should maybe not be hardcoded?
         self.parameters["HydroMethod"] = 'castro' # always PPM DE
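
One caveat on the new super calls above: super(CastroGrid, self).__init__ is
already bound to the instance, so passing self again hands __init__ the
instance twice (the same applies to the CastroHierarchy and CastroStaticOutput
calls). The form the styleguide entry intends, sketched against a stand-in
base class:

    class Grid(object):
        def __init__(self, index, **kwargs):
            self.index = index

    class SpecialGrid(Grid):
        def __init__(self, index, **kwargs):
            # Resolved through the MRO; self is passed implicitly.
            super(SpecialGrid, self).__init__(index, **kwargs)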


http://bitbucket.org/yt_analysis/yt/changeset/b7b60b24c5d4/
changeset:   b7b60b24c5d4
branch:      deliberate_fields
user:        caseywstark
date:        2011-06-21 23:30:54
summary:     Converted dunder methods to single underscore (private idiom) methods. Added more TODO notes. More cleanup.
affected #:  1 file (-1 bytes)

--- a/yt/frontends/castro/data_structures.py	Tue Jun 21 12:40:46 2011 -0700
+++ b/yt/frontends/castro/data_structures.py	Tue Jun 21 14:30:54 2011 -0700
@@ -34,16 +34,12 @@
 import numpy as na
 
 from yt.funcs import *
-from yt.data_objects.grid_patch import \
-           AMRGridPatch
-from yt.data_objects.hierarchy import \
-           AMRHierarchy
-from yt.data_objects.static_output import \
-           StaticOutput
-from yt.utilities.definitions import \
-    mpc_conversion
-from yt.utilities.amr_utils import \
-    get_box_grids_level
+from yt.data_objects.field_info_container import FieldInfoContainer, NullFunc
+from yt.data_objects.grid_patch import AMRGridPatch
+from yt.data_objects.hierarchy import AMRHierarchy
+from yt.data_objects.static_output import StaticOutput
+from yt.utilities.definitions import mpc_conversion
+from yt.utilities.amr_utils import get_box_grids_level
 
 from .definitions import \
     castro2enzoDict, \
@@ -52,37 +48,37 @@
     castro_FAB_header_pattern, \
     castro_particle_field_names, \
     boxlib_bool_to_int
-
-from yt.data_objects.field_info_container import \
-    FieldInfoContainer, NullFunc
 from .fields import \
-    CastroFieldInfo, add_castro_field, KnownCastroFields
+    CastroFieldInfo, \
+    KnownCastroFields, \
+    add_castro_field
 
 
 class CastroGrid(AMRGridPatch):
     _id_offset = 0
+
     def __init__(self, LeftEdge, RightEdge, index, level, filename, offset,
                  dimensions, start, stop, paranoia=False, **kwargs):
         super(CastroGrid, self).__init__(self, index, **kwargs)
         self.filename = filename
         self._offset = offset
-        self._paranoid = paranoia
-        
-        # should error check this
+        self._paranoid = paranoia  # TODO: Factor this behavior out in tests
+
+        ### TODO: error check this (test)
         self.ActiveDimensions = (dimensions.copy()).astype('int32')#.transpose()
         self.start_index = start.copy()#.transpose()
         self.stop_index = stop.copy()#.transpose()
         self.LeftEdge  = LeftEdge.copy()
         self.RightEdge = RightEdge.copy()
         self.index = index
-        self.Level = level
+        self.level = level
 
-    def get_global_startindex(self):
+    def get_global_start_index(self):
         return self.start_index
 
     def _prepare_grid(self):
         """ Copies all the appropriate attributes from the hierarchy. """
-        # This is definitely the slowest part of generating the hierarchy
+        # This is definitely the slowest part of generating the hierarchy.
         # Now we give it pointers to all of its attributes
         # Note that to keep in line with Enzo, we have broken PEP-8
 
@@ -113,6 +109,7 @@
             LE, RE = self.hierarchy.grid_left_edge[id,:], \
                      self.hierarchy.grid_right_edge[id,:]
             self.dds = na.array((RE-LE)/self.ActiveDimensions)
+
         if self.pf.dimensionality < 2: self.dds[1] = 1.0
         if self.pf.dimensionality < 3: self.dds[2] = 1.0
         self.data['dx'], self.data['dy'], self.data['dz'] = self.dds
@@ -122,6 +119,7 @@
 
 class CastroHierarchy(AMRHierarchy):
     grid = CastroGrid
+
     def __init__(self, pf, data_style='castro_native'):
         super(CastroHierarchy, self).__init__(self, pf, self.data_style)
 
@@ -136,7 +134,7 @@
         self.read_global_header(header_filename,
                                 self.parameter_file.paranoid_read) 
         self.read_particle_header()
-        self.__cache_endianness(self.levels[-1].grids[-1])
+        self._cache_endianness(self.levels[-1].grids[-1])
         self._setup_data_io()
         self._setup_field_list()
         self._populate_hierarchy()
@@ -145,29 +143,29 @@
         """ Read the global header file for an Castro plotfile output. """
         counter = 0
         header_file = open(filename, 'r')
-        self.__global_header_lines = header_file.readlines()
+        self._global_header_lines = header_file.readlines()
 
         # parse the file
-        self.castro_version = self.__global_header_lines[0].rstrip()
-        self.n_fields = int(self.__global_header_lines[1])
+        self.castro_version = self._global_header_lines[0].rstrip()
+        self.n_fields = int(self._global_header_lines[1])
 
         counter = self.n_fields + 2
         self.field_list = []
-        for i, line in enumerate(self.__global_header_lines[2:counter]):
+        for i, line in enumerate(self._global_header_lines[2:counter]):
             self.field_list.append(line.rstrip())
 
         # this is unused...eliminate it?
         #for f in self.field_indexes:
         #    self.field_list.append(castro2ytFieldsDict.get(f, f))
 
-        self.dimension = int(self.__global_header_lines[counter])
+        self.dimension = int(self._global_header_lines[counter])
         if self.dimension != 3:
             raise RunTimeError("Castro must be in 3D to use yt.")
 
         counter += 1
-        self.Time = float(self.__global_header_lines[counter])
+        self.Time = float(self._global_header_lines[counter])
         counter += 1
-        self.finest_grid_level = int(self.__global_header_lines[counter])
+        self.finest_grid_level = int(self._global_header_lines[counter])
         self.n_levels = self.finest_grid_level + 1
         counter += 1
 
@@ -176,30 +174,30 @@
         # case in the future we want to enable a "backwards" way of
         # taking the data out of the Header file and using it to fill
         # in in the case of a missing inputs file
-        self.domainLeftEdge_unnecessary = na.array(map(float, self.__global_header_lines[counter].split()))
+        self.domainLeftEdge_unnecessary = na.array(map(float, self._global_header_lines[counter].split()))
         counter += 1
-        self.domainRightEdge_unnecessary = na.array(map(float, self.__global_header_lines[counter].split()))
+        self.domainRightEdge_unnecessary = na.array(map(float, self._global_header_lines[counter].split()))
         counter += 1
-        self.refinementFactor_unnecessary = self.__global_header_lines[counter].split()
-        #na.array(map(int, self.__global_header_lines[counter].split()))
+        self.refinementFactor_unnecessary = self._global_header_lines[counter].split()
+        #na.array(map(int, self._global_header_lines[counter].split()))
         counter += 1
-        self.globalIndexSpace_unnecessary = self.__global_header_lines[counter]
-        #domain_re.search(self.__global_header_lines[counter]).groups()
+        self.globalIndexSpace_unnecessary = self._global_header_lines[counter]
+        #domain_re.search(self._global_header_lines[counter]).groups()
         counter += 1
-        self.timestepsPerLevel_unnecessary = self.__global_header_lines[counter]
+        self.timestepsPerLevel_unnecessary = self._global_header_lines[counter]
         counter += 1
 
         self.dx = na.zeros((self.n_levels, 3))
         for i, line in enumerate(self.__global_header_lines[counter:counter+self.n_levels]):
             self.dx[i] = na.array(map(float, line.split()))
         counter += self.n_levels
-        self.geometry = int(self.__global_header_lines[counter])
+        self.geometry = int(self._global_header_lines[counter])
         if self.geometry != 0:
             raise RunTimeError("yt only supports cartesian coordinates.")
         counter += 1
 
         # this is just to debug. eventually it should go away.
-        linebreak = int(self.__global_header_lines[counter])
+        linebreak = int(self._global_header_lines[counter])
         if linebreak != 0:
             raise RunTimeError("INTERNAL ERROR! Header is unexpected size")
         counter += 1
@@ -216,11 +214,11 @@
         data_files_finder = re.compile(data_files_pattern)
 
         for level in range(0, self.n_levels):
-            tmp = self.__global_header_lines[counter].split()
+            tmp = self._global_header_lines[counter].split()
             # Should this be grid_time or level_time??
             lev, ngrids, grid_time = int(tmp[0]), int(tmp[1]), float(tmp[2])
             counter += 1
-            nsteps = int(self.__global_header_lines[counter])
+            nsteps = int(self._global_header_lines[counter])
             counter += 1
             self.levels.append(CastroLevel(lev, ngrids))
             # Open level header, extract file names and offsets for each grid.
@@ -234,10 +232,10 @@
             files =   {} # dict(map(lambda a: (a,[]), self.field_list))
             offsets = {} # dict(map(lambda a: (a,[]), self.field_list))
 
-            while (nfiles + tmp_offset < len(self.__global_header_lines) and
-                   data_files_finder.match(self.__global_header_lines[nfiles+tmp_offset])):
+            while (nfiles + tmp_offset < len(self._global_header_lines) and
+                   data_files_finder.match(self._global_header_lines[nfiles+tmp_offset])):
                 filen = os.path.join(self.parameter_file.fullplotdir,
-                                     self.__global_header_lines[nfiles+tmp_offset].strip())
+                                     self._global_header_lines[nfiles+tmp_offset].strip())
                 # open each "_H" header file, and get the number of
                 # components within it
                 level_header_file = open(filen+'_H','r').read()
@@ -269,16 +267,20 @@
             for grid in range(0, ngrids):
                 gfn = fn[grid]  # filename of file containing this grid
                 gfo = off[grid] # offset within that file
-                xlo, xhi = map(float, self.__global_header_lines[counter].split())
+                xlo, xhi = map(float, self._global_header_lines[counter].split())
                 counter += 1
-                ylo, yhi = map(float, self.__global_header_lines[counter].split())
+                ylo, yhi = map(float, self._global_header_lines[counter].split())
                 counter += 1
-                zlo, zhi = map(float, self.__global_header_lines[counter].split())
+                zlo, zhi = map(float, self._global_header_lines[counter].split())
                 counter += 1
                 lo = na.array([xlo, ylo, zlo])
                 hi = na.array([xhi, yhi, zhi])
-                dims, start, stop = self.__calculate_grid_dimensions(start_stop_index[grid])
-                self.levels[-1].grids.append(self.grid(lo, hi, grid_counter, level, gfn, gfo, dims, start, stop, paranoia=paranoid_read, hierarchy=self))
+                dims, start, stop = self._calculate_grid_dimensions(start_stop_index[grid])
+                self.levels[-1].grids.append(self.grid(lo, hi, grid_counter,
+                                                       level, gfn, gfo, dims,
+                                                       start, stop,
+                                                       paranoia=paranoid_read,  ### TODO: at least the code isn't schizophrenic paranoid
+                                                       hierarchy=self))
                 grid_counter += 1   # this is global, and shouldn't be reset
                                     # for each level
 
@@ -316,7 +318,7 @@
                                 count=3*self.num_grids).reshape((self.num_grids, 3))
         self.pgrid_info = grid_info
 
-    def __cache_endianness(self, test_grid):
+    def _cache_endianness(self, test_grid):
         """
         Cache the endianness and bytes per real of the grids by using a test grid
         and assuming that all grids have the same endianness. This is a pretty
@@ -345,7 +347,7 @@
         dtype += ('f%i' % self._bytes_per_real) # always a floating point
         self._dtype = dtype
 
-    def __calculate_grid_dimensions(self, start_stop):
+    def _calculate_grid_dimensions(self, start_stop):
         start = na.array(map(int, start_stop[0].split(',')))
         stop = na.array(map(int, start_stop[1].split(',')))
         dimension = stop - start + 1
@@ -353,21 +355,28 @@
         
     def _populate_grid_objects(self):
         mylog.debug("Creating grid objects")
+
         self.grids = na.concatenate([level.grids for level in self.levels])
         basedir = self.parameter_file.fullplotdir
+
         for g, pg in itertools.izip(self.grids, self.pgrid_info):
             g.particle_filename = os.path.join(
                 basedir, "DM", "Level_%s" % (g.Level), "DATA_%04i" % pg[0])
             g.NumberOfParticles = pg[1]
             g._particle_offset = pg[2]
+
         self.grid_particle_count[:,0] = self.pgrid_info[:,1]
         del self.pgrid_info
-        gls = na.concatenate([level.ngrids*[level.level] for level in self.levels])
+
+        gls = na.concatenate([level.ngrids * [level.level] for level in self.levels])
         self.grid_levels[:] = gls.reshape((self.num_grids,1))
-        grid_dcs = na.concatenate([level.ngrids*[self.dx[level.level]] for level in self.levels], axis=0)
+        grid_dcs = na.concatenate([level.ngrids * [self.dx[level.level]]
+                                  for level in self.levels], axis=0)
+
         self.grid_dxs = grid_dcs[:,0].reshape((self.num_grids,1))
         self.grid_dys = grid_dcs[:,1].reshape((self.num_grids,1))
         self.grid_dzs = grid_dcs[:,2].reshape((self.num_grids,1))
+
         left_edges = []
         right_edges = []
         dims = []
@@ -375,23 +384,28 @@
             left_edges += [g.LeftEdge for g in level.grids]
             right_edges += [g.RightEdge for g in level.grids]
             dims += [g.ActiveDimensions for g in level.grids]
+
         self.grid_left_edge = na.array(left_edges)
         self.grid_right_edge = na.array(right_edges)
         self.grid_dimensions = na.array(dims)
         self.gridReverseTree = [] * self.num_grids
         self.gridReverseTree = [ [] for i in range(self.num_grids)]
         self.gridTree = [ [] for i in range(self.num_grids)]
+
         mylog.debug("Done creating grid objects")
 
     def _populate_hierarchy(self):
-        self.__setup_grid_tree()
+        self._setup_grid_tree()
         #self._setup_grid_corners()
+
         for i, grid in enumerate(self.grids):
-            if (i%1e4) == 0: mylog.debug("Prepared % 7i / % 7i grids", i, self.num_grids)
+            if (i % 1e4) == 0:
+                mylog.debug("Prepared % 7i / % 7i grids", i, self.num_grids)
+
             grid._prepare_grid()
             grid._setup_dx()
 
-    def __setup_grid_tree(self):
+    def _setup_grid_tree(self):
         mask = na.empty(self.grids.size, dtype='int32')
         for i, grid in enumerate(self.grids):
             get_box_grids_level(grid.LeftEdge, grid.RightEdge, grid.Level + 1,
@@ -419,16 +433,20 @@
 
     def _setup_field_list(self):
         self.derived_field_list = []
+
         for field in self.field_info:
             try:
-                fd = self.field_info[field].get_dependencies(pf = self.parameter_file)
+                fd = self.field_info[field].get_dependencies(pf=self.parameter_file)
             except:
                 continue
+
             available = na.all([f in self.field_list for f in fd.requested])
             if available: self.derived_field_list.append(field)
+
         for field in self.field_list:
             if field not in self.derived_field_list:
                 self.derived_field_list.append(field)
+
         if self.parameter_file.use_particles:
             # We know which particle fields will exist -- pending further
             # changes in the future.
@@ -446,8 +464,10 @@
                         convert_function=cf, take_log=False,
                         particle_type=True)
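
(A note on the hunk above: _setup_field_list probes every known field's
dependencies and only advertises a derived field when everything in
fd.requested is present in field_list, so derived_field_list reflects what
this particular output can actually compute. The bare "except: continue"
silently drops any field whose dependency check fails for any reason.)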
 
+    ### TODO: check if this can be removed completely
     def _count_grids(self):
-        """this is already provided in 
+        """
+        this is already provided in ???
 
         """
         pass
@@ -477,13 +497,12 @@
         self._data_file = None
         self._data_mode = None
         self._max_locations = {}
-    
+
 class CastroLevel:
     def __init__(self, level, ngrids):
         self.level = level
         self.ngrids = ngrids
         self.grids = []
-    
 
 class CastroStaticOutput(StaticOutput):
     """
@@ -524,8 +543,9 @@
                               self._fieldinfo_fallback)
 
         # These should maybe not be hardcoded?
+        ### TODO: this.
         self.parameters["HydroMethod"] = 'castro' # always PPM DE
-        self.parameters["Time"] = 1. # default unit is 1...
+        self.parameters["Time"] = 1.0 # default unit is 1...
         self.parameters["DualEnergyFormalism"] = 0 # always off.
         self.parameters["EOSType"] = -1 # default
 
@@ -542,13 +562,17 @@
         # fill our args
         pname = args[0].rstrip("/")
         dn = os.path.dirname(pname)
-        if len(args) > 1: kwargs['paramFilename'] = args[1]
+        if len(args) > 1:
+            kwargs['paramFilename'] = args[1]
+
         pfname = kwargs.get("paramFilename", os.path.join(dn, "inputs"))
 
         # We check for the job_info file's existence because this is currently
         # what distinguishes Castro data from MAESTRO data.
+        ### ^ that is nuts
         pfn = os.path.join(pfname)
-        if not os.path.exists(pfn): return False
+        if not os.path.exists(pfn):
+            return False
         castro = any(("castro." in line for line in open(pfn)))
         maestro = os.path.exists(os.path.join(pname, "job_info"))
         orion = (not castro) and (not maestro)
@@ -556,35 +580,37 @@
         
     def _parse_parameter_file(self):
         """
-        Parses the parameter file and establishes the various
-        dictionaries.
+        Parses the parameter file and establishes the various dictionaries.
+
         """
+        # Boxlib madness
         self.fullplotdir = os.path.abspath(self.parameter_filename)
         self._parse_header_file()
-        self.parameter_filename = self._localize(
-                self.__ipfn, 'inputs')
-        self.fparameter_filename = self._localize(
-                self.fparameter_filename, 'probin')
+        self.parameter_filename = self._localize(self.__ipfn, 'inputs')
+        self.fparameter_filename = self._localize(self.fparameter_filename, 'probin')
         if os.path.isfile(self.fparameter_filename):
             self._parse_fparameter_file()
             for param in self.fparameters:
                 if castro2enzoDict.has_key(param):
-                    self.parameters[castro2enzoDict[param]]=self.fparameters[param]
+                    self.parameters[castro2enzoDict[param]] = self.fparameters[param]
+
         # Let's read the file
-        self.unique_identifier = \
-            int(os.stat(self.parameter_filename)[ST_CTIME])
+        self.unique_identifier = int(os.stat(self.parameter_filename)[ST_CTIME])
         lines = open(self.parameter_filename).readlines()
         self.use_particles = False
-        for lineI, line in enumerate(lines):
+
+        for line in lines:
             if line.find("#") >= 1: # Keep the commented lines...
-                line=line[:line.find("#")]
-            line=line.strip().rstrip()
+                line = line[:line.find("#")]
+            line = line.strip().rstrip()
             if len(line) < 2 or line.find("#") == 0: # ...but skip comments
                 continue
+
             try:
                 param, vals = map(strip, map(rstrip, line.split("=")))
             except ValueError:
                 mylog.error("ValueError: '%s'", line)
+
             if castro2enzoDict.has_key(param):
                 paramName = castro2enzoDict[param]
                 t = map(parameterDict[paramName], vals.split())
@@ -595,13 +621,10 @@
                         self.parameters[paramName] = t[0]
                     else:
                         self.parameters[paramName] = t
-                
             elif param.startswith("geometry.prob_hi"):
-                self.domain_right_edge = \
-                    na.array([float(i) for i in vals.split()])
+                self.domain_right_edge = na.array([float(i) for i in vals.split()])
             elif param.startswith("geometry.prob_lo"):
-                self.domain_left_edge = \
-                    na.array([float(i) for i in vals.split()])
+                self.domain_left_edge = na.array([float(i) for i in vals.split()])
             elif param.startswith("particles.write_in_plotfile"):
                 self.use_particles = boxlib_bool_to_int(vals)
 
@@ -610,33 +633,38 @@
         self.domain_dimensions = self.parameters["TopGridDimensions"]
         self.refine_by = self.parameters.get("RefineBy", 2)
 
-        if self.parameters.has_key("ComovingCoordinates") and bool(self.parameters["ComovingCoordinates"]):
+        if (self.parameters.has_key("ComovingCoordinates") and
+            bool(self.parameters["ComovingCoordinates"])):
             self.cosmological_simulation = 1
             self.omega_lambda = self.parameters["CosmologyOmegaLambdaNow"]
             self.omega_matter = self.parameters["CosmologyOmegaMatterNow"]
             self.hubble_constant = self.parameters["CosmologyHubbleConstantNow"]
-            a_file = open(os.path.join(self.fullplotdir,'comoving_a'))
+
+            # Stupid that we have to read a separate file for this :/
+            a_file = open(os.path.join(self.fullplotdir, "comoving_a"))
             line = a_file.readline().strip()
             a_file.close()
-            self.parameters["CosmologyCurrentRedshift"] = 1/float(line) - 1
+
+            self.parameters["CosmologyCurrentRedshift"] = 1 / float(line) - 1
             self.cosmological_scale_factor = float(line)
             self.current_redshift = self.parameters["CosmologyCurrentRedshift"]
         else:
+            ### TODO: make these defaults automatic
             self.current_redshift = self.omega_lambda = self.omega_matter = \
                 self.hubble_constant = self.cosmological_simulation = 0.0
 
     def _parse_fparameter_file(self):
         """
-        Parses the fortran parameter file for Castro. Most of this will
-        be useless, but this is where it keeps mu = mass per
-        particle/m_hydrogen.
+        Parses the fortran parameter file for Castro. Most of this will be
+        useless, but this is where it keeps mu = mass per particle/m_hydrogen.
+
         """
         lines = open(self.fparameter_filename).readlines()
         for line in lines:
             if line.count("=") == 1:
                 param, vals = map(strip, map(rstrip, line.split("=")))
                 if vals.count("'") == 0:
-                    t = map(float,[a.replace('D','e').replace('d','e') for a in vals.split()]) # all are floating point.
+                    t = map(float, [a.replace('D','e').replace('d','e') for a in vals.split()]) # all are floating point.
                 else:
                     t = vals.split()
                 if len(t) == 1:
@@ -646,47 +674,50 @@
 
     def _parse_header_file(self):
         """
-        Parses the BoxLib header file to get any parameters stored
-        there. Hierarchy information is read out of this file in
-        CastroHierarchy. 
+        Parses the BoxLib header file to get any parameters stored there.
+        Hierarchy information is read out of this file in CastroHierarchy. 
 
         Currently, only Time is read here.
+
         """
-        header_file = open(os.path.join(self.fullplotdir,'Header'))
+        header_file = open(os.path.join(self.fullplotdir, "Header"))
         lines = header_file.readlines()
         header_file.close()
         n_fields = int(lines[1])
-        self.current_time = float(lines[3+n_fields])
+        self.current_time = float(lines[3 + n_fields])
 
-
-                
     def _set_units(self):
         """
-        Generates the conversion to various physical _units based on the parameter file
+        Generates the conversion to various physical _units based on the
+        parameter file.
+
         """
         self.units = {}
         self.time_units = {}
+
         if len(self.parameters) == 0:
             self._parse_parameter_file()
+
         if self.cosmological_simulation:
-            cf = 1e5*(self.cosmological_scale_factor)
+            cf = 1e5 * self.cosmological_scale_factor   # Where does the 1e5 come from?
             for ax in 'xyz':
                 self.units['particle_velocity_%s' % ax] = cf
-            self.units['particle_mass'] = 1.989e33
+            self.units['particle_mass'] = 1.989e33  ### TODO: Make a global solar mass def
+
         mylog.warning("Setting 1.0 in code units to be 1.0 cm")
         if not self.has_key("TimeUnits"):
-            mylog.warning("No time units.  Setting 1.0 = 1 second.")
+            mylog.warning("No time units. Setting 1.0 = 1 second.")
             self.conversion_factors["Time"] = 1.0
         for unit in mpc_conversion.keys():
             self.units[unit] = mpc_conversion[unit] / mpc_conversion["cm"]
-        
+
         self.conversion_factors = defaultdict(lambda: 1.0)
         self.time_units['1'] = 1
         self.units['1'] = 1.0
         self.units['unitary'] = 1.0 / (self.domain_right_edge - self.domain_left_edge).max()
         seconds = 1 #self["Time"]
-        self.time_units['years'] = seconds / (365*3600*24.0)
-        self.time_units['days']  = seconds / (3600*24.0)
+        self.time_units['years'] = seconds / (365 * 3600 * 24.0)
+        self.time_units['days']  = seconds / (3600 * 24.0)
         for key in yt2castroFieldsDict:
             self.conversion_factors[key] = 1.0
         for key in castro_particle_field_names:
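
(The renames in this changeset, __calculate_grid_dimensions to
_calculate_grid_dimensions and __setup_grid_tree to _setup_grid_tree, drop
Python's double-underscore name mangling. A minimal sketch of the difference,
with illustrative class names only:

    class Base(object):
        def __helper(self):    # mangled to _Base__helper at class creation
            return "base"
        def _helper(self):     # single underscore: convention only
            return "base"

    class Child(Base):
        def __helper(self):    # becomes _Child__helper; does NOT override
            return "child"
        def _helper(self):     # overrides Base._helper as usual
            return "child"

With the single-underscore form, frontend subclasses can override these
hierarchy helpers normally.)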


http://bitbucket.org/yt_analysis/yt/changeset/afaaf1cdf0da/
changeset:   afaaf1cdf0da
branch:      deliberate_fields
user:        caseywstark
date:        2011-06-21 23:36:36
summary:     Undo startindex rename
affected #:  1 file (-1 bytes)

--- a/yt/frontends/castro/data_structures.py	Tue Jun 21 14:30:54 2011 -0700
+++ b/yt/frontends/castro/data_structures.py	Tue Jun 21 14:36:36 2011 -0700
@@ -73,7 +73,7 @@
         self.index = index
         self.level = level
 
-    def get_global_start_index(self):
+    def get_global_startindex(self):
         return self.start_index
 
     def _prepare_grid(self):


http://bitbucket.org/yt_analysis/yt/changeset/99fa48e6f235/
changeset:   99fa48e6f235
branch:      deliberate_fields
user:        caseywstark
date:        2011-06-22 02:36:34
summary:     pep'in (PEP 8 cleanup of tests/runall.py)
affected #:  1 file (-1 bytes)

--- a/tests/runall.py	Tue Jun 21 14:36:36 2011 -0700
+++ b/tests/runall.py	Tue Jun 21 17:36:34 2011 -0700
@@ -1,6 +1,6 @@
 from yt.config import ytcfg
-ytcfg["yt","loglevel"] = "50"
-ytcfg["yt","serialize"] = "False"
+ytcfg["yt", "loglevel"] = "50"
+ytcfg["yt", "serialize"] = "False"
 
 from yt.utilities.answer_testing.api import \
     RegressionTestRunner, clear_registry, create_test, \
@@ -26,6 +26,7 @@
 #
 
 cwd = os.path.dirname(globals().get("__file__", os.getcwd()))
+#cwd = os.path.abspath(os.path.dirname(sys.argv[0]))
 
 def load_tests(iname, idir):
     f, filename, desc = imp.find_module(iname, [idir])
@@ -48,50 +49,60 @@
 
 if __name__ == "__main__":
     mapping = find_and_initialize_tests()
-    test_storage_directory = ytcfg.get("yt","test_storage_dir")
+    test_storage_directory = ytcfg.get("yt", "test_storage_dir")
     my_hash = get_yt_version()
+
     parser = optparse.OptionParser()
     parser.add_option("-f", "--parameter-file", dest="parameter_file",
-                      default = os.path.join(cwd, "DD0010/moving7_0010"),
-                      help = "The parameter file value to feed to 'load' to test against",
-                      )
+                      default=os.path.join(cwd, "DD0010/moving7_0010"),
+                      help="The parameter file value to feed to 'load' to test against")
     parser.add_option("-l", "--list", dest="list_tests", action="store_true",
-                      default = False, help = "List all tests and then exit")
+                      default=False, help="List all tests and then exit")
     parser.add_option("-t", "--tests", dest="test_pattern", default="*",
-                      help = "The test name pattern to match.  Can include wildcards.")
+                      help="The test name pattern to match.  Can include wildcards.")
     parser.add_option("-o", "--output", dest="storage_dir",
                       default=test_storage_directory,
-                      help = "Base directory for storing test output.")
+                      help="Base directory for storing test output.")
     parser.add_option("-c", "--compare", dest="compare_name",
                       default=None,
-                      help = "The name against which we will compare")
+                      help="The name against which we will compare")
     parser.add_option("-n", "--name", dest="this_name",
                       default=my_hash,
-                      help = "The name we'll call this set of tests")
+                      help="The name we'll call this set of tests")
     opts, args = parser.parse_args()
+
     if opts.list_tests:
         print "\n    ".join(sorted(itertools.chain(*mapping.values())))
         sys.exit(0)
+
+    # Load the test pf and make sure it's good.
     pf = load(opts.parameter_file)
     if pf is None:
         print "Couldn't load the specified parameter file."
         sys.exit(1)
+
     # Now we modify our compare name and self name to include the pf.
     compare_id = opts.compare_name
-    if compare_id is not None: compare_id += "_%s_%s" % (pf, pf._hash())
+    if compare_id is not None:
+        compare_id += "_%s_%s" % (pf, pf._hash())
     this_id = opts.this_name + "_%s_%s" % (pf, pf._hash())
+
     rtr = RegressionTestRunner(this_id, compare_id,
-            results_path = opts.storage_dir,
-            compare_results_path = opts.storage_dir,
-            io_log = [opts.parameter_file])
+                               results_path=opts.storage_dir,
+                               compare_results_path=opts.storage_dir,
+                               io_log=[opts.parameter_file])
+
     tests_to_run = []
     for m, vals in mapping.items():
         print vals, opts.test_pattern
         new_tests = fnmatch.filter(vals, opts.test_pattern)
+
         if len(new_tests) == 0: continue
         tests_to_run += new_tests
         load_tests(m, cwd)
+
     for test_name in sorted(tests_to_run):
         rtr.run_test(test_name)
+
     for test_name, result in sorted(rtr.passed_tests.items()):
         print "TEST %s: %s" % (test_name, result)


http://bitbucket.org/yt_analysis/yt/changeset/48cd9943e268/
changeset:   48cd9943e268
branch:      deliberate_fields
user:        caseywstark
date:        2011-06-22 02:37:52
summary:     Super PEPy. Also fixed a missing import (NullFunc).
affected #:  3 files (-1 bytes)
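
The interesting functional change here is the friendlier KeyError in
FieldInfoContainer.__missing__. A minimal sketch of the dict-plus-fallback
lookup this container implements (the field names below are placeholders,
not real yt fields):

    from yt.data_objects.field_info_container import FieldInfoContainer

    universal = FieldInfoContainer()
    universal["Density"] = "universal definition"

    frontend = FieldInfoContainer.create_with_fallback(universal)
    frontend["FrontendOnly"] = "frontend definition"

    frontend["FrontendOnly"]  # found directly in the frontend container
    frontend["Density"]       # missing here, __missing__ defers to fallback
    frontend["NoSuchField"]   # misses both: KeyError("No field named NoSuchField")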

--- a/yt/data_objects/field_info_container.py	Tue Jun 21 17:36:34 2011 -0700
+++ b/yt/data_objects/field_info_container.py	Tue Jun 21 17:37:52 2011 -0700
@@ -26,27 +26,31 @@
 """
 
 import types
-import numpy as na
 import inspect
 import copy
 import itertools
 
+import numpy as na
+
 from yt.funcs import *
 
 class FieldInfoContainer(dict): # Resistance has utility
     """
     This is a generic field container.  It contains a list of potential derived
-    fields, all of which know how to act on a data object and return a value.  This
-    object handles converting units as well as validating the availability of a
-    given field.
+    fields, all of which know how to act on a data object and return a value.
+    This object handles converting units as well as validating the availability
+    of a given field.
+
     """
     fallback = None
-    def add_field(self, name, function = None, **kwargs):
+
+    def add_field(self, name, function=None, **kwargs):
         """
         Add a new field, along with supplemental metadata, to the list of
         available fields.  This respects a number of arguments, all of which
         are passed on to the constructor for
         :class:`~yt.data_objects.api.DerivedField`.
+
         """
         if function == None:
             def create_function(function):
@@ -62,7 +66,8 @@
         return self.fallback.has_key(key)
 
     def __missing__(self, key):
-        if self.fallback is None: raise KeyError
+        if self.fallback is None:
+            raise KeyError("No field named %s" % key)
         return self.fallback[key]
 
     @classmethod
@@ -128,22 +133,27 @@
     NumberOfParticles = 1
     _read_exception = None
     _id_offset = 0
+
     def __init__(self, nd = 16, pf = None, flat = False):
         self.nd = nd
         self.flat = flat
         self._spatial = not flat
         self.ActiveDimensions = [nd,nd,nd]
-        self.LeftEdge = [0.0,0.0,0.0]
-        self.RightEdge = [1.0,1.0,1.0]
+        self.LeftEdge = [0.0, 0.0, 0.0]
+        self.RightEdge = [1.0, 1.0, 1.0]
         self.dds = na.ones(3, "float64")
         self['dx'] = self['dy'] = self['dz'] = na.array([1.0])
         class fake_parameter_file(defaultdict):
             pass
+
         if pf is None:
+            # required attrs
             pf = fake_parameter_file(lambda: 1)
             pf.current_redshift = pf.omega_lambda = pf.omega_matter = \
                 pf.hubble_constant = pf.cosmological_simulation = 0.0
+
         self.pf = pf
+
         class fake_hierarchy(object):
             class fake_io(object):
                 def _read_data_set(io_self, data, field):
@@ -152,33 +162,35 @@
             io = fake_io()
             def get_smallest_dx(self):
                 return 1.0
+
         self.hierarchy = fake_hierarchy()
         self.requested = []
         self.requested_parameters = []
         if not self.flat:
             defaultdict.__init__(self,
-                lambda: na.ones((nd,nd,nd), dtype='float64')
-                + 1e-4*na.random.random((nd,nd,nd)))
+                lambda: na.ones((nd, nd, nd), dtype='float64')
+                + 1e-4*na.random.random((nd, nd, nd)))
         else:
             defaultdict.__init__(self, 
-                lambda: na.ones((nd*nd*nd), dtype='float64')
-                + 1e-4*na.random.random((nd*nd*nd)))
+                lambda: na.ones((nd * nd * nd), dtype='float64')
+                + 1e-4*na.random.random((nd * nd * nd)))
+
     def __missing__(self, item):
         FI = getattr(self.pf, "field_info", FieldInfo)
-        if FI.has_key(item) and \
-            FI[item]._function.func_name != '<lambda>':
+        if FI.has_key(item) and FI[item]._function.func_name != '<lambda>':
             try:
                 vv = FI[item](self)
             except NeedsGridType as exc:
                 ngz = exc.ghost_zones
-                nfd = FieldDetector(self.nd+ngz*2)
+                nfd = FieldDetector(self.nd + ngz * 2)
                 nfd._num_ghost_zones = ngz
                 vv = FI[item](nfd)
-                if ngz > 0: vv = vv[ngz:-ngz,ngz:-ngz,ngz:-ngz]
+                if ngz > 0: vv = vv[ngz:-ngz, ngz:-ngz, ngz:-ngz]
                 for i in nfd.requested:
                     if i not in self.requested: self.requested.append(i)
                 for i in nfd.requested_parameters:
-                    if i not in self.requested_parameters: self.requested_parameters.append(i)
+                    if i not in self.requested_parameters:
+                        self.requested_parameters.append(i)
             if vv is not None:
                 if not self.flat: self[item] = vv
                 else: self[item] = vv.ravel()
@@ -189,16 +201,15 @@
     def _read_data(self, field_name):
         self.requested.append(field_name)
         FI = getattr(self.pf, "field_info", FieldInfo)
-        if FI.has_key(field_name) and \
-           FI[field_name].particle_type:
+        if FI.has_key(field_name) and FI[field_name].particle_type:
             self.requested.append(field_name)
             return na.ones(self.NumberOfParticles)
         return defaultdict.__missing__(self, field_name)
 
     def get_field_parameter(self, param):
         self.requested_parameters.append(param)
-        if param in ['bulk_velocity','center','height_vector']:
-            return na.random.random(3)*1e-2
+        if param in ['bulk_velocity', 'center', 'height_vector']:
+            return na.random.random(3) * 1e-2
         else:
             return 0.0
     _num_ghost_zones = 0
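
(FieldDetector, reformatted above, deserves a gloss: it masquerades as an
nd**3 grid filled with ones plus a little noise, and records every field and
field parameter that a derived-field function asks for in self.requested and
self.requested_parameters. That recording is what dependency detection, and
hence _setup_field_list earlier in this mail, relies on.)
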
@@ -222,14 +233,15 @@
         :param function: is a function handle that defines the field
         :param convert_function: must convert to CGS, if it needs to be done
         :param units: is a mathtext-formatted string that describes the field
-        :param projected_units: if we display a projection, what should the units be?
+        :param projected_units: if we display a projection, what should the
+                                units be?
         :param take_log: describes whether the field should be logged
         :param validators: is a list of :class:`FieldValidator` objects
         :param particle_type: is this field based on particles?
         :param vector_field: describes the dimensionality of the field
         :param display_field: governs its appearance in the dropdowns in reason
-        :param not_in_all: is used for baryon fields from the data that are not in
-                           all the grids
+        :param not_in_all: is used for baryon fields from the data that are not
+                           in all the grids
         :param display_name: a name used in the plots
         :param projection_conversion: which unit should we multiply by in a
                                       projection?
@@ -276,9 +288,7 @@
         return e
 
     def get_units(self):
-        """
-        Return a string describing the units.
-        """
+        """ Return a string describing the units. """
         return self._units
 
     def get_projected_units(self):
@@ -288,9 +298,7 @@
         return self._projected_units
 
     def __call__(self, data):
-        """
-        Return the value of the field in a given *data* object.
-        """
+        """ Return the value of the field in a given *data* object. """
         ii = self.check_available(data)
         original_fields = data.keys() # Copy
         dd = self._function(self, data)


--- a/yt/data_objects/grid_patch.py	Tue Jun 21 17:36:34 2011 -0700
+++ b/yt/data_objects/grid_patch.py	Tue Jun 21 17:37:52 2011 -0700
@@ -25,12 +25,13 @@
 
 import exceptions
 import pdb
-import numpy as na
 import weakref
 
+import numpy as na
+
 from yt.funcs import *
+from yt.utilities.definitions import x_dict, y_dict
 
-from yt.utilities.definitions import x_dict, y_dict
 from .field_info_container import \
     NeedsGridType, \
     NeedsOriginalGrid, \
@@ -54,7 +55,8 @@
                  'start_index', 'filename', '__weakref__', 'dds',
                  '_child_mask', '_child_indices', '_child_index_mask',
                  '_parent_id', '_children_ids']
-    def __init__(self, id, filename = None, hierarchy = None):
+
+    def __init__(self, id, filename=None, hierarchy=None):
         self.data = {}
         self.field_parameters = {}
         self.id = id
@@ -67,24 +69,26 @@
         """
         Return the integer starting index for each dimension at the current
         level.
+
         """
         if self.start_index != None:
             return self.start_index
         if self.Parent == None:
-            iLE = self.LeftEdge - self.pf.domain_left_edge
-            start_index = iLE / self.dds
+            left = self.LeftEdge - self.pf.domain_left_edge
+            start_index = left / self.dds
             return na.rint(start_index).astype('int64').ravel()
+
         pdx = self.Parent.dds
         start_index = (self.Parent.get_global_startindex()) + \
-                       na.rint((self.LeftEdge - self.Parent.LeftEdge)/pdx)
-        self.start_index = (start_index*self.pf.refine_by).astype('int64').ravel()
+                       na.rint((self.LeftEdge - self.Parent.LeftEdge) / pdx)
+        self.start_index = (start_index * self.pf.refine_by).astype('int64').ravel()
         return self.start_index
 
-
     def get_field_parameter(self, name, default=None):
         """
-        This is typically only used by derived field functions, but
-        it returns parameters used to generate fields.
+        This is typically only used by derived field functions, but it returns
+        parameters used to generate fields.
+
         """
         if self.field_parameters.has_key(name):
             return self.field_parameters[name]
@@ -95,19 +99,19 @@
         """
         Here we set up dictionaries that get passed up and down and ultimately
         to derived fields.
+
         """
         self.field_parameters[name] = val
 
     def has_field_parameter(self, name):
-        """
-        Checks if a field parameter is set.
-        """
+        """ Checks if a field parameter is set. """
         return self.field_parameters.has_key(name)
 
     def convert(self, datatype):
         """
-        This will attempt to convert a given unit to cgs from code units.
-        It either returns the multiplicative factor or throws a KeyError.
+        This will attempt to convert a given unit to cgs from code units. It
+        either returns the multiplicative factor or throws a KeyError.
+
         """
         return self.pf[datatype]
 
@@ -115,7 +119,7 @@
         # We'll do this the slow way to be clear what's going on
         s = "%s (%s): " % (self.__class__.__name__, self.pf)
         s += ", ".join(["%s=%s" % (i, getattr(self,i))
-                       for i in self._con_args])
+                        for i in self._con_args])
         return s
 
     def _generate_field(self, field):
@@ -129,7 +133,7 @@
                 f_gz = ngt_exception.fields
                 gz_grid = self.retrieve_ghost_zones(n_gz, f_gz, smoothed=True)
                 temp_array = self.pf.field_info[field](gz_grid)
-                sl = [slice(n_gz,-n_gz)] * 3
+                sl = [slice(n_gz, -n_gz)] * 3
                 self[field] = temp_array[sl]
             else:
                 self[field] = self.pf.field_info[field](self)
@@ -140,32 +144,24 @@
         return (key in self.data)
 
     def __getitem__(self, key):
-        """
-        Returns a single field.  Will add if necessary.
-        """
+        """ Returns a single field.  Will add if necessary. """
         if not self.data.has_key(key):
             self.get_data(key)
         return self.data[key]
 
     def __setitem__(self, key, val):
-        """
-        Sets a field to be some other value.
-        """
+        """ Sets a field to be some other value. """
         self.data[key] = val
 
     def __delitem__(self, key):
-        """
-        Deletes a field
-        """
+        """ Deletes a field. """
         del self.data[key]
 
     def keys(self):
         return self.data.keys()
     
     def get_data(self, field):
-        """
-        Returns a field or set of fields for a key or set of keys
-        """
+        """ Returns a field or set of fields for a key or set of keys. """
         if not self.data.has_key(field):
             if field in self.hierarchy.field_list:
                 conv_factor = 1.0
@@ -174,7 +170,7 @@
                 if self.pf.field_info[field].particle_type and \
                    self.NumberOfParticles == 0:
                     # because this gets upcast to float
-                    self[field] = na.array([],dtype='int64')
+                    self[field] = na.array([], dtype='int64')
                     return self.data[field]
                 try:
                     temp = self.hierarchy.io.pop(self, field)
@@ -192,14 +188,14 @@
 
     def _setup_dx(self):
         # So first we figure out what the index is.  We don't assume
-        # that dx=dy=dz , at least here.  We probably do elsewhere.
+        # that dx=dy=dz, at least here.  We probably do elsewhere.
         id = self.id - self._id_offset
         if self.Parent is not None:
             self.dds = self.Parent.dds / self.pf.refine_by
         else:
             LE, RE = self.hierarchy.grid_left_edge[id,:], \
                      self.hierarchy.grid_right_edge[id,:]
-            self.dds = na.array((RE-LE)/self.ActiveDimensions)
+            self.dds = na.array((RE - LE) / self.ActiveDimensions)
         if self.pf.dimensionality < 2: self.dds[1] = 1.0
         if self.pf.dimensionality < 3: self.dds[2] = 1.0
         self.data['dx'], self.data['dy'], self.data['dz'] = self.dds
@@ -222,6 +218,7 @@
         Generate a mask that shows which cells overlap with arbitrary arrays
         *LE* and *RE*) of edges, typically grids, along *axis*.
         Use algorithm described at http://www.gamedev.net/reference/articles/article735.asp
+
         """
         x = x_dict[axis]
         y = y_dict[axis]
@@ -239,8 +236,9 @@
 
     def clear_data(self):
         """
-        Clear out the following things: child_mask, child_indices,
-        all fields, all field parameters.
+        Clear out the following things: child_mask, child_indices, all fields,
+        all field parameters.
+
         """
         self._del_child_mask()
         self._del_child_indices()
@@ -251,9 +249,7 @@
         return self._child_mask, self._child_indices
 
     def _prepare_grid(self):
-        """
-        Copies all the appropriate attributes from the hierarchy
-        """
+        """ Copies all the appropriate attributes from the hierarchy. """
         # This is definitely the slowest part of generating the hierarchy
         # Now we give it pointers to all of its attributes
         # Note that to keep in line with Enzo, we have broken PEP-8
@@ -265,33 +261,27 @@
         h.grid_levels[my_ind, 0] = self.Level
         # This might be needed for streaming formats
         #self.Time = h.gridTimes[my_ind,0]
-        self.NumberOfParticles = h.grid_particle_count[my_ind,0]
+        self.NumberOfParticles = h.grid_particle_count[my_ind, 0]
 
     def __len__(self):
         return na.prod(self.ActiveDimensions)
 
     def find_max(self, field):
-        """
-        Returns value, index of maximum value of *field* in this gird
-        """
-        coord1d=(self[field]*self.child_mask).argmax()
-        coord=na.unravel_index(coord1d, self[field].shape)
+        """ Returns value, index of maximum value of *field* in this grid. """
+        coord1d = (self[field] * self.child_mask).argmax()
+        coord = na.unravel_index(coord1d, self[field].shape)
         val = self[field][coord]
         return val, coord
 
     def find_min(self, field):
-        """
-        Returns value, index of minimum value of *field* in this gird
-        """
-        coord1d=(self[field]*self.child_mask).argmin()
-        coord=na.unravel_index(coord1d, self[field].shape)
+        """ Returns value, index of minimum value of *field* in this grid. """
+        coord1d = (self[field] * self.child_mask).argmin()
+        coord = na.unravel_index(coord1d, self[field].shape)
         val = self[field][coord]
         return val, coord
 
     def get_position(self, index):
-        """
-        Returns center position of an *index*
-        """
+        """ Returns center position of an *index*. """
         pos = (index + 0.5) * self.dds + self.LeftEdge
         return pos
 
@@ -299,19 +289,18 @@
         """
         Clears all datafields from memory and calls
         :meth:`clear_derived_quantities`.
+
         """
         for key in self.keys():
             del self.data[key]
         del self.data
-        if hasattr(self,"retVal"):
+        if hasattr(self, "retVal"):
             del self.retVal
         self.data = {}
         self.clear_derived_quantities()
 
     def clear_derived_quantities(self):
-        """
-        Clears coordinates, child_indices, child_mask.
-        """
+        """ Clears coordinates, child_indices, child_mask. """
         # Access the property raw-values here
         del self.child_mask
         del self.child_ind
@@ -366,8 +355,8 @@
     def __fill_child_mask(self, child, mask, tofill):
         rf = self.pf.refine_by
         gi, cgi = self.get_global_startindex(), child.get_global_startindex()
-        startIndex = na.maximum(0, cgi/rf - gi)
-        endIndex = na.minimum( (cgi+child.ActiveDimensions)/rf - gi,
+        startIndex = na.maximum(0, cgi / rf - gi)
+        endIndex = na.minimum((cgi + child.ActiveDimensions) / rf - gi,
                               self.ActiveDimensions)
         endIndex += (startIndex == endIndex)
         mask[startIndex[0]:endIndex[0],
@@ -377,7 +366,8 @@
     def __generate_child_mask(self):
         """
         Generates self.child_mask, which is zero where child grids exist (and
-        thus, where higher resolution data is available.)
+        thus, where higher resolution data is available).
+
         """
         self._child_mask = na.ones(self.ActiveDimensions, 'int32')
         for child in self.Children:
@@ -388,6 +378,7 @@
         """
         Generates self.child_index_mask, which is -1 where there is no child,
         and otherwise has the ID of the grid that resides there.
+
         """
         self._child_index_mask = na.zeros(self.ActiveDimensions, 'int32') - 1
         for child in self.Children:
@@ -398,10 +389,10 @@
         if self.__coords == None: self._generate_coords()
         return self.__coords
 
-    def _set_coords(self, newC):
+    def _set_coords(self, new_c):
         if self.__coords != None:
             mylog.warning("Overriding coords attribute!  This is probably unwise!")
-        self.__coords = newC
+        self.__coords = new_c
 
     def _del_coords(self):
         del self.__coords
@@ -409,12 +400,12 @@
 
     def _generate_coords(self):
         """
-        Creates self.coords, which is of dimensions (3,ActiveDimensions)
+        Creates self.coords, which is of dimensions (3, ActiveDimensions)
+
         """
-        #print "Generating coords"
         ind = na.indices(self.ActiveDimensions)
-        LE = na.reshape(self.LeftEdge,(3,1,1,1))
-        self['x'], self['y'], self['z'] = (ind+0.5)*self.dds+LE
+        left_shaped = na.reshape(self.LeftEdge, (3, 1, 1, 1))
+        self['x'], self['y'], self['z'] = (ind + 0.5) * self.dds + left_shaped
 
     child_mask = property(fget=_get_child_mask, fdel=_del_child_mask)
     child_index_mask = property(fget=_get_child_index_mask, fdel=_del_child_index_mask)
@@ -425,44 +416,31 @@
         # We will attempt this by creating a datacube that is exactly bigger
         # than the grid by nZones*dx in each direction
         nl = self.get_global_startindex() - n_zones
-        nr = nl + self.ActiveDimensions + 2*n_zones
+        nr = nl + self.ActiveDimensions + 2 * n_zones
         new_left_edge = nl * self.dds + self.pf.domain_left_edge
         new_right_edge = nr * self.dds + self.pf.domain_left_edge
+
         # Something different needs to be done for the root grid, though
         level = self.Level
         if all_levels:
             level = self.hierarchy.max_level + 1
         args = (level, new_left_edge, new_right_edge)
-        kwargs = {'dims': self.ActiveDimensions + 2*n_zones,
-                  'num_ghost_zones':n_zones,
-                  'use_pbar':False, 'fields':fields}
+        kwargs = {'dims': self.ActiveDimensions + 2 * n_zones,
+                  'num_ghost_zones': n_zones,
+                  'use_pbar': False, 'fields': fields}
+
         if smoothed:
-            #cube = self.hierarchy.smoothed_covering_grid(
-            #    level, new_left_edge, new_right_edge, **kwargs)
-            cube = self.hierarchy.si_covering_grid(
-                level, new_left_edge, **kwargs)
+            cube = self.hierarchy.si_covering_grid(level, new_left_edge,
+                                                   **kwargs)
         else:
-            cube = self.hierarchy.covering_grid(
-                level, new_left_edge, **kwargs)
+            cube = self.hierarchy.covering_grid(level, new_left_edge, **kwargs)
+
         return cube
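
(Arithmetic check on the hunk above: with nl = start - n_zones and
nr = nl + ActiveDimensions + 2 * n_zones, the cube spans the grid's index
range padded by exactly n_zones cells on every side; multiplying by dds and
offsetting by domain_left_edge converts those integer indices back into
physical edges.)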
 
-    def get_vertex_centered_data(self, field, smoothed=True,
-                                 no_ghost=False):
-        if not no_ghost:
-            cg = self.retrieve_ghost_zones(1, field, smoothed=smoothed)
-            # We have two extra zones in every direction
-            new_field = na.zeros(self.ActiveDimensions + 1, dtype='float64')
-            na.add(new_field, cg[field][1: ,1: ,1: ], new_field)
-            na.add(new_field, cg[field][:-1,1: ,1: ], new_field)
-            na.add(new_field, cg[field][1: ,:-1,1: ], new_field)
-            na.add(new_field, cg[field][1: ,1: ,:-1], new_field)
-            na.add(new_field, cg[field][:-1,1: ,:-1], new_field)
-            na.add(new_field, cg[field][1: ,:-1,:-1], new_field)
-            na.add(new_field, cg[field][:-1,:-1,1: ], new_field)
-            na.add(new_field, cg[field][:-1,:-1,:-1], new_field)
-            na.multiply(new_field, 0.125, new_field)
-        else:
-            new_field = na.zeros(self.ActiveDimensions + 1, dtype='float64')
+    def get_vertex_centered_data(self, field, smoothed=True, no_ghost=False):
+        new_field = na.zeros(self.ActiveDimensions + 1, dtype='float64')
+
+        if no_ghost:
             of = self[field]
             new_field[:-1,:-1,:-1] += of
             new_field[:-1,:-1,1:] += of
@@ -475,16 +453,26 @@
             na.multiply(new_field, 0.125, new_field)
             if self.pf.field_info[field].take_log:
                 new_field = na.log10(new_field)
-            
+
             new_field[:,:, -1] = 2.0*new_field[:,:,-2] - new_field[:,:,-3]
             new_field[:,:, 0]  = 2.0*new_field[:,:,1] - new_field[:,:,2]
-
             new_field[:,-1, :] = 2.0*new_field[:,-2,:] - new_field[:,-3,:]
             new_field[:,0, :]  = 2.0*new_field[:,1,:] - new_field[:,2,:]
-
             new_field[-1,:,:] = 2.0*new_field[-2,:,:] - new_field[-3,:,:]
             new_field[0,:,:]  = 2.0*new_field[1,:,:] - new_field[2,:,:]
+
             if self.pf.field_info[field].take_log:
                 na.power(10.0, new_field, new_field)
+        else:
+            cg = self.retrieve_ghost_zones(1, field, smoothed=smoothed)
+            na.add(new_field, cg[field][1: ,1: ,1: ], new_field)
+            na.add(new_field, cg[field][:-1,1: ,1: ], new_field)
+            na.add(new_field, cg[field][1: ,:-1,1: ], new_field)
+            na.add(new_field, cg[field][1: ,1: ,:-1], new_field)
+            na.add(new_field, cg[field][:-1,1: ,:-1], new_field)
+            na.add(new_field, cg[field][1: ,:-1,:-1], new_field)
+            na.add(new_field, cg[field][:-1,:-1,1: ], new_field)
+            na.add(new_field, cg[field][:-1,:-1,:-1], new_field)
+            na.multiply(new_field, 0.125, new_field)
+
         return new_field
-
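
(In the reordered get_vertex_centered_data, each vertex value is the mean of
the eight neighboring cell-centered values, hence the eight shifted adds and
the final multiply by 0.125. In the no_ghost branch the boundary faces are
then filled by linear extrapolation, f[-1] = 2*f[-2] - f[-3], done in log10
space when the field is logged; the other branch gets real boundary data from
retrieve_ghost_zones instead.)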


--- a/yt/data_objects/hierarchy.py	Tue Jun 21 17:36:34 2011 -0700
+++ b/yt/data_objects/hierarchy.py	Tue Jun 21 17:37:52 2011 -0700
@@ -35,12 +35,12 @@
 
 from yt.arraytypes import blankRecordArray
 from yt.config import ytcfg
+from yt.data_objects.field_info_container import NullFunc
 from yt.utilities.definitions import MAXLEVEL
 from yt.utilities.io_handler import io_registry
 from yt.utilities.parallel_tools.parallel_analysis_interface import \
     ParallelAnalysisInterface, parallel_splitter
-from object_finding_mixin import \
-    ObjectFindingMixin
+from object_finding_mixin import ObjectFindingMixin
 
 from .data_containers import data_object_registry
 


http://bitbucket.org/yt_analysis/yt/changeset/bb6b942c68ff/
changeset:   bb6b942c68ff
branch:      deliberate_fields
user:        caseywstark
date:        2011-06-22 02:39:32
summary:     Adding KnownField Containers
affected #:  11 files (-1 bytes)
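
The pattern this changeset repeats across the frontends is: one plain
container for fields known to exist verbatim in the output files, one
fallback-backed container for derived fields, and a translation loop bridging
yt's canonical names to the on-disk names. A condensed sketch (the import
location of TranslationFunc is assumed, and the field names are illustrative):

    from yt.data_objects.field_info_container import \
        FieldInfoContainer, FieldInfo, NullFunc, TranslationFunc

    # Fields that may appear on disk in this frontend's outputs.
    KnownCastroFields = FieldInfoContainer()
    add_castro_field = KnownCastroFields.add_field

    # Derived fields, falling back to the universal FieldInfo registry.
    CastroFieldInfo = FieldInfoContainer.create_with_fallback(FieldInfo)
    add_field = CastroFieldInfo.add_field

    add_castro_field("density", function=NullFunc, take_log=True,
                     units=r"\rm{g}/\rm{cm}^3")

    # Register a derived alias only when the on-disk field is known.
    translation_dict = {"Density": "density"}
    for mine, theirs in translation_dict.items():
        if KnownCastroFields.has_key(theirs):
            add_field(theirs, function=TranslationFunc(mine),
                      take_log=KnownCastroFields[theirs].take_log)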

--- a/yt/frontends/art/fields.py	Tue Jun 21 17:37:52 2011 -0700
+++ b/yt/frontends/art/fields.py	Tue Jun 21 17:39:32 2011 -0700
@@ -53,8 +53,6 @@
                     "GasEnergy":"GasEnergy"
                    }
 
-def _generate_translation(mine, theirs):
-
 for f,v in translation_dict.items():
     add_art_field(v, function=NullFunc, take_log=False,
                   validators = [ValidateDataField(v)])


--- a/yt/frontends/castro/fields.py	Tue Jun 21 17:37:52 2011 -0700
+++ b/yt/frontends/castro/fields.py	Tue Jun 21 17:39:32 2011 -0700
@@ -24,8 +24,6 @@
 
 """
 
-from yt.utilities.physical_constants import \
-    mh, kboltz
 from yt.data_objects.field_info_container import \
     FieldInfoContainer, \
     FieldInfo, \
@@ -37,20 +35,34 @@
     ValidateSpatial, \
     ValidateGridType
 import yt.data_objects.universal_fields
+from yt.utilities.physical_constants import mh, kboltz
 
+translation_dict = {
+    "x-velocity": "xvel",
+    "y-velocity": "yvel",
+    "z-velocity": "zvel",
+    "Density": "density",
+    "Total_Energy": "eden",
+    "Temperature": "temperature",
+    "x-momentum": "xmom",
+    "y-momentum": "ymom",
+    "z-momentum": "zmom"
+}
+
+# Setup containers for fields possibly in the output files
+KnownCastroFields = FieldInfoContainer()
+add_castro_field = KnownCastroFields.add_field
+
+# and always derived ones
 CastroFieldInfo = FieldInfoContainer.create_with_fallback(FieldInfo)
 add_field = CastroFieldInfo.add_field
 
-KnownCastroFields = FieldInfoContainer()
-add_castro_field = KnownCastroFields.add_field
-
-# def _convertDensity(data):
-#     return data.convert("Density")
+# Start adding fields
 add_castro_field("density", function=NullFunc, take_log=True,
                  units=r"\rm{g}/\rm{cm}^3")
 
-CastroFieldInfo["density"]._projected_units = r"\rm{g}/\rm{cm}^2"
-#CastroFieldInfo["density"]._convert_function=_convertDensity
+# fix projected units
+KnownCastroFields["density"]._projected_units = r"\rm{g}/\rm{cm}^2"
 
 add_castro_field("eden", function=NullFunc, take_log=True,
                  validators = [ValidateDataField("eden")],
@@ -68,20 +80,11 @@
                  validators = [ValidateDataField("zmom")],
                  units=r"\rm{g}/\rm{cm^2\ s}")
 
-translation_dict = {"x-velocity": "xvel",
-                    "y-velocity": "yvel",
-                    "z-velocity": "zvel",
-                    "Density": "density",
-                    "Total_Energy": "eden",
-                    "Temperature": "temperature",
-                    "x-momentum": "xmom",
-                    "y-momentum": "ymom",
-                    "z-momentum": "zmom"
-                   }
-
-for f, v in translation_dict.items():
-    add_field(theirs, function=TranslationFunc(mine),
-              take_log=KnownCastroFields[theirs].take_log)
+# Now populate derived fields
+for mine, theirs in translation_dict.items():
+    if KnownCastroFields.has_key(theirs):
+        add_field(theirs, function=TranslationFunc(mine),
+                  take_log=KnownCastroFields[theirs].take_log)
 
 # Now fallbacks, in case these fields are not output
 def _xVelocity(field, data):


--- a/yt/frontends/chombo/fields.py	Tue Jun 21 17:37:52 2011 -0700
+++ b/yt/frontends/chombo/fields.py	Tue Jun 21 17:39:32 2011 -0700
@@ -26,6 +26,7 @@
 from yt.data_objects.field_info_container import \
     FieldInfoContainer, \
     FieldInfo, \
+    NullFunc, \
     ValidateParameter, \
     ValidateDataField, \
     ValidateProperty, \
@@ -33,6 +34,9 @@
     ValidateGridType
 import yt.data_objects.universal_fields
 
+KnownChomboFields = FieldInfoContainer()
+add_chombo_field = KnownChomboFields.add_field
+
 ChomboFieldInfo = FieldInfoContainer.create_with_fallback(FieldInfo)
 add_chombo_field = ChomboFieldInfo.add_field
 


--- a/yt/frontends/enzo/fields.py	Tue Jun 21 17:37:52 2011 -0700
+++ b/yt/frontends/enzo/fields.py	Tue Jun 21 17:39:32 2011 -0700
@@ -46,29 +46,33 @@
 KnownEnzoFields = FieldInfoContainer()
 add_enzo_field = KnownEnzoFields.add_field
 
-_speciesList = ["HI","HII","Electron",
-               "HeI","HeII","HeIII",
-               "H2I","H2II","HM",
-               "DI","DII","HDI","Metal","PreShock"]
-_speciesMass = {"HI":1.0,"HII":1.0,"Electron":1.0,
-                "HeI":4.0,"HeII":4.0,"HeIII":4.0,
-                "H2I":2.0,"H2II":2.0,"HM":1.0,
-                "DI":2.0,"DII":2.0,"HDI":3.0}
+_speciesList = ["HI", "HII", "Electron",
+                "HeI", "HeII", "HeIII",
+                "H2I", "H2II", "HM",
+                "DI", "DII", "HDI", "Metal", "PreShock"]
+_speciesMass = {"HI": 1.0, "HII": 1.0, "Electron": 1.0,
+                "HeI": 4.0, "HeII": 4.0, "HeIII": 4.0,
+                "H2I": 2.0, "H2II": 2.0, "HM": 1.0,
+                "DI": 2.0, "DII": 2.0, "HDI": 3.0}
 
 def _SpeciesComovingDensity(field, data):
     sp = field.name.split("_")[0] + "_Density"
     ef = (1.0 + data.pf.current_redshift)**3.0
-    return data[sp]/ef
+    return data[sp] / ef
+
 def _SpeciesFraction(field, data):
     sp = field.name.split("_")[0] + "_Density"
-    return data[sp]/data["Density"]
+    return data[sp] / data["Density"]
+
 def _SpeciesMass(field, data):
     sp = field.name.split("_")[0] + "_Density"
     return data[sp] * data["CellVolume"]
+
 def _SpeciesNumberDensity(field, data):
     species = field.name.split("_")[0]
     sp = field.name.split("_")[0] + "_Density"
-    return data[sp]/_speciesMass[species]
+    return data[sp] / _speciesMass[species]
+
 def _convertCellMassMsun(data):
     return 5.027854e-34 # g^-1
 def _ConvertNumberDensity(data):
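
(The magic constant in _convertCellMassMsun is just 1 / (1.989e33 g), i.e.
the grams-to-solar-masses factor; compare the hardcoded
self.units['particle_mass'] = 1.989e33 in the Castro changeset above.)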


--- a/yt/frontends/flash/data_structures.py	Tue Jun 21 17:37:52 2011 -0700
+++ b/yt/frontends/flash/data_structures.py	Tue Jun 21 17:39:32 2011 -0700
@@ -40,10 +40,8 @@
 from yt.utilities.io_handler import \
     io_registry
 
-from .fields import \
-    FLASHFieldInfo, add_flash_field, KnownFLASHFields
-from yt.data_objects.field_info_container import \
-    FieldInfoContainer, NullFunc
+from .fields import FLASHFieldInfo, add_flash_field, KnownFLASHFields
+from yt.data_objects.field_info_container import FieldInfoContainer, NullFunc
 
 class FLASHGrid(AMRGridPatch):
     _id_offset = 1


--- a/yt/frontends/flash/fields.py	Tue Jun 21 17:37:52 2011 -0700
+++ b/yt/frontends/flash/fields.py	Tue Jun 21 17:39:32 2011 -0700
@@ -33,10 +33,12 @@
     ValidateGridType
 import yt.data_objects.universal_fields
 
+
+KnownFLASHFields = FieldInfoContainer()
+add_flash_field = KnownFLASHFields.add_field
+
 FLASHFieldInfo = FieldInfoContainer.create_with_fallback(FieldInfo)
-add_flash_field = FLASHFieldInfo.add_field
-
-add_field = add_flash_field
+add_field = FLASHFieldInfo.add_field
 
 # Common fields in FLASH: (Thanks to John ZuHone for this list)
 #


--- a/yt/frontends/maestro/fields.py	Tue Jun 21 17:37:52 2011 -0700
+++ b/yt/frontends/maestro/fields.py	Tue Jun 21 17:39:32 2011 -0700
@@ -36,11 +36,11 @@
     ValidateGridType
 import yt.data_objects.universal_fields
 
+KnownMaestroFields = FieldInfoContainer()
+add_maestro_field = KnownMaestroFields.add_field
+
 MaestroFieldInfo = FieldInfoContainer.create_with_fallback(FieldInfo)
-add_maestro_field = MaestroFieldInfo.add_field
-
-
-add_field = add_maestro_field
+add_field = MaestroFieldInfo.add_field
 
 add_field("density", function=lambda a,b: None, take_log=True,
           validators = [ValidateDataField("density")],


--- a/yt/frontends/orion/data_structures.py	Tue Jun 21 17:37:52 2011 -0700
+++ b/yt/frontends/orion/data_structures.py	Tue Jun 21 17:39:32 2011 -0700
@@ -23,50 +23,41 @@
   along with this program.  If not, see <http://www.gnu.org/licenses/>.
 """
 
+import os
 import re
-import os
 import weakref
+
+from collections import defaultdict
+from string import strip, rstrip
+from stat import ST_CTIME
+
 import numpy as na
 
-from collections import \
-    defaultdict
-from string import \
-    strip, \
-    rstrip
-from stat import \
-    ST_CTIME
-
 from yt.funcs import *
-from yt.data_objects.grid_patch import \
-           AMRGridPatch
-from yt.data_objects.hierarchy import \
-           AMRHierarchy
-from yt.data_objects.static_output import \
-           StaticOutput
-from yt.utilities.definitions import \
-    mpc_conversion
+from yt.data_objects.field_info_container import FieldInfoContainer, NullFunc
+from yt.data_objects.grid_patch import AMRGridPatch
+from yt.data_objects.hierarchy import AMRHierarchy
+from yt.data_objects.static_output import StaticOutput
+from yt.utilities.definitions import mpc_conversion
 from yt.utilities.parallel_tools.parallel_analysis_interface import \
-     parallel_root_only
+    parallel_root_only
 
 from .definitions import \
     orion2enzoDict, \
     parameterDict, \
     yt2orionFieldsDict, \
     orion_FAB_header_pattern
-
 from .fields import \
     OrionFieldInfo, \
     add_orion_field, \
     KnownOrionFields
 
-from yt.data_objects.field_info_container import \
-    FieldInfoContainer, NullFunc
-
 
 class OrionGrid(AMRGridPatch):
     _id_offset = 0
-    def __init__(self, LeftEdge, RightEdge, index, level, filename, offset, dimensions,start,stop,paranoia=False,**kwargs):
-        AMRGridPatch.__init__(self, index,**kwargs)
+    def __init__(self, LeftEdge, RightEdge, index, level, filename, offset,
+                 dimensions, start, stop, paranoia=False, **kwargs):
+        AMRGridPatch.__init__(self, index, **kwargs)
         self.filename = filename
         self._offset = offset
         self._paranoid = paranoia


--- a/yt/frontends/orion/fields.py	Tue Jun 21 17:37:52 2011 -0700
+++ b/yt/frontends/orion/fields.py	Tue Jun 21 17:39:32 2011 -0700
@@ -34,19 +34,17 @@
     ValidateGridType
 import yt.data_objects.universal_fields
 
+
+KnownOrionFields = FieldInfoContainer()
+add_orion_field = KnownOrionFields.add_field
+
 OrionFieldInfo = FieldInfoContainer.create_with_fallback(FieldInfo)
-add_orion_field = OrionFieldInfo.add_field
+add_field = OrionFieldInfo.add_field
 
-
-add_field = add_orion_field
-
-# def _convertDensity(data):
-#     return data.convert("Density")
 add_field("density", function=lambda a,b: None, take_log=True,
           validators = [ValidateDataField("density")],
           units=r"\rm{g}/\rm{cm}^3")
 OrionFieldInfo["density"]._projected_units =r"\rm{g}/\rm{cm}^2"
-#OrionFieldInfo["density"]._convert_function=_convertDensity
 
 add_field("eden", function=lambda a,b: None, take_log=True,
           validators = [ValidateDataField("eden")],


--- a/yt/frontends/ramses/fields.py	Tue Jun 21 17:37:52 2011 -0700
+++ b/yt/frontends/ramses/fields.py	Tue Jun 21 17:39:32 2011 -0700
@@ -33,10 +33,12 @@
     ValidateGridType
 import yt.data_objects.universal_fields
 
+
+KnownRAMSESFields = FieldInfoContainer()
+add_ramses_field = KnownRAMSESFields.add_field
+
 RAMSESFieldInfo = FieldInfoContainer.create_with_fallback(FieldInfo)
-add_ramses_field = RAMSESFieldInfo.add_field
-
-add_field = add_ramses_field
+add_field = RAMSESFieldInfo.add_field
 
 known_ramses_fields = [
     "Density",


--- a/yt/frontends/tiger/fields.py	Tue Jun 21 17:37:52 2011 -0700
+++ b/yt/frontends/tiger/fields.py	Tue Jun 21 17:39:32 2011 -0700
@@ -33,6 +33,9 @@
     ValidateGridType
 import yt.data_objects.universal_fields
 
+KnownTigerFields = FieldInfoContainer()
+add_tiger_field = KnownTigerFields.add_field
+
 TigerFieldInfo = FieldInfoContainer.create_with_fallback(FieldInfo)
-add_tiger_field = TigerFieldInfo.add_field
+add_field = TigerFieldInfo.add_field
 


http://bitbucket.org/yt_analysis/yt/changeset/0c13b98b67c3/
changeset:   0c13b98b67c3
branch:      deliberate_fields
user:        MatthewTurk
date:        2011-06-22 05:24:28
summary:     Merging from yt
affected #:  6 files (-1 bytes)

--- a/tests/DD0010/moving7_0010	Tue Jun 21 17:39:32 2011 -0700
+++ b/tests/DD0010/moving7_0010	Tue Jun 21 23:24:28 2011 -0400
@@ -1,6 +1,7 @@
 InitialCycleNumber  = 10
 InitialTime         = 0.81751317119117
 InitialCPUTime      = 2.15207e+09
+CurrentTimeIdentifier = 0
 
 StopTime            = 20.097275649537
 StopCycle           = 10000


--- a/tests/runall.py	Tue Jun 21 17:39:32 2011 -0700
+++ b/tests/runall.py	Tue Jun 21 23:24:28 2011 -0400
@@ -4,7 +4,8 @@
 
 from yt.utilities.answer_testing.api import \
     RegressionTestRunner, clear_registry, create_test, \
-    TestFieldStatistics, TestAllProjections, registry_entries
+    TestFieldStatistics, TestAllProjections, registry_entries, \
+    Xunit
 
 from yt.utilities.command_line import get_yt_version
 
@@ -26,7 +27,6 @@
 #
 
 cwd = os.path.dirname(globals().get("__file__", os.getcwd()))
-#cwd = os.path.abspath(os.path.dirname(sys.argv[0]))
 
 def load_tests(iname, idir):
     f, filename, desc = imp.find_module(iname, [idir])
@@ -83,8 +83,10 @@
 
     # Now we modify our compare name and self name to include the pf.
     compare_id = opts.compare_name
+    watcher = None
     if compare_id is not None:
         compare_id += "_%s_%s" % (pf, pf._hash())
+        watcher = Xunit()
     this_id = opts.this_name + "_%s_%s" % (pf, pf._hash())
 
     rtr = RegressionTestRunner(this_id, compare_id,
@@ -92,9 +94,9 @@
                                compare_results_path=opts.storage_dir,
                                io_log=[opts.parameter_file])
 
+    rtr.watcher = watcher
     tests_to_run = []
     for m, vals in mapping.items():
-        print vals, opts.test_pattern
         new_tests = fnmatch.filter(vals, opts.test_pattern)
 
         if len(new_tests) == 0: continue
@@ -103,6 +105,8 @@
 
     for test_name in sorted(tests_to_run):
         rtr.run_test(test_name)
+    if watcher is not None:
+        rtr.watcher.report()
 
     for test_name, result in sorted(rtr.passed_tests.items()):
         print "TEST %s: %s" % (test_name, result)


--- a/yt/data_objects/universal_fields.py	Tue Jun 21 17:39:32 2011 -0700
+++ b/yt/data_objects/universal_fields.py	Tue Jun 21 23:24:28 2011 -0400
@@ -413,17 +413,15 @@
 
 # This is rho_b / <rho_b>.
 def _Baryon_Overdensity(field, data):
-    return data['Density']
-def _Convert_Baryon_Overdensity(data):
     if data.pf.has_key('omega_baryon_now'):
         omega_baryon_now = data.pf['omega_baryon_now']
     else:
         omega_baryon_now = 0.0441
-    return 1 / (omega_baryon_now * rho_crit_now * 
-                (data.pf['CosmologyHubbleConstantNow']**2) * 
-                ((1+data.pf['CosmologyCurrentRedshift'])**3))
+    return data['Density'] / (omega_baryon_now * rho_crit_now * 
+                              (data.pf['CosmologyHubbleConstantNow']**2) * 
+                              ((1+data.pf['CosmologyCurrentRedshift'])**3))
 add_field("Baryon_Overdensity", function=_Baryon_Overdensity, 
-          convert_function=_Convert_Baryon_Overdensity, units=r"")
+          units=r"")
 
 # Weak lensing convergence.
 # Eqn 4 of Metzler, White, & Loken (2001, ApJ, 547, 560).
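
(The Baryon_Overdensity refactor above folds the old convert_function into
the field function itself, so the field now directly returns
Density / (omega_baryon_now * rho_crit_now * h**2 * (1 + z)**3), the baryon
density in units of the mean comoving baryon density, instead of relying on a
separate multiplicative conversion step.)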


--- a/yt/frontends/flash/data_structures.py	Tue Jun 21 17:39:32 2011 -0700
+++ b/yt/frontends/flash/data_structures.py	Tue Jun 21 23:24:28 2011 -0400
@@ -83,6 +83,12 @@
     def _detect_fields(self):
         ncomp = self._handle["/unknown names"].shape[0]
         self.field_list = [s for s in self._handle["/unknown names"][:].flat]
+        facevars = [s for s in self._handle
+                    if s.startswith(("fcx","fcy","fcz")) and s[-1].isdigit()]
+        nfacevars = len(facevars)
+        if (nfacevars > 0) :
+            ncomp += nfacevars
+            self.field_list.append(facevars)
         if ("/particle names" in self._handle) :
             self.field_list += ["particle_" + s[0].strip() for s
                                 in self._handle["/particle names"][:]]
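
(A possible issue worth flagging in the hunk above: field_list is a plain
list, and self.field_list.append(facevars) adds the whole facevars list as a
single element. If the intent is one entry per face-centered variable, as
ncomp += nfacevars suggests, self.field_list.extend(facevars) may have been
meant.)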


--- a/yt/utilities/amr_kdtree/amr_kdtree.py	Tue Jun 21 17:39:32 2011 -0700
+++ b/yt/utilities/amr_kdtree/amr_kdtree.py	Tue Jun 21 23:24:28 2011 -0400
@@ -259,6 +259,7 @@
         self.current_split_dim = 0
 
         self.pf = pf
+        self._id_offset = pf.h.grids[0]._id_offset
         if nprocs > len(pf.h.grids):
             print('Parallel rendering requires that the number of \n \
             grids in the dataset is greater or equal to the number of \n \
@@ -568,7 +569,7 @@
         None
         
         """
-        thisnode.grid = self.pf.hierarchy.grids[thisnode.grid - 1]
+        thisnode.grid = self.pf.hierarchy.grids[thisnode.grid - self._id_offset]
         
         dds = thisnode.grid.dds
         gle = thisnode.grid.LeftEdge
@@ -844,7 +845,7 @@
                     # Check if we have children and have not exceeded l_max
                     if len(thisgrid.Children) > 0 and thisgrid.Level < self.l_max:
                         # Get the children that are actually in the current volume
-                        children = [child.id - 1 for child in thisgrid.Children  
+                        children = [child.id - self._id_offset for child in thisgrid.Children  
                                     if na.all(child.LeftEdge < current_node.r_corner) & 
                                     na.all(child.RightEdge > current_node.l_corner)]
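
Both hunks encode the same invariant: a grid with integer id i lives at
hierarchy.grids[i - _id_offset].  The removed hard-coded "- 1" matched
frontends whose grid ids start at 1; reading the offset from
pf.h.grids[0]._id_offset lets the kd-tree also work with 0-based ids.  As a
sketch (the helper name is ours, not the changeset's):

    def grid_from_id(hierarchy, grid_id, id_offset):
        # map a grid id to its position in the flat grids array;
        # id_offset is 1 for 1-based grid ids, 0 for 0-based ones
        return hierarchy.grids[grid_id - id_offset]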
 


--- a/yt/utilities/answer_testing/api.py	Tue Jun 21 17:39:32 2011 -0700
+++ b/yt/utilities/answer_testing/api.py	Tue Jun 21 23:24:28 2011 -0400
@@ -42,3 +42,6 @@
 from .default_tests import \
     TestFieldStatistics, \
     TestAllProjections
+
+from .xunit import \
+    Xunit


http://bitbucket.org/yt_analysis/yt/changeset/c60f5e4866d8/
changeset:   c60f5e4866d8
branch:      deliberate_fields
user:        MatthewTurk
date:        2011-08-11 21:36:43
summary:     Merging from yt tip
affected #:  84 files (-1 bytes)

--- a/CREDITS	Tue Jun 21 23:24:28 2011 -0400
+++ b/CREDITS	Thu Aug 11 13:36:43 2011 -0600
@@ -18,6 +18,8 @@
                                 Andrew Myers (atmyers at astro.berkeley.edu)
                                 Michael Kuhlen (mqk at astro.berkeley.edu)
                                 Casey Stark (caseywstark at gmail.com)
+                                JC Passy (jcpassy at gmail.com)
+                                Eve Lee (elee at cita.utoronto.ca)
 
 We also include the Delaunay Triangulation module written by Robert Kern of
 Enthought, the cmdln.py module by Trent Mick, and the progressbar module by


--- a/setup.py	Tue Jun 21 23:24:28 2011 -0400
+++ b/setup.py	Thu Aug 11 13:36:43 2011 -0600
@@ -95,7 +95,7 @@
                        quiet=True)
     
     config.make_config_py()
-    config.make_svn_version_py()
+    #config.make_svn_version_py()
     config.add_subpackage('yt','yt')
     config.add_scripts("scripts/*")
 


--- a/yt/analysis_modules/absorption_spectrum/absorption_spectrum.py	Tue Jun 21 23:24:28 2011 -0400
+++ b/yt/analysis_modules/absorption_spectrum/absorption_spectrum.py	Thu Aug 11 13:36:43 2011 -0600
@@ -144,8 +144,9 @@
             column_density = field_data[continuum['field_name']] * field_data['dl']
             delta_lambda = continuum['wavelength'] * field_data['redshift']
             if use_peculiar_velocity:
-                delta_lambda += continuum['wavelength'] * field_data['los_velocity'] / \
-                    speed_of_light_cgs
+                # include a factor of (1 + z) because our velocity is in the proper frame.
+                delta_lambda += continuum['wavelength'] * (1 + field_data['redshift']) * \
+                    field_data['los_velocity'] / speed_of_light_cgs
             this_wavelength = delta_lambda + continuum['wavelength']
             right_index = na.digitize(this_wavelength, self.lambda_bins).clip(0, self.n_lambda)
             left_index = na.digitize((this_wavelength * 
@@ -178,8 +179,9 @@
             column_density = field_data[line['field_name']] * field_data['dl']
             delta_lambda = line['wavelength'] * field_data['redshift']
             if use_peculiar_velocity:
-                delta_lambda += line['wavelength'] * field_data['los_velocity'] / \
-                    speed_of_light_cgs
+                # include a factor of (1 + z) because our velocity is in the proper frame.
+                delta_lambda += line['wavelength'] * (1 + field_data['redshift']) * \
+                    field_data['los_velocity'] / speed_of_light_cgs
             thermal_b = km_per_cm * na.sqrt((2 * boltzmann_constant_cgs * 
                                              field_data['Temperature']) / 
                                             (amu_cgs * line['atomic_mass']))
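
Written out, the corrected shift applied in both hunks is (lambda_0 the rest
wavelength of the line or continuum, z the cosmological redshift, v_los the
proper-frame line-of-sight velocity):

    \Delta\lambda = \lambda_0\, z + \lambda_0\, (1+z)\, \frac{v_{\mathrm{los}}}{c}

The extra (1+z) factor converts the proper-frame Doppler shift into the
observed frame, which the previous expression omitted.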


--- a/yt/analysis_modules/coordinate_transformation/setup.py	Tue Jun 21 23:24:28 2011 -0400
+++ b/yt/analysis_modules/coordinate_transformation/setup.py	Thu Aug 11 13:36:43 2011 -0600
@@ -8,5 +8,5 @@
     from numpy.distutils.misc_util import Configuration
     config = Configuration('coordinate_transformation',parent_package,top_path)
     config.make_config_py() # installs __config__.py
-    config.make_svn_version_py()
+    #config.make_svn_version_py()
     return config


--- a/yt/analysis_modules/halo_finding/fof/setup.py	Tue Jun 21 23:24:28 2011 -0400
+++ b/yt/analysis_modules/halo_finding/fof/setup.py	Thu Aug 11 13:36:43 2011 -0600
@@ -12,5 +12,5 @@
                                      "kd.c"],
                                     libraries=["m"])
     config.make_config_py() # installs __config__.py
-    config.make_svn_version_py()
+    #config.make_svn_version_py()
     return config


--- a/yt/analysis_modules/halo_finding/halo_objects.py	Tue Jun 21 23:24:28 2011 -0400
+++ b/yt/analysis_modules/halo_finding/halo_objects.py	Thu Aug 11 13:36:43 2011 -0600
@@ -208,7 +208,8 @@
         r""" Returns a sphere source.
 
         This will generate a new, empty sphere source centered on this halo,
-        with the maximum radius of the halo.
+        with the maximum radius of the halo. This can be used like any other
+        data container in yt.
         
         Parameters
         ----------
@@ -934,6 +935,34 @@
         """
         return self.max_radius
 
+    def get_sphere(self):
+        r"""Returns a sphere source.
+
+        This will generate a new, empty sphere source centered on this halo,
+        with the maximum radius of the halo. This can be used like any other
+        data container in yt.
+        
+        Parameters
+        ----------
+        center_of_mass : bool, optional
+            True chooses the center of mass when calculating the maximum radius.
+            False chooses from the maximum density location for HOP halos
+            (it has no effect for FOF halos).
+            Default = True.
+        
+        Returns
+        -------
+        sphere : `yt.data_objects.api.AMRSphereBase`
+            The empty data source.
+
+        Examples
+        --------
+        >>> sp = halos[0].get_sphere()
+        """
+        cen = self.center_of_mass()
+        r = self.maximum_radius()
+        return self.pf.h.sphere(cen, r)
+
 class HaloList(object):
 
     _fields = ["particle_position_%s" % ax for ax in 'xyz']
@@ -1297,7 +1326,11 @@
         locations = []
         for line in lines:
             line = line.split()
-            locations.append(line[1:])
+            # Prepend the hdf5 file names with the full path.
+            temp = []
+            for item in line[1:]:
+                temp.append(self.pf.fullpath + '/' + item)
+            locations.append(temp)
         lines.close()
         return locations
 
@@ -2176,4 +2209,4 @@
         LoadedHaloList.__init__(self, pf, self.basename)
 
 
-        
\ No newline at end of file
+        


--- a/yt/analysis_modules/halo_finding/hop/setup.py	Tue Jun 21 23:24:28 2011 -0400
+++ b/yt/analysis_modules/halo_finding/hop/setup.py	Thu Aug 11 13:36:43 2011 -0600
@@ -15,5 +15,5 @@
                                      "hop_slice.c",
                                      "hop_smooth.c",])
     config.make_config_py() # installs __config__.py
-    config.make_svn_version_py()
+    #config.make_svn_version_py()
     return config


--- a/yt/analysis_modules/halo_finding/parallel_hop/setup.py	Tue Jun 21 23:24:28 2011 -0400
+++ b/yt/analysis_modules/halo_finding/parallel_hop/setup.py	Thu Aug 11 13:36:43 2011 -0600
@@ -8,5 +8,5 @@
     from numpy.distutils.misc_util import Configuration
     config = Configuration('parallel_hop',parent_package,top_path)
     config.make_config_py() # installs __config__.py
-    config.make_svn_version_py()
+    #config.make_svn_version_py()
     return config


--- a/yt/analysis_modules/halo_finding/setup.py	Tue Jun 21 23:24:28 2011 -0400
+++ b/yt/analysis_modules/halo_finding/setup.py	Thu Aug 11 13:36:43 2011 -0600
@@ -11,5 +11,5 @@
     config.add_subpackage("hop")
     config.add_subpackage("parallel_hop")
     config.make_config_py() # installs __config__.py
-    config.make_svn_version_py()
+    #config.make_svn_version_py()
     return config


--- a/yt/analysis_modules/halo_mass_function/setup.py	Tue Jun 21 23:24:28 2011 -0400
+++ b/yt/analysis_modules/halo_mass_function/setup.py	Thu Aug 11 13:36:43 2011 -0600
@@ -8,5 +8,5 @@
     from numpy.distutils.misc_util import Configuration
     config = Configuration('halo_mass_function',parent_package,top_path)
     config.make_config_py() # installs __config__.py
-    config.make_svn_version_py()
+    #config.make_svn_version_py()
     return config


--- a/yt/analysis_modules/halo_merger_tree/setup.py	Tue Jun 21 23:24:28 2011 -0400
+++ b/yt/analysis_modules/halo_merger_tree/setup.py	Thu Aug 11 13:36:43 2011 -0600
@@ -8,5 +8,5 @@
     from numpy.distutils.misc_util import Configuration
     config = Configuration('halo_merger_tree',parent_package,top_path)
     config.make_config_py() # installs __config__.py
-    config.make_svn_version_py()
+    #config.make_svn_version_py()
     return config


--- a/yt/analysis_modules/halo_profiler/halo_filters.py	Tue Jun 21 23:24:28 2011 -0400
+++ b/yt/analysis_modules/halo_profiler/halo_filters.py	Thu Aug 11 13:36:43 2011 -0600
@@ -33,30 +33,51 @@
                  virial_filters=[['TotalMassMsun', '>=','1e14']],
                  virial_quantities=['TotalMassMsun', 'RadiusMpc'],
                  virial_index=None, use_log=False):
-    """
-    Filter halos by virial quantities.
+    r"""Filter halos by virial quantities.
+    
     Return values are a True or False whether the halo passed the filter, 
     along with a dictionary of virial quantities for the fields specified in 
     the virial_quantities keyword.  Thresholds for virial quantities are 
-    given with the virial_filters keyword in the following way: 
+    given with the virial_filters keyword in the following way:
     [field, condition, value].
-    :param: overdensity_field (str): the field used for interpolation with the 
-    specified critical value given with 'virial_overdensity'.  
-    Default: 'ActualOverdensity'.
-    :param: virial_overdensity (flt): the value used for interpolation.  
-    Default: 200.[['TotalMassMsun', '>=','1e14']]
-    :param: must_be_virialized (bool): if no values in the profile are above the 
-    value of virial_overdensity, the halo does not pass the filter.  
-    Default: True.
-    :param: virial_filters (list): conditional filters based on virial quantities 
-    given in the following way: [field, condition, value].  
-    Default: [['TotalMassMsun', '>=','1e14']].
-    :param: virial_quantities (list): fields for which interpolated values should 
-    be calculated and returned.  Default: ['TotalMassMsun', 'RadiusMpc'].
-    :param: virial_index (list): if given as a list, the index of the radial profile 
-    which is used for interpolation is placed here.  Default: None.
-    :param: use_log (bool): if True, interpolation is done in log space.  
-    Default: False.
+    
+    This is typically used as part of a call to `add_halo_filter`.
+    
+    Parameters
+    ----------
+    overdensity_field : string
+        The field used for interpolation with the 
+        specified critical value given with 'virial_overdensity'.  
+        Default: 'ActualOverdensity'.
+    virial_overdensity : float
+        The value used to determine the outer radius of the virialized halo.
+        Default: 200.
+    must_be_virialized : bool
+        If no values in the profile are above the 
+        value of virial_overdensity, the halo does not pass the filter.  
+        Default: True.
+    virial_filters : array_like
+        Conditional filters based on virial quantities 
+        given in the following way: [field, condition, value].  
+        Default: [['TotalMassMsun', '>=','1e14']].
+    virial_quantities : array_like
+        Fields for which interpolated values should 
+        be calculated and returned.  Default: ['TotalMassMsun', 'RadiusMpc'].
+    virial_index : array_like
+        If given as a list, the index of the radial profile 
+        which is used for interpolation is placed here.  Default: None.
+    use_log : bool
+        If True, interpolation is done in log space.  
+        Default: False.
+    
+    Examples
+    --------
+    >>> hp.add_halo_filter(HP.VirialFilter, must_be_virialized=True,
+                   overdensity_field='ActualOverdensity',
+                   virial_overdensity=200,
+                   virial_filters=[['TotalMassMsun','>=','1e14']],
+                   virial_quantities=['TotalMassMsun','RadiusMpc'])
+    
     """
 
     fields = deepcopy(virial_quantities)


--- a/yt/analysis_modules/halo_profiler/multi_halo_profiler.py	Tue Jun 21 23:24:28 2011 -0400
+++ b/yt/analysis_modules/halo_profiler/multi_halo_profiler.py	Thu Aug 11 13:36:43 2011 -0600
@@ -68,53 +68,101 @@
                  projection_width=8.0, projection_width_units='mpc', project_at_level='max',
                  velocity_center=['bulk', 'halo'], filter_quantities=['id','center'], 
                  use_critical_density=False):
-        """
-        Initialize a HaloProfiler object.
-        :param output_dir (str): if specified, all output will be put into this path instead of 
-               in the dataset directories.  Default: None.
-        :param halos (str): "multiple" for profiling more than one halo.  In this mode halos are read in 
-               from a list or identified with a halo finder.  In "single" mode, the one and only halo 
-               center is identified automatically as the location of the peak in the density field.  
-               Default: "multiple".
-        :param halo_list_file (str): name of file containing the list of halos.  The HaloProfiler will 
-               look for this file in the data directory.  Default: "HopAnalysis.out".
-        :param halo_list_format (str or dict): the format of the halo list file.  "yt_hop" for the format 
-               given by yt's halo finders.  "enzo_hop" for the format written by enzo_hop.  "p-groupfinder" 
-               for P-Groupfinder.  This keyword 
-               can also be given in the form of a dictionary specifying the column in which various 
-               properties can be found.  For example, {"id": 0, "center": [1, 2, 3], "mass": 4, "radius": 5}.  
-               Default: "yt_hop".
-        :param halo_finder_function (function): If halos is set to multiple and the file given by 
-               halo_list_file does not exit, the halo finding function specified here will be called.  
-               Default: HaloFinder (yt_hop).
-        :param halo_finder_args (tuple): args given with call to halo finder function.  Default: None.
-        :param halo_finder_kwargs (dict): kwargs given with call to halo finder function. Default: None.
-        :param recenter (str or function name): The name of a function that
-               recenters the halo.
-        :param halo_radius (float): if no halo radii are provided in the halo list file, this parameter is 
-               used to specify the radius out to which radial profiles will be made.  This keyword is also 
-               used when halos is set to single.  Default: 0.1.
-        :param radius_units (str): the units of halo_radius.  Default: "1" (code units).
-        :param n_profile_bins (int): the number of bins in the radial profiles.  Default: 50.
-        :param profile_output_dir (str): the subdirectory, inside the data directory, in which radial profile 
-               output files will be created.  The directory will be created if it does not exist.  
-               Default: "radial_profiles".
-        :param projection_output_dir (str): the subdirectory, inside the data directory, in which projection 
-               output files will be created.  The directory will be created if it does not exist.  
-               Default: "projections".
-        :param projection_width (float): the width of halo projections.  Default: 8.0.
-        :param projection_width_units (str): the units of projection_width. Default: "mpc".
-        :param project_at_level (int or "max"): the maximum refinement level to be included in projections.  
-               Default: "max" (maximum level within the dataset).
-        :param velocity_center (list): the method in which the halo bulk velocity is calculated (used for 
-               calculation of radial and tangential velocities.  Valid options are:
-     	          - ["bulk", "halo"] (Default): the velocity provided in the halo list
-                  - ["bulk", "sphere"]: the bulk velocity of the sphere centered on the halo center.
-    	          - ["max", field]: the velocity of the cell that is the location of the maximum of the field 
-                                    specified (used only when halos set to single).
-        :param filter_quantities (list): quantities from the original halo list file to be written out in the 
-               filtered list file.  Default: ['id','center'].
-        :param use_critical_density (bool): if True, the definition of overdensity for virial quantities is calculated with respect to the critical density.  If False, overdensity is with respect to mean matter density, which is lower by a factor of Omega_M.  Default: False.
+        r"""Initialize a Halo Profiler object.
+        
+        In order to run the halo profiler, the Halo Profiler object must be
+        instantiated. At the minimum, the path to a parameter file
+        must be provided as the first term.
+        
+        Parameters
+        ----------
+        
+        dataset : string, required
+            The path to the parameter file for the dataset to be analyzed.
+        output_dir : string, optional
+            If specified, all output will be put into this path instead of 
+            in the dataset directories.  Default: None.
+        halos :  {"multiple", "single"}, optional
+            For profiling more than one halo.  In this mode halos are read in 
+            from a list or identified with a halo finder.  In "single" mode,
+            the one and only halo 
+            center is identified automatically as the location of the peak
+            in the density field.  
+            Default: "multiple".
+        halo_list_file : string, optional
+            The name of a file containing the list of halos.  The HaloProfiler
+            will look for this file in the data directory.
+            Default: "HopAnalysis.out".
+        halo_list_format : {string, dict}
+            The format of the halo list file.  "yt_hop" for the format 
+            given by yt's halo finders.  "enzo_hop" for the format written
+            by enzo_hop, and "p-groupfinder" for P-Groupfinder.  This keyword
+            can also be given in the form of a dictionary specifying the
+            column in which various properties can be found.
+            For example, {"id": 0, "center": [1, 2, 3], "mass": 4, "radius": 5}.
+            Default: "yt_hop".
+        halo_finder_function : function
+            If halos is set to multiple and the file given by 
+            halo_list_file does not exist, the halo finding function
+            specified here will be called.  
+            Default: HaloFinder (yt_hop).
+        halo_finder_args : tuple
+            Args given with call to halo finder function.  Default: None.
+        halo_finder_kwargs : dict
+            kwargs given with call to halo finder function. Default: None.
+        recenter : {string, function}
+            The name of a function that recenters the halo for analysis.
+            Default: None.
+        halo_radius : float
+            If no halo radii are provided in the halo list file, this
+            parameter is used to specify the radius out to which radial
+            profiles will be made.  This keyword is also 
+            used when halos is set to single.  Default: 0.1.
+        radius_units : string
+            The units of halo_radius.  Default: "1" (code units).
+        n_profile_bins : int
+            The number of bins in the radial profiles.  Default: 50.
+        profile_output_dir : str
+            The subdirectory, inside the data directory, in which radial profile 
+            output files will be created.
+            The directory will be created if it does not exist.  
+            Default: "radial_profiles".
+        projection_output_dir : str
+            The subdirectory, inside the data directory, in which projection 
+            output files will be created.
+            The directory will be created if it does not exist.  
+            Default: "projections".
+        projection_width : float
+            The width of halo projections.  Default: 8.0.
+        projection_width_units : string
+            The units of projection_width. Default: "mpc".
+        project_at_level : {"max", int}
+            The maximum refinement level to be included in projections.  
+            Default: "max" (maximum level within the dataset).
+        velocity_center : array_like
+            The method in which the halo bulk velocity is calculated (used
+            for calculation of radial and tangential velocities).  Valid
+            options are:
+                * ["bulk", "halo"] (Default): the velocity provided in
+                  the halo list.
+                * ["bulk", "sphere"]: the bulk velocity of the sphere
+                  centered on the halo center.
+                * ["max", field]: the velocity of the cell at the location
+                  of the maximum of the specified field (used only when
+                  halos is set to single).
+        filter_quantities : array_like
+            Quantities from the original halo list file to be written out in the 
+            filtered list file.  Default: ['id','center'].
+        use_critical_density : bool
+            If True, the definition of overdensity for virial quantities
+            is calculated with respect to the critical density.
+            If False, overdensity is with respect to mean matter density,
+            which is lower by a factor of Omega_M.  Default: False.
+        
+        Examples
+        --------
+        >>> import yt.analysis_modules.halo_profiler.api as HP
+        >>> hp = HP.HaloProfiler("DD0242/DD0242")
+        
         """
 
         self.dataset = dataset
@@ -239,24 +287,126 @@
             return None
 
     def add_halo_filter(self, function, *args, **kwargs):
-        "Add a halo filter to the filter list."
+        r"""Filters can be added to create a refined list of halos based on
+        their profiles or to avoid profiling halos altogether based on
+        information given in the halo list file.
+        
+        It is often the case that one is looking to identify halos with a
+        specific set of properties. This can be accomplished through the
+        creation of filter functions. A filter function can take as many args
+        and kwargs as you like, as long as the first argument is a profile
+        object, or at least a dictionary which contains the profile arrays
+        for each field. Filter functions must return a list of two things.
+        The first is a True or False indicating whether the halo passed the
+        filter. The second is a dictionary containing quantities calculated 
+        for that halo that will be written to a file if the halo passes the
+        filter. A sample filter function based on virial quantities can be
+        found in yt/analysis_modules/halo_profiler/halo_filters.py.
+        
+        Parameters
+        ----------
+        function : function
+            The name of a halo filter function.
+        args : values
+            Arguments passed to the halo filter function.
+        kwargs : values
+            Arguments passed to the halo filter function.
+        
+        Examples
+        --------
+        >>> hp.add_halo_filter(HP.VirialFilter, must_be_virialized=True,
+                overdensity_field='ActualOverdensity',
+                virial_overdensity=200,
+                virial_filters=[['TotalMassMsun','>=','1e14']],
+                virial_quantities=['TotalMassMsun','RadiusMpc'])
+        
+        """
 
         self._halo_filters.append({'function':function, 'args':args, 'kwargs':kwargs})
 
     def add_profile(self, field, weight_field=None, accumulation=False):
-        "Add a field for profiling."
+        r"""Add a field for profiling.
+        
+        Once the halo profiler object has been instantiated,
+        fields can be added for profiling using this function. This function
+        may be called multiple times, once per field to be added.
+        
+        Parameters
+        ----------
+        field : string
+            The name of the field.
+        weight_field : {None, string}, optional
+            The field that will be used to weight the field `field` when
+            the radial binning is done. Default: None.
+        accumulation : bool
+            Whether or not the `field` values should be accumulated
+            cumulatively with increasing radius in the profile.
+        
+        Examples
+        --------
+        >>> hp.add_profile('CellVolume', weight_field=None, accumulation=True)
+        >>> hp.add_profile('TotalMassMsun', weight_field=None, accumulation=True)
+        >>> hp.add_profile('Density', weight_field=None, accumulation=False)
+        >>> hp.add_profile('Temperature', weight_field='CellMassMsun', accumulation=False)
+            
+        """
 
         self.profile_fields.append({'field':field, 'weight_field':weight_field, 'accumulation':accumulation})
 
     def add_projection(self, field, weight_field=None, cmap='algae'):
-        "Add a field for projection."
+        r"""Make a projection of the specified field.
+        
+        For the given field, a projection will be produced that can be saved
+        to HDF5 or image format. See `make_projections`.
+        
+        Parameters
+        ----------
+        field : string
+            The name of the field.
+        weight_field : string
+            The field that will be used to weight the field `field` when
+            the projection is done. Default: None.
+        cmap : string
+            The name of the matplotlib color map that will be used if an
+            image is made from the projection. Default="algae".
+        
+        Examples
+        --------
+        >>> hp.add_projection('Density', weight_field=None)
+        >>> hp.add_projection('Temperature', weight_field='Density')
+        >>> hp.add_projection('Metallicity', weight_field='Density')
+
+        """
 
         self.projection_fields.append({'field':field, 'weight_field':weight_field, 
                                        'cmap': cmap})
 
     @parallel_blocking_call
     def make_profiles(self, filename=None, prefilters=None, **kwargs):
-        "Make radial profiles for all halos on the list."
+        r"""Make radial profiles for all halos in the list.
+        
+        After all the calls to `add_profile`, this will trigger the actual
+        calculations and output the profiles to disk.
+        
+        Parameters
+        ----------
+        filename : string
+            If set, a file will be written with all of the filtered halos
+            and the quantities returned by the filter functions.
+            Default=None.
+        prefilters : array_like
+            A single dataset can contain thousands or tens of thousands of
+            halos. Significant time can be saved by not profiling halos
+            that are certain to not pass any filter functions in place.
+            Simple filters based on quantities provided in the initial
+            halo list can be used to filter out unwanted halos using this
+            parameter.
+        
+        Examples
+        --------
+        >>> hp.make_profiles(filename="FilteredQuantities.out",
+                 prefilters=["halo['mass'] > 1e13"])
+        
+        """
 
         if len(self.all_halos) == 0:
             mylog.error("Halo list is empty, returning.")
@@ -354,9 +504,9 @@
         if filename is not None:
             self._write_filtered_halo_list(filename, **kwargs)
 
-    def _get_halo_profile(self, halo, filename, virial_filter=True, force_write=False):
-        """
-        Profile a single halo and write profile data to a file.
+    def _get_halo_profile(self, halo, filename, virial_filter=True,
+            force_write=False):
+        """Profile a single halo and write profile data to a file.
         If file already exists, read profile data from file.
         Return a dictionary of id, center, and virial quantities if virial_filter is True.
         """
@@ -455,8 +605,34 @@
         return profile
 
     @parallel_blocking_call
-    def make_projections(self, axes=[0, 1, 2], halo_list='filtered', save_images=False, save_cube=True):
-        "Make projections of all halos using specified fields."
+    def make_projections(self, axes=[0, 1, 2], halo_list='filtered',
+            save_images=False, save_cube=True):
+        r"""Make projections of all halos using specified fields.
+        
+        After adding fields using `add_projection`, this starts the actual
+        calculations and saves the output to disk.
+        
+        Parameters
+        ----------
+        axes : array_like
+            A list of the axes to project along, using the usual 0,1,2
+            convention. Default=[0,1,2].
+        halo_list : {'filtered', 'all'}
+            Which set of halos to make projections of, either ones passed by the
+            halo filters (if enabled/added), or all halos.
+            Default='filtered'.
+        save_images : bool
+            Whether or not to save images of the projections. Default=False.
+        save_cube : bool
+            Whether or not to save the HDF5 files of the halo projections.
+            Default=True.
+        
+        Examples
+        --------
+        >>> hp.make_projections(axes=[0, 1, 2], save_cube=True,
+            save_images=True, halo_list="filtered")
+        
+        """
 
         # Get list of halos for projecting.
         if halo_list == 'filtered':
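
As a concrete instance of the filter contract documented in add_halo_filter
above (first argument a profile object or a dict of profile arrays; return a
[passed, quantities] pair), a minimal mass cut could look like this sketch
(the field name and threshold are illustrative only, not part of the
changeset):

    def MinimumMassFilter(profile, mass_field='TotalMassMsun',
                          threshold=1e13):
        # outermost bin of the accumulated mass profile
        final_mass = profile[mass_field][-1]
        passed = final_mass >= threshold
        # the quantities dict is written out for halos that pass
        return [passed, {mass_field: final_mass}]

    # registered like any other filter:
    # hp.add_halo_filter(MinimumMassFilter, threshold=1e13)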


--- a/yt/analysis_modules/halo_profiler/setup.py	Tue Jun 21 23:24:28 2011 -0400
+++ b/yt/analysis_modules/halo_profiler/setup.py	Thu Aug 11 13:36:43 2011 -0600
@@ -8,5 +8,5 @@
     from numpy.distutils.misc_util import Configuration
     config = Configuration('halo_profiler',parent_package,top_path)
     config.make_config_py() # installs __config__.py
-    config.make_svn_version_py()
+    #config.make_svn_version_py()
     return config


--- a/yt/analysis_modules/hierarchy_subset/setup.py	Tue Jun 21 23:24:28 2011 -0400
+++ b/yt/analysis_modules/hierarchy_subset/setup.py	Thu Aug 11 13:36:43 2011 -0600
@@ -8,5 +8,5 @@
     from numpy.distutils.misc_util import Configuration
     config = Configuration('hierarchy_subset',parent_package,top_path)
     config.make_config_py() # installs __config__.py
-    config.make_svn_version_py()
+    #config.make_svn_version_py()
     return config


--- a/yt/analysis_modules/level_sets/setup.py	Tue Jun 21 23:24:28 2011 -0400
+++ b/yt/analysis_modules/level_sets/setup.py	Thu Aug 11 13:36:43 2011 -0600
@@ -8,5 +8,5 @@
     from numpy.distutils.misc_util import Configuration
     config = Configuration('level_sets',parent_package,top_path)
     config.make_config_py() # installs __config__.py
-    config.make_svn_version_py()
+    #config.make_svn_version_py()
     return config


--- a/yt/analysis_modules/light_cone/setup.py	Tue Jun 21 23:24:28 2011 -0400
+++ b/yt/analysis_modules/light_cone/setup.py	Thu Aug 11 13:36:43 2011 -0600
@@ -8,5 +8,5 @@
     from numpy.distutils.misc_util import Configuration
     config = Configuration('light_cone',parent_package,top_path)
     config.make_config_py() # installs __config__.py
-    config.make_svn_version_py()
+    #config.make_svn_version_py()
     return config


--- a/yt/analysis_modules/light_ray/setup.py	Tue Jun 21 23:24:28 2011 -0400
+++ b/yt/analysis_modules/light_ray/setup.py	Thu Aug 11 13:36:43 2011 -0600
@@ -8,5 +8,5 @@
     from numpy.distutils.misc_util import Configuration
     config = Configuration('light_ray',parent_package,top_path)
     config.make_config_py() # installs __config__.py
-    config.make_svn_version_py()
+    #config.make_svn_version_py()
     return config


--- a/yt/analysis_modules/setup.py	Tue Jun 21 23:24:28 2011 -0400
+++ b/yt/analysis_modules/setup.py	Thu Aug 11 13:36:43 2011 -0600
@@ -5,7 +5,7 @@
     from numpy.distutils.misc_util import Configuration
     config = Configuration('analysis_modules',parent_package,top_path)
     config.make_config_py() # installs __config__.py
-    config.make_svn_version_py()
+    #config.make_svn_version_py()
     config.add_subpackage("absorption_spectrum")
     config.add_subpackage("coordinate_transformation")
     config.add_subpackage("halo_finding")


--- a/yt/analysis_modules/simulation_handler/enzo_simulation.py	Tue Jun 21 23:24:28 2011 -0400
+++ b/yt/analysis_modules/simulation_handler/enzo_simulation.py	Thu Aug 11 13:36:43 2011 -0600
@@ -39,36 +39,62 @@
     load
 
 class EnzoSimulation(object):
-    """
-    Super class for performing the same operation over all data dumps in 
+    r"""Super class for performing the same operation over all data dumps in 
     a simulation from one redshift to another.
     """
     def __init__(self, enzo_parameter_file, initial_time=None, final_time=None, initial_redshift=None, final_redshift=None,
                  links=False, enzo_parameters=None, get_time_outputs=True, get_redshift_outputs=True, get_available_data=False,
                  get_data_by_force=False):
-        """
-        Initialize an EnzoSimulation object.
-        :param initial_time (float): the initial time in code units for the dataset list.  Default: None.
-        :param final_time (float): the final time in code units for the dataset list.  Default: None.
-        :param initial_redshift (float): the initial (highest) redshift for the dataset list.  Only for 
-               cosmological simulations.  Default: None.
-        :param final_redshift (float): the final (lowest) redshift for the dataset list.  Only for cosmological 
-               simulations.  Default: None.
-        :param links (bool): if True, each entry in the dataset list will contain entries, previous and next, that 
-               point to the previous and next entries on the dataset list.  Default: False.
-        :param enzo_parameters (dict): a dictionary specify additional parameters to be retrieved from the 
-               parameter file.  The format should be the name of the parameter as the key and the variable type as 
-               the value.  For example, {'CosmologyComovingBoxSize':float}.  All parameter values will be stored in 
-               the dictionary attribute, enzoParameters.  Default: None.
-        :param get_time_outputs (bool): if False, the time datasets, specified in Enzo with the dtDataDump, will not 
-               be added to the dataset list.  Default: True.
-        :param get_redshift_outputs (bool): if False, the redshift datasets will not be added to the dataset list.  Default: True.
-        :param get_available_data (bool): if True, only datasets that are found to exist at the file path are added 
-               to the list.  Default: False.
-        :param get_data_by_force (bool): if True, time data dumps are not calculated using dtDataDump.  Instead, the 
-               the working directory is searched for directories that match the datadumpname keyword.  Each dataset 
-               is loaded up to get the time and redshift manually.  This is useful with collapse simulations that use 
-               OutputFirstTimeAtLevel or with simulations that make outputs based on cycle numbers.  Default: False.
+        r"""Initialize an Enzo Simulation object.
+        
+        Parameters
+        ----------
+        initial_time : float
+            The initial time in code units for the dataset list.  Default: None.
+        final_time : float
+            The final time in code units for the dataset list.  Default: None.
+        initial_redshift : float
+            The initial (highest) redshift for the dataset list. Only for 
+            cosmological simulations.  Default: None.
+        final_redshift : float
+            The final (lowest) redshift for the dataset list.  Only for
+            cosmological simulations.  Default: None.
+        links : bool
+            If True, each entry in the dataset list will contain entries,
+            previous and next, that point to the previous and next entries
+            on the dataset list.
+            Default: False.
+        enzo_parameters : dict
+            A dictionary specifying additional parameters to be retrieved
+            from the parameter file.  The format should be the name of the
+            parameter as the key and the variable type as the value.  For
+            example, {'CosmologyComovingBoxSize':float}.  All parameter
+            values will be stored in the dictionary attribute,
+            enzoParameters.  Default: None.
+        get_time_outputs : bool
+            If False, the time datasets, specified in Enzo with the dtDataDump,
+            will not be added to the dataset list.  Default: True.
+        get_redshift_outputs : bool
+            If False, the redshift datasets will not be added to the
+            dataset list.  Default: True.
+        get_available_data : bool
+            If True, only datasets that are found to exist at the file path
+            are added to the list.  Default: False.
+        get_data_by_force : bool
+            If True, time data dumps are not calculated using dtDataDump.
+            Instead, the working directory is searched for directories that
+            match the datadumpname keyword.  Each dataset is loaded up to
+            get the time and redshift manually.  This is useful with
+            collapse simulations that use OutputFirstTimeAtLevel or with
+            simulations that make outputs based on cycle numbers.
+            Default: False.
+        
+        Examples
+        --------
+        >>> import yt.analysis_modules.simulation_handler.api as ES
+        >>> es = ES.EnzoSimulation("my_simulation.par")
+
         """
         self.enzo_parameter_file = enzo_parameter_file
         self.enzoParameters = {}
@@ -331,16 +357,38 @@
 
     def imagine_minimal_splice(self, initial_redshift, final_redshift, decimals=3, filename=None, 
                                redshift_output_string='CosmologyOutputRedshift', start_index=0):
-        """
-        Create imaginary list of redshift outputs to maximally span a redshift interval.
-        :param decimals (int): The decimal place to which the output redshift will be rounded.  
-               If the decimal place in question is nonzero, the redshift will be rounded up to 
-               ensure continuity of the splice.  Default: 3.
-        :param filename (str): If provided, a file will be written with the redshift outputs in 
-               the form in which they should be given in the enzo parameter file.  Default: None.
-        :param redshift_output_string (str): The parameter accompanying the redshift outputs in the 
-               enzo parameter file.  Default: "CosmologyOutputRedshift".
-        :param start_index (int): The index of the first redshift output.  Default: 0.
+        r"""Create imaginary list of redshift outputs to maximally
+        span a redshift interval.
+        
+        If you want to run a cosmological simulation that will have just
+        enough data outputs to create a cosmology splice,
+        this method will calculate a list of redshifts outputs that will
+        minimally connect a redshift interval.
+        
+        Parameters
+        ----------
+        decimals : int
+            The decimal place to which the output redshift will be rounded.  
+            If the decimal place in question is nonzero, the redshift will
+            be rounded up to ensure continuity of the splice.  Default: 3.
+        filename : string
+            If provided, a file will be written with the redshift outputs in 
+            the form in which they should be given in the enzo parameter file.
+            Default: None.
+        redshift_output_string : string
+            The parameter accompanying the redshift outputs in the 
+            enzo parameter file.  Default: "CosmologyOutputRedshift".
+        start_index : int
+            The index of the first redshift output.  Default: 0.
+        
+        Examples
+        --------
+        >>> initial_redshift = 0.4
+        >>> final_redshift = 0.0
+        >>> outputs = es.imagine_minimal_splice(initial_redshift, final_redshift,
+            filename='outputs.out')
+
         """
 
         z = initial_redshift
@@ -370,17 +418,39 @@
         return outputs
 
     def create_cosmology_splice(self, minimal=True, deltaz_min=0.0, initial_redshift=None, final_redshift=None):
-        """
-        Create list of datasets to be used for LightCones or LightRays.
-        :param minimal (bool): if True, the minimum number of datasets is used to connect the initial and final 
-               redshift.  If false, the list will contain as many entries as possible within the redshift 
-               interval.  Default: True.
-        :param deltaz_min (float): specifies the minimum delta z between consecutive datasets in the returned 
-               list.  Default: 0.0.
-        :param initial_redshift (float): the initial (highest) redshift in the cosmology splice list.  If none 
-               given, the highest redshift dataset present will be used.  Default: None.
-        :param final_redshift (float): the final (lowest) redshift in the cosmology splice list.  If none given, 
-               the lowest redshift dataset present will be used.  Default: None.
+        r"""Create list of datasets to be used for `LightCones` or `LightRays`.
+        
+        For cosmological simulations, the physical width of the simulation
+        box corresponds to some \Delta z, which varies with redshift.
+        Using this logic, one can stitch together a series of datasets to
+        create a continuous volume or length element from one redshift to
+        another.  This method will return such a list.
+        
+        Parameters
+        ----------
+        minimal : bool
+            If True, the minimum number of datasets is used to connect the
+            initial and final redshift.  If False, the list will contain as
+            many entries as possible within the redshift interval.
+            Default: True.
+        deltaz_min : float
+            Specifies the minimum delta z between consecutive datasets in
+            the returned list.  Default: 0.0.
+        initial_redshift : float
+            The initial (highest) redshift in the cosmology splice list. If none 
+            given, the highest redshift dataset present will be used.
+            Default: None.
+        final_redshift : float
+            The final (lowest) redshift in the cosmology splice list.  If
+            none given, the lowest redshift dataset present will be used.
+            Default: None.
+        
+        Examples
+        --------
+        >>> cosmo = es.create_cosmology_splice(minimal=True, deltaz_min=0.0,
+            initial_redshift=1.0, final_redshift=0.0)
+        
         """
 
         if initial_redshift is None: initial_redshift = self.initial_redshift
@@ -452,10 +522,20 @@
         return cosmology_splice
 
     def get_data_by_redshift(self, redshifts, tolerance=None):
-        """
-        : param redshifts: a list of redshifts.
-        : tolerance: if not None, do not return a dataset unless the redshift is within the tolerance value.
-        Get datasets for a list of redshifts.
+        r"""Get datasets at or near to given redshifts.
+        
+        Parameters
+        ----------
+        redshifts : array_like
+            A list of redshifts, given as floats.
+        tolerance : float
+            If not None, do not return a dataset unless the redshift is
+            within the tolerance value. Default = None.
+        
+        Examples
+        --------
+        >>> datasets = es.get_data_by_redshift([0, 1, 2], tolerance=0.1)
+        
         """
 
         redshifts = ensure_list(redshifts)
@@ -472,10 +552,20 @@
         return my_datasets
 
     def get_data_by_time(self, times, tolerance=None):
-        """
-        : param redshifts: a list of times.
-        : tolerance: if not None, do not return a dataset unless the redshift is within the tolerance value.
-        Get datasets for a list of times.
+        r"""Get datasets at or near to given times.
+        
+        Parameters
+        ----------
+        times : array_like
+            A list of times, given in code units as floats.
+        tolerance : float
+            If not None, do not return a dataset unless the time is
+            within the tolerance value. Default = None.
+        
+        Examples
+        --------
+        >>> datasets = es.get_data_by_time([600, 500, 400], tolerance=10.)
+        
         """
 
         times = ensure_list(times)
@@ -486,7 +576,7 @@
                     and self.allOutputs[0] not in my_datasets:
                 my_datasets.append(self.allOutputs[0])
             else:
-                mylog.error("No dataset added for z = %f." % my_time)
+                mylog.error("No dataset added for time = %f." % my_time)
 
         self.allOutputs.sort(key=lambda obj: obj['time'])
         return my_datasets


--- a/yt/analysis_modules/simulation_handler/setup.py	Tue Jun 21 23:24:28 2011 -0400
+++ b/yt/analysis_modules/simulation_handler/setup.py	Thu Aug 11 13:36:43 2011 -0600
@@ -8,5 +8,5 @@
     from numpy.distutils.misc_util import Configuration
     config = Configuration('simulation_handler',parent_package,top_path)
     config.make_config_py() # installs __config__.py
-    config.make_svn_version_py()
+    #config.make_svn_version_py()
     return config


--- a/yt/analysis_modules/spectral_integrator/setup.py	Tue Jun 21 23:24:28 2011 -0400
+++ b/yt/analysis_modules/spectral_integrator/setup.py	Thu Aug 11 13:36:43 2011 -0600
@@ -8,5 +8,5 @@
     from numpy.distutils.misc_util import Configuration
     config = Configuration('spectral_integrator',parent_package,top_path)
     config.make_config_py() # installs __config__.py
-    config.make_svn_version_py()
+    #config.make_svn_version_py()
     return config


--- a/yt/analysis_modules/star_analysis/setup.py	Tue Jun 21 23:24:28 2011 -0400
+++ b/yt/analysis_modules/star_analysis/setup.py	Thu Aug 11 13:36:43 2011 -0600
@@ -8,5 +8,5 @@
     from numpy.distutils.misc_util import Configuration
     config = Configuration('star_analysis',parent_package,top_path)
     config.make_config_py() # installs __config__.py
-    config.make_svn_version_py()
+    #config.make_svn_version_py()
     return config


--- a/yt/analysis_modules/two_point_functions/setup.py	Tue Jun 21 23:24:28 2011 -0400
+++ b/yt/analysis_modules/two_point_functions/setup.py	Thu Aug 11 13:36:43 2011 -0600
@@ -8,5 +8,5 @@
     from numpy.distutils.misc_util import Configuration
     config = Configuration('two_point_functions',parent_package,top_path)
     config.make_config_py() # installs __config__.py
-    config.make_svn_version_py()
+    #config.make_svn_version_py()
     return config


--- a/yt/astro_objects/setup.py	Tue Jun 21 23:24:28 2011 -0400
+++ b/yt/astro_objects/setup.py	Thu Aug 11 13:36:43 2011 -0600
@@ -8,5 +8,5 @@
     from numpy.distutils.misc_util import Configuration
     config = Configuration('astro_objects',parent_package,top_path)
     config.make_config_py() # installs __config__.py
-    config.make_svn_version_py()
+    #config.make_svn_version_py()
     return config


--- a/yt/data_objects/data_containers.py	Tue Jun 21 23:24:28 2011 -0400
+++ b/yt/data_objects/data_containers.py	Thu Aug 11 13:36:43 2011 -0600
@@ -32,6 +32,7 @@
 import weakref
 import exceptions
 import itertools
+import shelve
 
 from yt.funcs import *
 
@@ -410,6 +411,23 @@
                              __del_grid_levels)
 
 
+    def __get_grid_dimensions(self):
+        if self.__grid_dimensions == None:
+            self.__grid_dimensions = na.array([g.ActiveDimensions for g in self._grids])
+        return self.__grid_dimensions
+
+    def __del_grid_dimensions(self):
+        del self.__grid_dimensions
+        self.__grid_dimensions = None
+
+    def __set_grid_dimensions(self, val):
+        self.__grid_dimensions = val
+
+    __grid_dimensions = None
+    grid_dimensions = property(__get_grid_dimensions, __set_grid_dimensions,
+                             __del_grid_dimensions)
+
+
 class AMR1DData(AMRData, GridPropertiesMixin):
     _spatial = False
     def __init__(self, pf, fields, **kwargs):
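
The new grid_dimensions property repeats the lazily-cached pattern of the
neighboring grid_levels property: build on first read, clear on delete so
the next read recomputes.  In isolation the idiom looks like this (a sketch;
Cached and build() are placeholders, and it uses the more idiomatic
"is None" test where the hunk uses "== None"):

    class Cached(object):
        __val = None

        def __get_val(self):
            if self.__val is None:
                self.__val = self.build()   # stand-in for the real work
            return self.__val

        def __set_val(self, val):
            self.__val = val

        def __del_val(self):
            self.__val = None               # forces a rebuild on next access

        val = property(__get_val, __set_val, __del_val)

        def build(self):
            return [1, 2, 3]                # placeholder computation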


--- a/yt/data_objects/derived_quantities.py	Tue Jun 21 23:24:28 2011 -0400
+++ b/yt/data_objects/derived_quantities.py	Thu Aug 11 13:36:43 2011 -0600
@@ -155,6 +155,19 @@
 add_quantity("TotalMass", function=_TotalMass,
              combine_function=_combTotalMass, n_ret = 2)
 
+def _MatterMass(data):
+    """
+    This function takes no arguments and returns the cell volumes and
+    matter densities, whose product gives the gas plus particle mass.
+    """
+    cellvol = data["CellVolume"]
+    matter_rho = data["Matter_Density"]
+    return cellvol, matter_rho 
+def _combMatterMass(data, cellvol, matter_rho):
+    return cellvol*matter_rho
+add_quantity("MatterMass", function=_MatterMass,
+	     combine_function=_combMatterMass, n_ret=2)
+
 def _CenterOfMass(data, use_cells=True, use_particles=False):
     """
     This function returns the location of the center
@@ -297,7 +310,7 @@
              combine_function=_combBaryonSpinParameter, n_ret=4)
     
 def _IsBound(data, truncate = True, include_thermal_energy = False,
-    treecode = True, opening_angle = 1.0, periodic_test = False):
+    treecode = True, opening_angle = 1.0, periodic_test = False, include_particles = True):
     r"""
     This returns whether or not the object is gravitationally bound. If this
     returns a value greater than one, it is bound, and otherwise not.
@@ -319,7 +332,10 @@
         used to calculate the potential between masses.
     periodic_test : Bool 
         Used for testing the periodic adjustment machinery
-        of this derived quantity. 
+        of this derived quantity.
+    include_particles : Bool
+        If True, include the mass contribution of particles when
+        calculating the binding energy.
 
     Examples
     --------
@@ -331,13 +347,21 @@
     bv_x,bv_y,bv_z = data.quantities["BulkVelocity"]()
     # One-cell objects are NOT BOUND.
     if data["CellMass"].size == 1: return [0.0]
-    kinetic = 0.5 * (data["CellMass"] * (
+    """
+    Changing data["CellMass"] to mass_to_use
+    Add the mass contribution of particles if include_particles = True
+    """
+    if (include_particles):
+	mass_to_use = data.quantities["MatterMass"]()[0] 
+    else:
+	mass_to_use = data["CellMass"]
+    kinetic = 0.5 * (mass_to_use * (
                        (data["x-velocity"] - bv_x)**2
                      + (data["y-velocity"] - bv_y)**2
                      + (data["z-velocity"] - bv_z)**2 )).sum()
     # Add thermal energy to kinetic energy
     if (include_thermal_energy):
-        thermal = (data["ThermalEnergy"] * data["CellMass"]).sum()
+        thermal = (data["ThermalEnergy"] * mass_to_use).sum()
         kinetic += thermal
     if periodic_test:
         kinetic = na.ones_like(kinetic)
@@ -368,8 +392,11 @@
     # This dict won't make a copy of the data, but it will make a copy to 
     # change if needed in the periodic section immediately below.
     local_data = {}
-    for label in ["x", "y", "z", "CellMass"]:
+    for label in ["x", "y", "z"]: # Separating CellMass from the for loop
         local_data[label] = data[label]
+    local_data["CellMass"] = mass_to_use # Adding CellMass separately
+					 # NOTE: if include_particles = True, local_data["CellMass"]
+					 #       is not the same as data["CellMass"]!!!
     if periodic.any():
         # Adjust local_data to re-center the clump to remove the periodicity
         # by the gap calculated above.
@@ -424,7 +451,7 @@
             thisx = (local_data["x"][sel] / dx).astype('int64') - cover_imin[0] * 2**L
             thisy = (local_data["y"][sel] / dy).astype('int64') - cover_imin[1] * 2**L
             thisz = (local_data["z"][sel] / dz).astype('int64') - cover_imin[2] * 2**L
-            vals = na.array([local_data["CellMass"][sel]], order='F')
+	    vals = na.array([local_data["CellMass"][sel]], order='F')
             octree.add_array_to_tree(L, thisx, thisy, thisz, vals,
                na.ones_like(thisx).astype('float64'), treecode = 1)
         # Now we calculate the binding energy using a treecode.
@@ -663,7 +690,7 @@
     totals = []
     for field in fields:
         if data[field].size < 1:
-            totals.append(0)
+            totals.append(0.0)
             continue
         totals.append(data[field].sum())
     return len(fields), totals
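
The new MatterMass quantity follows the usual derived-quantity split: the
first function returns n_ret=2 arrays for each processor, and the combine
function folds them into the final per-cell masses (CellVolume times
Matter_Density, i.e. gas plus particles).  A hedged usage sketch, with a
hypothetical dataset path:

    from yt.mods import load

    pf = load("DD0010/DD0010")               # hypothetical dataset
    sp = pf.h.sphere([0.5, 0.5, 0.5], 0.05)
    mass = sp.quantities["MatterMass"]()[0]  # indexed as in _IsBound above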


--- a/yt/data_objects/field_info_container.py	Tue Jun 21 23:24:28 2011 -0400
+++ b/yt/data_objects/field_info_container.py	Thu Aug 11 13:36:43 2011 -0600
@@ -151,7 +151,9 @@
             pf = fake_parameter_file(lambda: 1)
             pf.current_redshift = pf.omega_lambda = pf.omega_matter = \
                 pf.hubble_constant = pf.cosmological_simulation = 0.0
-
+            pf.domain_left_edge = na.zeros(3, 'float64')
+            pf.domain_right_edge = na.ones(3, 'float64')
+            pf.dimensionality = 3
         self.pf = pf
 
         class fake_hierarchy(object):


--- a/yt/data_objects/grid_patch.py	Tue Jun 21 23:24:28 2011 -0400
+++ b/yt/data_objects/grid_patch.py	Thu Aug 11 13:36:43 2011 -0600
@@ -61,7 +61,7 @@
         self.field_parameters = {}
         self.id = id
         if hierarchy: self.hierarchy = weakref.proxy(hierarchy)
-        self.pf = self.hierarchy.parameter_file # weakref already
+        self.pf = self.hierarchy.parameter_file  # weakref already
         self._child_mask = self._child_indices = self._child_index_mask = None
         self.start_index = None
 
@@ -159,7 +159,7 @@
 
     def keys(self):
         return self.data.keys()
-    
+
     def get_data(self, field):
         """ Returns a field or set of fields for a key or set of keys. """
         if not self.data.has_key(field):
@@ -227,7 +227,7 @@
         cond = na.logical_and(cond, self.RightEdge[y] >= LE[:,y])
         cond = na.logical_and(cond, self.LeftEdge[y] <= RE[:,y])
         return cond
-   
+
     def __repr__(self):
         return "AMRGridPatch_%04i" % (self.id)
 
@@ -362,7 +362,7 @@
         mask[startIndex[0]:endIndex[0],
              startIndex[1]:endIndex[1],
              startIndex[2]:endIndex[2]] = tofill
-        
+
     def __generate_child_mask(self):
         """
         Generates self.child_mask, which is zero where child grids exist (and


--- a/yt/data_objects/setup.py	Tue Jun 21 23:24:28 2011 -0400
+++ b/yt/data_objects/setup.py	Thu Aug 11 13:36:43 2011 -0600
@@ -8,5 +8,5 @@
     from numpy.distutils.misc_util import Configuration
     config = Configuration('data_objects',parent_package,top_path)
     config.make_config_py() # installs __config__.py
-    config.make_svn_version_py()
+    #config.make_svn_version_py()
     return config


--- a/yt/data_objects/static_output.py	Tue Jun 21 23:24:28 2011 -0400
+++ b/yt/data_objects/static_output.py	Thu Aug 11 13:36:43 2011 -0600
@@ -47,11 +47,11 @@
     class __metaclass__(type):
         def __init__(cls, name, b, d):
             type.__init__(cls, name, b, d)
-            output_type_registry[name]=cls
+            output_type_registry[name] = cls
             mylog.debug("Registering: %s as %s", name, cls)
 
     def __new__(cls, filename=None, *args, **kwargs):
-        if not isinstance(filename, types.StringTypes): 
+        if not isinstance(filename, types.StringTypes):
             obj = object.__new__(cls)
             obj.__init__(filename, *args, **kwargs)
             return obj
@@ -69,21 +69,27 @@
         """
         self.data_style = data_style
         self.file_style = file_style
+        self.conversion_factors = {}
+        self.parameters = {}
+
+        # path stuff
         self.parameter_filename = str(filename)
         self.basename = os.path.basename(filename)
         self.directory = os.path.expanduser(os.path.dirname(filename))
         self.fullpath = os.path.abspath(self.directory)
-        self._instantiated = time.time()
         if len(self.directory) == 0:
             self.directory = "."
-        self.conversion_factors = {}
-        self.parameters = {}
+
+        # to get the timing right, do this before the heavy lifting
+        self._instantiated = time.time()
+
         self._parse_parameter_file()
         self._set_units()
+
         # Because we need an instantiated class to check the pf's existence in
         # the cache, we move that check to here from __new__.  This avoids
         # double-instantiation.
-        if ytcfg.getboolean('yt','serialize'):
+        if ytcfg.getboolean('yt', 'serialize'):
             try:
                 _pf_store.check_pf(self)
             except NoParameterShelf:
@@ -111,9 +117,7 @@
         return False
 
     def __getitem__(self, key):
-        """
-        Returns _units, parameters, or _conversion_factors in that order
-        """
+        """ Returns units, parameters, or conversion_factors in that order. """
         for d in [self.units, self.time_units, self.parameters, \
                   self.conversion_factors]:
             if key in d: return d[key]
@@ -121,8 +125,9 @@
 
     def keys(self):
         """
-        Returns a list of possible keys, from _units, parameters and
-        _conversion_factors
+        Returns a list of possible keys, from units, parameters and
+        conversion_factors.
+
         """
         return self.units.keys() \
              + self.time_units.keys() \
@@ -137,7 +142,7 @@
     def get_smallest_appropriate_unit(self, v):
         max_nu = 1e30
         good_u = None
-        for unit in ['mpc','kpc','pc','au','rsun','cm']:
+        for unit in ['mpc', 'kpc', 'pc', 'au', 'rsun', 'cm']:
             vv = v*self[unit]
             if vv < max_nu and vv > 1.0:
                 good_u = unit
@@ -146,7 +151,8 @@
 
     def has_key(self, key):
         """
-        Returns true or false
+        Checks units, parameters, and conversion factors. Returns a boolean.
+
         """
         return key in self.units or \
                key in self.time_units or \
@@ -162,19 +168,19 @@
             self._instantiated_hierarchy = self._hierarchy_class(
                 self, data_style=self.data_style)
         return self._instantiated_hierarchy
-    h = hierarchy
+    h = hierarchy  # alias
 
     @parallel_root_only
     def print_key_parameters(self):
         for a in ["current_time", "domain_dimensions", "domain_left_edge",
-                 "domain_right_edge", "cosmological_simulation"]:
+                  "domain_right_edge", "cosmological_simulation"]:
             if not hasattr(self, a):
                 mylog.error("Missing %s in parameter file definition!", a)
                 continue
             v = getattr(self, a)
             mylog.info("Parameters: %-25s = %s", a, v)
         if hasattr(self, "cosmological_simulation") and \
-            getattr(self, "cosmological_simulation"):
+           getattr(self, "cosmological_simulation"):
             for a in ["current_redshift", "omega_lambda", "omega_matter",
                       "hubble_constant"]:
                 if not hasattr(self, a):

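The ``__getitem__`` above searches the dictionaries in a fixed order, so a key present in ``units`` shadows the same key elsewhere. A self-contained sketch of that precedence (the dictionaries and values here are made up, not yt code):

    # mirrors StaticOutput.__getitem__'s first-match-wins lookup
    units = {"cm": 3.0857e24, "mpc": 1.0}
    time_units = {"s": 1.0}
    parameters = {"Gamma": 5.0 / 3.0}
    conversion_factors = {"Time": 1.0}

    def lookup(key):
        for d in [units, time_units, parameters, conversion_factors]:
            if key in d: return d[key]
        raise KeyError(key)

    print lookup("Gamma")  # 1.666..., found in parameters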

--- a/yt/data_objects/universal_fields.py	Tue Jun 21 23:24:28 2011 -0400
+++ b/yt/data_objects/universal_fields.py	Thu Aug 11 13:36:43 2011 -0600
@@ -525,12 +525,14 @@
     ds = div_fac * data['dx'].flat[0]
     f  = data["x-velocity"][sl_right,1:-1,1:-1]/ds
     f -= data["x-velocity"][sl_left ,1:-1,1:-1]/ds
-    ds = div_fac * data['dy'].flat[0]
-    f += data["y-velocity"][1:-1,sl_right,1:-1]/ds
-    f -= data["y-velocity"][1:-1,sl_left ,1:-1]/ds
-    ds = div_fac * data['dz'].flat[0]
-    f += data["z-velocity"][1:-1,1:-1,sl_right]/ds
-    f -= data["z-velocity"][1:-1,1:-1,sl_left ]/ds
+    if data.pf.dimensionality > 1:
+        ds = div_fac * data['dy'].flat[0]
+        f += data["y-velocity"][1:-1,sl_right,1:-1]/ds
+        f -= data["y-velocity"][1:-1,sl_left ,1:-1]/ds
+    if data.pf.dimensionality > 2:
+        ds = div_fac * data['dz'].flat[0]
+        f += data["z-velocity"][1:-1,1:-1,sl_right]/ds
+        f -= data["z-velocity"][1:-1,1:-1,sl_left ]/ds
     new_field = na.zeros(data["x-velocity"].shape, dtype='float64')
     new_field[1:-1,1:-1,1:-1] = f
     return new_field
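The guarded terms are a standard centered difference, div v ~ sum_i (v_i[+1] - v_i[-1]) / (2 dx_i), with the y and z contributions dropped below the matching dimensionality. A self-contained numpy sketch of one such term (shapes and spacing are made up):

    import numpy as np

    vx = np.random.random((8, 8, 8))  # hypothetical x-velocity cube
    dx = 0.1
    div = np.zeros_like(vx)
    # centered difference for the x-term; in a 1D run the analogous y- and
    # z-terms are simply never added, which is what the guards accomplish
    div[1:-1, 1:-1, 1:-1] = (vx[2:, 1:-1, 1:-1] - vx[:-2, 1:-1, 1:-1]) / (2.0 * dx)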
@@ -624,17 +626,17 @@
 def _AngularMomentumX(field, data):
     return data["CellMass"] * data["SpecificAngularMomentumX"]
 add_field("AngularMomentumX", function=_AngularMomentumX,
-         units=r"\rm{g}\/\rm{cm}^2/\rm{s}", vector_field=True,
+         units=r"\rm{g}\/\rm{cm}^2/\rm{s}", vector_field=False,
          validators=[ValidateParameter('center')])
 def _AngularMomentumY(field, data):
     return data["CellMass"] * data["SpecificAngularMomentumY"]
 add_field("AngularMomentumY", function=_AngularMomentumY,
-         units=r"\rm{g}\/\rm{cm}^2/\rm{s}", vector_field=True,
+         units=r"\rm{g}\/\rm{cm}^2/\rm{s}", vector_field=False,
          validators=[ValidateParameter('center')])
 def _AngularMomentumZ(field, data):
     return data["CellMass"] * data["SpecificAngularMomentumZ"]
 add_field("AngularMomentumZ", function=_AngularMomentumZ,
-         units=r"\rm{g}\/\rm{cm}^2/\rm{s}", vector_field=True,
+         units=r"\rm{g}\/\rm{cm}^2/\rm{s}", vector_field=False,
          validators=[ValidateParameter('center')])
 
 def _ParticleSpecificAngularMomentum(field, data):
@@ -930,3 +932,47 @@
                         ValidateDataField("By"),
                         ValidateDataField("Bz")])
 
+def _VorticitySquared(field, data):
+    mylog.debug("Generating vorticity on %s", data)
+    # We need to set up stencils
+    if data.pf["HydroMethod"] == 2:
+        sl_left = slice(None,-2,None)
+        sl_right = slice(1,-1,None)
+        div_fac = 1.0
+    else:
+        sl_left = slice(None,-2,None)
+        sl_right = slice(2,None,None)
+        div_fac = 2.0
+    new_field = na.zeros(data["x-velocity"].shape)
+    dvzdy = (data["z-velocity"][1:-1,sl_right,1:-1] -
+             data["z-velocity"][1:-1,sl_left,1:-1]) \
+             / (div_fac*data["dy"].flat[0])
+    dvydz = (data["y-velocity"][1:-1,1:-1,sl_right] -
+             data["y-velocity"][1:-1,1:-1,sl_left]) \
+             / (div_fac*data["dz"].flat[0])
+    new_field[1:-1,1:-1,1:-1] += (dvzdy - dvydz)**2.0
+    del dvzdy, dvydz
+    dvxdz = (data["x-velocity"][1:-1,1:-1,sl_right] -
+             data["x-velocity"][1:-1,1:-1,sl_left]) \
+             / (div_fac*data["dz"].flat[0])
+    dvzdx = (data["z-velocity"][sl_right,1:-1,1:-1] -
+             data["z-velocity"][sl_left,1:-1,1:-1]) \
+             / (div_fac*data["dx"].flat[0])
+    new_field[1:-1,1:-1,1:-1] += (dvxdz - dvzdx)**2.0
+    del dvxdz, dvzdx
+    dvydx = (data["y-velocity"][sl_right,1:-1,1:-1] -
+             data["y-velocity"][sl_left,1:-1,1:-1]) \
+             / (div_fac*data["dx"].flat[0])
+    dvxdy = (data["x-velocity"][1:-1,sl_right,1:-1] -
+             data["x-velocity"][1:-1,sl_left,1:-1]) \
+             / (div_fac*data["dy"].flat[0])
+    new_field[1:-1,1:-1,1:-1] += (dvydx - dvxdy)**2.0
+    del dvydx, dvxdy
+    new_field = na.abs(new_field)
+    return new_field
+def _convertVorticitySquared(data):
+    return data.convert("cm")**-2.0
+add_field("VorticitySquared", function=_VorticitySquared,
+          validators=[ValidateSpatial(1)],
+          units=r"\rm{s}^{-2}",
+          convert_function=_convertVorticitySquared)
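Once registered, the field behaves like any other derived field. A hedged usage sketch (the dataset path is hypothetical):

    from yt.mods import load

    pf = load("DD0010/DD0010")  # hypothetical Enzo output
    dd = pf.h.all_data()
    # ValidateSpatial(1) makes yt generate this on grids with one ghost zone
    print dd["VorticitySquared"].max()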


--- a/yt/frontends/art/setup.py	Tue Jun 21 23:24:28 2011 -0400
+++ b/yt/frontends/art/setup.py	Thu Aug 11 13:36:43 2011 -0600
@@ -6,5 +6,5 @@
     from numpy.distutils.misc_util import Configuration
     config = Configuration('art',parent_package,top_path)
     config.make_config_py() # installs __config__.py
-    config.make_svn_version_py()
+    #config.make_svn_version_py()
     return config


--- a/yt/frontends/castro/data_structures.py	Tue Jun 21 23:24:28 2011 -0400
+++ b/yt/frontends/castro/data_structures.py	Thu Aug 11 13:36:43 2011 -0600
@@ -1,5 +1,5 @@
 """
-Data structures for Castro. 
+Data structures for Castro.
 
 Author: J. S. Oishi <jsoishi at gmail.com>
 Affiliation: KIPAC/SLAC/Stanford
@@ -138,7 +138,7 @@
         self._setup_data_io()
         self._setup_field_list()
         self._populate_hierarchy()
-        
+
     def read_global_header(self, filename, paranoid_read):
         """ Read the global header file for an Castro plotfile output. """
         counter = 0
@@ -289,7 +289,7 @@
             self.num_grids = grid_counter
             self.float_type = 'float64'
 
-        self.maxLevel = self.n_levels - 1 
+        self.maxLevel = self.n_levels - 1
         self.max_level = self.n_levels - 1
         header_file.close()
 
@@ -332,7 +332,6 @@
         header = in_file.readline()
         in_file.close()
         header.strip()
-        
         # Parse it. The pattern is in castro.definitions.py
         header_re = re.compile(castro_FAB_header_pattern)
         bytes_per_real, endian, start, stop, centerType, n_components = header_re.search(header).groups()
@@ -352,7 +351,7 @@
         stop = na.array(map(int, start_stop[1].split(',')))
         dimension = stop - start + 1
         return dimension, start, stop
-        
+
     def _populate_grid_objects(self):
         mylog.debug("Creating grid objects")
 
@@ -482,7 +481,7 @@
 
     def _parse_hierarchy(self):
         pass
-    
+
     def _detect_fields(self):
         pass
 
@@ -574,10 +573,12 @@
         if not os.path.exists(pfn):
             return False
         castro = any(("castro." in line for line in open(pfn)))
+        nyx = any(("nyx." in line for line in open(pfn)))
+        castro = castro and (not nyx) # it's only castro if it's not nyx
         maestro = os.path.exists(os.path.join(pname, "job_info"))
         orion = (not castro) and (not maestro)
         return castro
-        
+
     def _parse_parameter_file(self):
         """
         Parses the parameter file and establishes the various dictionaries.


--- a/yt/frontends/castro/definitions.py	Tue Jun 21 23:24:28 2011 -0400
+++ b/yt/frontends/castro/definitions.py	Thu Aug 11 13:36:43 2011 -0600
@@ -5,7 +5,7 @@
 Affiliation: KIPAC/SLAC/Stanford
 Homepage: http://yt.enzotools.org/
 License:
-  Copyright (C) 2008-20010 J.S. Oishi.  All Rights Reserved.
+  Copyright (C) 2008-2010 J.S. Oishi.  All Rights Reserved.
 
   This file is part of yt.
 
@@ -89,4 +89,4 @@
 castro_particle_field_names = \
     ['particle_position_%s' % ax for ax in 'xyz'] + \
     ['particle_mass'] +  \
-    ['particle_velocity_%s' % ax for ax in 'xyz'] 
+    ['particle_velocity_%s' % ax for ax in 'xyz']


--- a/yt/frontends/castro/setup.py	Tue Jun 21 23:24:28 2011 -0400
+++ b/yt/frontends/castro/setup.py	Thu Aug 11 13:36:43 2011 -0600
@@ -8,5 +8,5 @@
     from numpy.distutils.misc_util import Configuration
     config = Configuration('castro', parent_package, top_path)
     config.make_config_py() # installs __config__.py
-    config.make_svn_version_py()
+    #config.make_svn_version_py()
     return config


--- a/yt/frontends/chombo/fields.py	Tue Jun 21 23:24:28 2011 -0400
+++ b/yt/frontends/chombo/fields.py	Thu Aug 11 13:36:43 2011 -0600
@@ -83,21 +83,17 @@
             data["Y-magnfield"]**2 +
             data["Z-magnfield"]**2)/2.
 add_field("MagneticEnergy", function=_MagneticEnergy, take_log=True,
-          units=r"",display_name=r"B^2/8\pi")
+          units=r"", display_name=r"B^2 / 8 \pi")
 ChomboFieldInfo["MagneticEnergy"]._projected_units=r""
 
 def _xVelocity(field, data):
-    """generate x-velocity from x-momentum and density
-
-    """
+    """ Generate x-velocity from x-momentum and density. """
     return data["X-momentum"]/data["density"]
 add_field("x-velocity",function=_xVelocity, take_log=False,
           units=r'\rm{cm}/\rm{s}')
 
 def _yVelocity(field,data):
-    """generate y-velocity from y-momentum and density
-
-    """
+    """ Generate y-velocity from y-momentum and density. """
     #try:
     #    return data["xvel"]
     #except KeyError:
@@ -106,10 +102,7 @@
           units=r'\rm{cm}/\rm{s}')
 
 def _zVelocity(field,data):
-    """generate z-velocity from z-momentum and density
-
-    """
+    """ Generate z-velocity from z-momentum and density. """
     return data["Z-momentum"]/data["density"]
 add_field("z-velocity",function=_zVelocity, take_log=False,
           units=r'\rm{cm}/\rm{s}')
-    


--- a/yt/frontends/chombo/setup.py	Tue Jun 21 23:24:28 2011 -0400
+++ b/yt/frontends/chombo/setup.py	Thu Aug 11 13:36:43 2011 -0600
@@ -8,5 +8,5 @@
     from numpy.distutils.misc_util import Configuration
     config = Configuration('chombo',parent_package,top_path)
     config.make_config_py() # installs __config__.py
-    config.make_svn_version_py()
+    #config.make_svn_version_py()
     return config


--- a/yt/frontends/enzo/fields.py	Tue Jun 21 23:24:28 2011 -0400
+++ b/yt/frontends/enzo/fields.py	Thu Aug 11 13:36:43 2011 -0600
@@ -222,7 +222,7 @@
 _default_fields = ["Density","Temperature",
                    "x-velocity","y-velocity","z-velocity",
                    "x-momentum","y-momentum","z-momentum",
-                   "Bx", "By", "Bz", "Dust_Temperature_Density"]
+                   "Bx", "By", "Bz", "Dust_Temperature"]
 # else:
 #     _default_fields = ["Density","Temperature","Gas_Energy","Total_Energy",
 #                        "x-velocity","y-velocity","z-velocity"]
@@ -263,6 +263,8 @@
 
 KnownEnzoFields["Temperature"]._units = r"\rm{K}"
 KnownEnzoFields["Temperature"].units = r"K"
+KnownEnzoFields["Dust_Temperature"]._units = r"\rm{K}"
+KnownEnzoFields["Dust_Temperature"].units = r"K"
 
 def _convertVelocity(data):
     return data.convert("x-velocity")
@@ -272,17 +274,6 @@
     f._convert_function = _convertVelocity
     f.take_log = False
 
-# Dust temperature - raw field is T_dust * Density
-def _dust_temperature(field, data):
-    return data['Dust_Temperature_Density'] / data['Density']
-def _convert_dust_temperature(data):
-    ef = (1.0 + data.pf.current_redshift)**3.0
-    return data.convert("Density") / ef
-add_field("Dust_Temperature", function=_dust_temperature, 
-          convert_function=_convert_dust_temperature, take_log=True,
-          validators=[ValidateDataField('Dust_Temperature_Density')],
-          units = r"K")
-
 def _spdensity(field, data):
     blank = na.zeros(data.ActiveDimensions, dtype='float32')
     if data.NumberOfParticles == 0: return blank
@@ -303,8 +294,11 @@
 def _dmpdensity(field, data):
     blank = na.zeros(data.ActiveDimensions, dtype='float32')
     if data.NumberOfParticles == 0: return blank
-    filter = data['creation_time'] <= 0.0
-    if not filter.any(): return blank
+    if 'creation_time' in data.keys():
+        filter = data['creation_time'] <= 0.0
+        if not filter.any(): return blank
+    else:
+        filter = na.ones(data.NumberOfParticles, dtype='bool')
     amr_utils.CICDeposit_3(data["particle_position_x"][filter].astype(na.float64),
                            data["particle_position_y"][filter].astype(na.float64),
                            data["particle_position_z"][filter].astype(na.float64),


--- a/yt/frontends/enzo/setup.py	Tue Jun 21 23:24:28 2011 -0400
+++ b/yt/frontends/enzo/setup.py	Thu Aug 11 13:36:43 2011 -0600
@@ -8,5 +8,5 @@
     from numpy.distutils.misc_util import Configuration
     config = Configuration('enzo',parent_package,top_path)
     config.make_config_py() # installs __config__.py
-    config.make_svn_version_py()
+    #config.make_svn_version_py()
     return config


--- a/yt/frontends/flash/data_structures.py	Tue Jun 21 23:24:28 2011 -0400
+++ b/yt/frontends/flash/data_structures.py	Thu Aug 11 13:36:43 2011 -0600
@@ -323,6 +323,7 @@
             self.current_redshift = self.omega_lambda = self.omega_matter = \
                 self.hubble_constant = self.cosmological_simulation = 0.0
         self._handle.close()
+        self._handle = None
 
     @classmethod
     def _is_valid(self, *args, **kwargs):


--- a/yt/frontends/flash/setup.py	Tue Jun 21 23:24:28 2011 -0400
+++ b/yt/frontends/flash/setup.py	Thu Aug 11 13:36:43 2011 -0600
@@ -8,5 +8,5 @@
     from numpy.distutils.misc_util import Configuration
     config = Configuration('flash',parent_package,top_path)
     config.make_config_py() # installs __config__.py
-    config.make_svn_version_py()
+    #config.make_svn_version_py()
     return config


--- a/yt/frontends/gadget/setup.py	Tue Jun 21 23:24:28 2011 -0400
+++ b/yt/frontends/gadget/setup.py	Thu Aug 11 13:36:43 2011 -0600
@@ -8,5 +8,5 @@
     from numpy.distutils.misc_util import Configuration
     config = Configuration('gadget',parent_package,top_path)
     config.make_config_py() # installs __config__.py
-    config.make_svn_version_py()
+    #config.make_svn_version_py()
     return config


--- a/yt/frontends/gdf/setup.py	Tue Jun 21 23:24:28 2011 -0400
+++ b/yt/frontends/gdf/setup.py	Thu Aug 11 13:36:43 2011 -0600
@@ -8,5 +8,5 @@
     from numpy.distutils.misc_util import Configuration
     config = Configuration('gdf',parent_package,top_path)
     config.make_config_py() # installs __config__.py
-    config.make_svn_version_py()
+    #config.make_svn_version_py()
     return config


--- a/yt/frontends/maestro/setup.py	Tue Jun 21 23:24:28 2011 -0400
+++ b/yt/frontends/maestro/setup.py	Thu Aug 11 13:36:43 2011 -0600
@@ -8,5 +8,5 @@
     from numpy.distutils.misc_util import Configuration
     config = Configuration('maestro',parent_package,top_path)
     config.make_config_py() # installs __config__.py
-    config.make_svn_version_py()
+    #config.make_svn_version_py()
     return config


--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/yt/frontends/nyx/api.py	Thu Aug 11 13:36:43 2011 -0600
@@ -0,0 +1,29 @@
+"""
+API for yt.frontends.nyx
+
+Author: Casey W. Stark <caseywstark at gmail.com>
+Affiliation: UC Berkeley
+Homepage: http://yt.enzotools.org/
+License:
+  Copyright (C) 2011 Casey W. Stark, Matthew Turk.  All Rights Reserved.
+
+  This file is part of yt.
+
+  yt is free software; you can redistribute it and/or modify
+  it under the terms of the GNU General Public License as published by
+  the Free Software Foundation; either version 3 of the License, or
+  (at your option) any later version.
+
+  This program is distributed in the hope that it will be useful,
+  but WITHOUT ANY WARRANTY; without even the implied warranty of
+  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+  GNU General Public License for more details.
+
+  You should have received a copy of the GNU General Public License
+  along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+"""
+
+from .data_structures import NyxGrid, NyxHierarchy, NyxStaticOutput
+from .fields import NyxFieldContainer, nyx_fields, add_nyx_field
+from .io import IOHandlerNative


--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/yt/frontends/nyx/data_structures.py	Thu Aug 11 13:36:43 2011 -0600
@@ -0,0 +1,759 @@
+"""
+Data structures for Nyx.
+
+Author: Casey W. Stark <caseywstark at gmail.com>
+Affiliation: UC Berkeley
+Author: J. S. Oishi <jsoishi at gmail.com>
+Affiliation: KIPAC/SLAC/Stanford
+Homepage: http://yt.enzotools.org/
+License:
+  Copyright (C) 2011 Casey W. Stark, J. S. Oishi, Matthew Turk.  All Rights
+  Reserved.
+
+  This file is part of yt.
+
+  yt is free software; you can redistribute it and/or modify
+  it under the terms of the GNU General Public License as published by
+  the Free Software Foundation; either version 3 of the License, or
+  (at your option) any later version.
+
+  This program is distributed in the hope that it will be useful,
+  but WITHOUT ANY WARRANTY; without even the implied warranty of
+  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+  GNU General Public License for more details.
+
+  You should have received a copy of the GNU General Public License
+  along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+"""
+
+from collections import defaultdict
+import itertools
+import os
+import re
+from stat import ST_CTIME
+from string import strip, rstrip
+import weakref
+
+import numpy as na
+
+from yt.funcs import *
+from yt.data_objects.grid_patch import AMRGridPatch
+from yt.data_objects.hierarchy import AMRHierarchy
+from yt.data_objects.static_output import StaticOutput
+from yt.utilities.amr_utils import get_box_grids_level
+from yt.utilities.definitions import mpc_conversion
+
+from .definitions import parameter_type_dict, nyx_to_enzo_dict, \
+                         fab_header_pattern, nyx_particle_field_names
+from .utils import boxlib_bool_to_int
+from .fields import NyxFieldContainer, add_field
+
+
+class NyxGrid(AMRGridPatch):
+    _id_offset = 0
+
+    def __init__(self, left_edge, right_edge, index, level, filename, offset,
+                 dimensions, start, stop, **kwargs):
+        """ Build a grid that understands Nyx's boxlib format. """
+        # passes index as the patch ``id``
+        AMRGridPatch.__init__(self, index, **kwargs)
+        self.filename = filename
+        self._offset = offset
+
+        # @todo: enzo-isms.
+        # Must copy to avoid refs, in case the user alters the args later.
+        self.ActiveDimensions = (dimensions.copy()).astype('int32')
+        self.start_index = start.copy()
+        self.stop_index = stop.copy()
+        self.LeftEdge  = left_edge.copy()
+        self.RightEdge = right_edge.copy()
+        self.index = index
+        self.Level = level
+
+    def get_global_startindex(self):
+        return self.start_index
+
+    def _prepare_grid(self):
+        """ Copies all the appropriate attributes from the hierarchy. """
+        # This is definitely the slowest part of generating the hierarchy
+        h = self.hierarchy  # alias
+        h.grid_levels[self.id, 0] = self.Level
+        h.grid_left_edge[self.id,:] = self.LeftEdge[:]
+        h.grid_right_edge[self.id,:] = self.RightEdge[:]
+
+        # Might still work
+        #self.Time = h.gridTimes[self.id,0]
+        #self.NumberOfParticles = h.gridNumberOfParticles[self.id,0]
+
+        # @todo: enzo-isms
+        self.field_indexes = h.field_indexes
+        self.Children = h.gridTree[self.id]
+        pIDs = h.gridReverseTree[self.id]
+
+        if len(pIDs) > 0:
+            self.Parent = [weakref.proxy(h.grids[pID]) for pID in pIDs]
+        else:
+            self.Parent = None
+
+    def _setup_dx(self):
+        # So first we figure out what the index is. We don't assume that
+        # dx=dy=dz, at least here. We probably do elsewhere.
+        id = self.id - self._id_offset
+        if self.Parent is not None:
+            self.dds = self.Parent[0].dds / self.pf.refine_by
+        else:
+            LE, RE = self.hierarchy.grid_left_edge[id,:], \
+                     self.hierarchy.grid_right_edge[id,:]
+            self.dds = na.array((RE - LE) / self.ActiveDimensions)
+
+        if self.pf.dimensionality < 2: self.dds[1] = 1.0
+        if self.pf.dimensionality < 3: self.dds[2] = 1.0
+        self.data['dx'], self.data['dy'], self.data['dz'] = self.dds
+
+    def __repr__(self):
+        return "NyxGrid_%04i" % (self.id)
+
+class NyxHierarchy(AMRHierarchy):
+    grid = NyxGrid
+
+    def __init__(self, pf, data_style="nyx_native"):
+        self.field_info = NyxFieldContainer()
+        self.field_indexes = {}
+        self.parameter_file = weakref.proxy(pf)
+        self.directory = pf.path
+        header_path = os.path.join(self.directory, "Header")  # make a kwarg?
+
+        self.data_style = data_style
+        #self._setup_classes()
+
+        # This also sets up the grid objects
+        self.read_global_header(header_path)
+        self.read_particle_header()
+        self.__cache_endianness(self.levels[-1].grids[-1])
+
+        # @todo: should be first line
+        AMRHierarchy.__init__(self, pf, self.data_style)
+        self._setup_data_io()
+        self._setup_field_list()
+        self._populate_hierarchy()
+
+    def read_global_header(self, header_path):
+        """ Read the global header file for an Nyx plotfile output. """
+        counter = 0
+        header_file = open(header_path, 'r')
+        self.__global_header_lines = header_file.readlines()
+
+        # parse the file
+        self.nyx_version = self.__global_header_lines[0].rstrip()
+        self.n_fields = int(self.__global_header_lines[1])
+
+        # the first two lines hold the version string and the field count, so
+        # the field names occupy lines [2, n_fields + 2)
+        counter = self.n_fields + 2
+        self.field_list = []
+        for i, line in enumerate(self.__global_header_lines[2:counter]):
+            self.field_list.append(line.rstrip())
+
+        # figure out dimensions and make sure it's 3D
+        self.dimension = int(self.__global_header_lines[counter])
+        if self.dimension != 3:
+            raise RunTimeError("Current data is %iD. yt only supports Nyx data in 3D" % self.dimension)
+
+        counter += 1
+        self.Time = float(self.__global_header_lines[counter])
+        counter += 1
+        self.finest_grid_level = int(self.__global_header_lines[counter])
+        self.n_levels = self.finest_grid_level + 1
+        counter += 1
+
+        # quantities suffixed with _unnecessary are also stored in the inputs
+        # file and are not strictly needed. they are read in and kept in case
+        # we ever want a "backwards" path that takes the data out of the
+        # Header file and uses it to fill in for a missing inputs file
+        self.domainLeftEdge_unnecessary = na.array(map(float, self.__global_header_lines[counter].split()))
+        counter += 1
+        self.domainRightEdge_unnecessary = na.array(map(float, self.__global_header_lines[counter].split()))
+        counter += 1
+        self.refinementFactor_unnecessary = self.__global_header_lines[counter].split() #na.array(map(int, self.__global_header_lines[counter].split()))
+        counter += 1
+        self.globalIndexSpace_unnecessary = self.__global_header_lines[counter]
+        counter += 1
+        self.timestepsPerLevel_unnecessary = self.__global_header_lines[counter]
+        counter += 1
+
+        self.dx = na.zeros((self.n_levels, 3))
+        for i, line in enumerate(self.__global_header_lines[counter:counter + self.n_levels]):
+            self.dx[i] = na.array(map(float, line.split()))
+        counter += self.n_levels
+        self.geometry = int(self.__global_header_lines[counter])
+        if self.geometry != 0:
+            raise RunTimeError("yt only supports cartesian coordinates.")
+        counter += 1
+
+        # @todo: this is just to debug. eventually it should go away.
+        linebreak = int(self.__global_header_lines[counter])
+        if linebreak != 0:
+            raise RunTimeError("INTERNAL ERROR! This should be a zero.")
+        counter += 1
+
+        # each level is one group with ngrids on it. each grid has 3 lines of 2
+        # reals
+        self.levels = []
+        grid_counter = 0
+        file_finder_pattern = r"FabOnDisk: (\w+_D_[0-9]{4}) (\d+)\n"
+        re_file_finder = re.compile(file_finder_pattern)
+        dim_finder_pattern = r"\(\((\d+,\d+,\d+)\) \((\d+,\d+,\d+)\) \(\d+,\d+,\d+\)\)\n"
+        re_dim_finder = re.compile(dim_finder_pattern)
+        data_files_pattern = r"Level_[\d]/"
+        data_files_finder = re.compile(data_files_pattern)
+
+        for level in range(0, self.n_levels):
+            tmp = self.__global_header_lines[counter].split()
+            # should this be grid_time or level_time??
+            lev, ngrids, grid_time = int(tmp[0]), int(tmp[1]), float(tmp[2])
+            counter += 1
+            nsteps = int(self.__global_header_lines[counter])
+            counter += 1
+            self.levels.append(NyxLevel(lev, ngrids))
+
+            # Open level header, extract file names and offsets for each grid
+            # read slightly out of order here: at the end of the lo, hi pairs
+            # for x, y, z is a *list* of files types in the Level directory.
+            # Each type has Header and a number of data files (one per
+            # processor)
+            tmp_offset = counter + 3 * ngrids
+            nfiles = 0
+            key_off = 0
+            files = {}
+            offsets = {}
+            while nfiles + tmp_offset < len(self.__global_header_lines) \
+                  and data_files_finder.match(self.__global_header_lines[nfiles + tmp_offset]):
+                filen = os.path.join(self.parameter_file.path, \
+                                     self.__global_header_lines[nfiles + tmp_offset].strip())
+                # open each "_H" header file, and get the number of
+                # components within it
+                level_header_file = open(filen + '_H', 'r').read()
+                start_stop_index = re_dim_finder.findall(level_header_file) # just take the last one
+                grid_file_offset = re_file_finder.findall(level_header_file)
+                ncomp_this_file = int(level_header_file.split('\n')[2])
+
+                for i in range(ncomp_this_file):
+                    key = self.field_list[i + key_off]
+                    f, o = zip(*grid_file_offset)
+                    files[key] = f
+                    offsets[key] = o
+                    self.field_indexes[key] = i
+
+                key_off += ncomp_this_file
+                nfiles += 1
+
+            # convert dict of lists to list of dicts
+            fn = []
+            off = []
+            lead_path = os.path.join(self.parameter_file.path,
+                                     'Level_%i' % level)
+            for i in range(ngrids):
+                fi = [os.path.join(lead_path, files[key][i]) for key in self.field_list]
+                of = [int(offsets[key][i]) for key in self.field_list]
+                fn.append(dict(zip(self.field_list, fi)))
+                off.append(dict(zip(self.field_list, of)))
+
+            for grid in range(0, ngrids):
+                gfn = fn[grid]  # filename of file containing this grid
+                gfo = off[grid] # offset within that file
+                xlo, xhi = map(float, self.__global_header_lines[counter].split())
+                counter += 1
+                ylo, yhi = map(float, self.__global_header_lines[counter].split())
+                counter += 1
+                zlo, zhi = map(float, self.__global_header_lines[counter].split())
+                counter += 1
+                lo = na.array([xlo, ylo, zlo])
+                hi = na.array([xhi, yhi, zhi])
+                dims, start, stop = self.__calculate_grid_dimensions(start_stop_index[grid])
+                self.levels[-1].grids.append(self.grid(lo, hi, grid_counter,
+                                             level, gfn, gfo, dims, start, stop,
+                                             hierarchy=self))
+                grid_counter += 1 # this is global, and shouldn't be reset
+                                  # for each level
+
+            # already read the filenames above...
+            counter += nfiles
+            self.num_grids = grid_counter
+            self.float_type = 'float64'
+
+        self.maxLevel = self.n_levels - 1
+        self.max_level = self.n_levels - 1
+        header_file.close()
+
+    def read_particle_header(self):
+        # We need to get particle offsets and particle counts
+        if not self.parameter_file.use_particles:
+            self.pgrid_info = na.zeros((self.num_grids, 3), dtype='int64')
+            return
+        self.field_list += nyx_particle_field_names[:]
+        header = open(os.path.join(self.parameter_file.path, "DM", "Header"))
+        version = header.readline()
+        ndim = header.readline()
+        nfields = header.readline()
+        ntotalpart = int(header.readline())
+        dummy = header.readline() # nextid
+        maxlevel = int(header.readline()) # max level
+
+        # skip over how many grids are on each level; this information is
+        # duplicated elsewhere
+        for i in range(maxlevel + 1):
+            dummy = header.readline()
+
+        grid_info = na.fromiter((int(i) for line in header.readlines()
+                                 for i in line.split()),
+                                dtype='int64',
+                                count=3*self.num_grids).reshape((self.num_grids, 3))
+        self.pgrid_info = grid_info
+
+    def __cache_endianness(self, test_grid):
+        """
+        Cache the endianness and bytes per real of the grids by using a test
+        grid and assuming that all grids have the same endianness. This is a
+        pretty safe assumption since Nyx uses one file per processor (@todo:
+        make sure this is still true, I don't think so). If you are running on
+        a cluster with mixed-endian processors, this will not work.
+
+        """
+        # open the test file & grab the header
+        inFile = open(os.path.expanduser(test_grid.filename[self.field_list[0]]), 'rb')
+        header = inFile.readline()
+        inFile.close()
+        header.strip()
+
+        headerRe = re.compile(fab_header_pattern)
+        bytesPerReal, endian, start, stop, centerType, nComponents = \
+            headerRe.search(header).groups()
+        self._bytesPerReal = int(bytesPerReal)
+        if self._bytesPerReal == int(endian[0]):
+            dtype = '<'
+        elif self._bytesPerReal == int(endian[-1]):
+            dtype = '>'
+        else:
+            raise ValueError("FAB header is neither big nor little endian. Perhaps the file is corrupt?")
+
+        dtype += ('f%i' % self._bytesPerReal) # always a floating point
+        self._dtype = dtype
+
+    def __calculate_grid_dimensions(self, start_stop):
+        start = na.array(map(int, start_stop[0].split(',')))
+        stop = na.array(map(int, start_stop[1].split(',')))
+        dimension = stop - start + 1
+        return dimension, start, stop
+
+    def _populate_grid_objects(self):
+        mylog.debug("Creating grid objects")
+
+        self.grids = na.concatenate([level.grids for level in self.levels])
+        basedir = self.parameter_file.path
+        for g, pg in itertools.izip(self.grids, self.pgrid_info):
+            g.particle_filename = os.path.join(basedir, "DM",
+                                               "Level_%s" % (g.Level),
+                                               "DATA_%04i" % pg[0])
+            g.NumberOfParticles = pg[1]
+            g._particle_offset = pg[2]
+
+        self.grid_particle_count[:,0] = self.pgrid_info[:,1]
+        del self.pgrid_info  # if this is all pgrid_info is used for...
+
+        gls = na.concatenate([level.ngrids * [level.level] for level in self.levels])
+        self.grid_levels[:] = gls.reshape((self.num_grids, 1))
+        grid_dcs = na.concatenate([level.ngrids*[self.dx[level.level]]
+                                   for level in self.levels], axis=0)
+
+        self.grid_dxs = grid_dcs[:,0].reshape((self.num_grids, 1))
+        self.grid_dys = grid_dcs[:,1].reshape((self.num_grids, 1))
+        self.grid_dzs = grid_dcs[:,2].reshape((self.num_grids, 1))
+
+        left_edges = []
+        right_edges = []
+        dims = []
+        for level in self.levels:
+            left_edges += [g.LeftEdge for g in level.grids]
+            right_edges += [g.RightEdge for g in level.grids]
+            dims += [g.ActiveDimensions for g in level.grids]
+
+        self.grid_left_edge = na.array(left_edges)
+        self.grid_right_edge = na.array(right_edges)
+        self.grid_dimensions = na.array(dims)
+        # note: ``[] * num_grids`` is always just ``[]``, so build the real
+        # lists of lists directly
+        self.gridReverseTree = [[] for i in range(self.num_grids)]
+        self.gridTree = [[] for i in range(self.num_grids)]
+
+        mylog.debug("Done creating grid objects")
+
+    def _populate_hierarchy(self):
+        self.__setup_grid_tree()
+
+        for i, grid in enumerate(self.grids):
+            if (i%1e4) == 0:
+                mylog.debug("Prepared % 7i / % 7i grids", i, self.num_grids)
+
+            grid._prepare_grid()
+            grid._setup_dx()
+
+    def __setup_grid_tree(self):
+        mask = na.empty(self.grids.size, dtype='int32')
+        for i, grid in enumerate(self.grids):
+            get_box_grids_level(grid.LeftEdge, grid.RightEdge, grid.Level + 1,
+                                self.grid_left_edge, self.grid_right_edge,
+                                self.grid_levels, mask)
+            children = self.grids[mask.astype("bool")]
+            for child in children:
+                self.gridReverseTree[child.id].append(i)
+                self.gridTree[i].append(weakref.proxy(child))
+
+    def _setup_classes(self):
+        dd = self._get_data_reader_dict()
+        dd["field_indexes"] = self.field_indexes
+        AMRHierarchy._setup_classes(self, dd)
+        self.object_types.sort()
+
+    def _get_grid_children(self, grid):
+        mask = na.zeros(self.num_grids, dtype='bool')
+        grids, grid_ind = self.get_box_grids(grid.LeftEdge, grid.RightEdge)
+        mask[grid_ind] = True
+        mask = na.logical_and(mask, (self.grid_levels == (grid.Level + 1)).flat)
+        return self.grids[mask]
+
+    def _setup_field_list(self):
+        self.derived_field_list = []
+
+        for field in self.field_info:
+            try:
+                fd = self.field_info[field].get_dependencies(pf=self.parameter_file)
+            except:
+                continue
+            available = na.all([f in self.field_list for f in fd.requested])
+            if available: self.derived_field_list.append(field)
+
+        for field in self.field_list:
+            if field not in self.derived_field_list:
+                self.derived_field_list.append(field)
+
+        if self.parameter_file.use_particles:
+            # We know which particle fields will exist -- pending further
+            # changes in the future.
+            for field in nyx_particle_field_names:
+                def external_wrapper(f):
+                    def _convert_function(data):
+                        return data.convert(f)
+                    return _convert_function
+                cf = external_wrapper(field)
+                # Note that we call add_field on the field_info directly.  This
+                # will allow the same field detection mechanism to work for 1D,
+                # 2D and 3D fields.
+                self.pf.field_info.add_field(field, lambda a, b: None,
+                                             convert_function=cf,
+                                             take_log=False, particle_type=True)
+
+    def _count_grids(self):
+        """ this is already provided in ??? """
+        pass
+
+    def _initialize_grid_arrays(self):
+        mylog.debug("Allocating arrays for %s grids", self.num_grids)
+        self.grid_dimensions = na.ones((self.num_grids, 3), 'int32')
+        self.grid_left_edge = na.zeros((self.num_grids, 3), self.float_type)
+        self.grid_right_edge = na.ones((self.num_grids, 3), self.float_type)
+        self.grid_levels = na.zeros((self.num_grids, 1), 'int32')
+        self.grid_particle_count = na.zeros((self.num_grids, 1), 'int32')
+
+    def _parse_hierarchy(self):
+        pass
+
+    def _detect_fields(self):
+        pass
+
+    def _setup_unknown_fields(self):
+        # Doesn't seem useful
+        for field in self.field_list:
+            if field in self.parameter_file.field_info: continue
+            mylog.info("Adding %s to list of fields", field)
+            cf = None
+            if self.parameter_file.has_key(field):
+                def external_wrapper(f):
+                    def _convert_function(data):
+                        return data.convert(f)
+                    return _convert_function
+                cf = external_wrapper(field)
+            add_field(field, lambda a, b: None, convert_function=cf,
+                      take_log=False)
+
+    def _setup_derived_fields(self):
+        pass
+
+    def _initialize_state_variables(self):
+        """
+        Override not to re-initialize num_grids in AMRHierarchy.__init__
+
+        """
+        self._parallel_locking = False
+        self._data_file = None
+        self._data_mode = None
+        self._max_locations = {}
+
+class NyxLevel:
+    def __init__(self, level, ngrids):
+        self.level = level
+        self.ngrids = ngrids
+        self.grids = []
+
+class NyxStaticOutput(StaticOutput):
+    """
+    A stripped-down output class that simply reads and parses *filename*,
+    without looking at the Nyx hierarchy.
+
+    """
+    _hierarchy_class = NyxHierarchy
+    _fieldinfo_class = NyxFieldContainer
+
+    @classmethod
+    def _is_valid(cls, *args, **kwargs):
+        # fill our args
+        pname = args[0].rstrip("/")
+        dn = os.path.dirname(pname)
+        if len(args) > 1:
+            kwargs['paramFilename'] = args[1]
+
+        pfname = kwargs.get("paramFilename", os.path.join(dn, "inputs"))
+
+        # @todo: new Nyx output.
+        # We check for the job_info file's existence because this is currently
+        # what distinguishes Nyx data from MAESTRO data.
+        pfn = os.path.join(pfname)
+        if not os.path.exists(pfn): return False
+        nyx = any(("nyx." in line for line in open(pfn)))
+        maestro = os.path.exists(os.path.join(pname, "job_info"))
+        orion = (not nyx) and (not maestro)
+        return nyx
+
+    def __init__(self, plotname, param_filename="inputs",
+                 fparam_filename="probin", data_style="nyx_native",
+                 storage_filename=None):
+        """
+        Need to override for Nyx file structure, for now.
+
+        The paramfile is usually called "inputs" and there may be a fortran
+        inputs file usually called "probin". `plotname` here will be a directory
+        name as per BoxLib, data_style will be one of
+
+         * Native
+         * IEEE (not implemented in yt)
+         * ASCII (not implemented in yt)
+
+        """
+        self.storage_filename = storage_filename
+        self.parameter_filename = param_filename
+        self.parameter_file_path = os.path.abspath(self.parameter_filename)
+        self.fparameter_filename = fparam_filename
+        self.fparameter_file_path = os.path.abspath(self.fparameter_filename)
+        self.path = os.path.abspath(plotname)  # data folder
+
+        self.fparameters = {}
+
+        # @todo: quick fix...
+        self.use_particles = False
+
+        # @todo: first line
+        # runs ``self._parse_parameter_file()``, ``self._set_units()``, and
+        # ``self.print_key_parameters()``
+        StaticOutput.__init__(self, plotname.rstrip("/"), data_style=data_style)
+
+        # @todo: field pruning should happen here
+        self.field_info = self._fieldinfo_class()
+
+        # @todo: check all of these and hopefully factor out of the constructor.
+        # These should maybe not be hardcoded?
+        self.parameters["HydroMethod"] = "nyx"  # always PPM DE
+        self.parameters["Time"] = 1.  # default unit is 1...
+        self.parameters["DualEnergyFormalism"] = 0  # always off.
+        self.parameters["EOSType"] = -1  # default
+
+        # @todo: hopefully delete this after implementing new Nyx output
+        if self.fparameters.has_key("mu"):
+            self.parameters["mu"] = self.fparameters["mu"]
+
+    def _parse_parameter_file(self):
+        """
+        Parses the parameter file and establishes the various dictionaries.
+
+        """
+        # More boxlib madness...
+        self._parse_header_file()
+
+        if os.path.isfile(self.fparameter_file_path):
+            self._parse_fparameter_file()
+
+        # Let's read the file
+        self.unique_identifier = int(os.stat(self.parameter_filename)[ST_CTIME])
+        lines = open(self.parameter_file_path).readlines()
+
+        for line in lines:
+            if line.find("#") >= 1:  # Keep the commented lines...
+                line = line[:line.find("#")]
+            line = line.strip()
+            if len(line) < 2 or line.find("#") == 0: # ...but skip comments
+                continue
+
+            try:
+                param, val_string = map(strip, line.split("="))
+            except ValueError:
+                mylog.error("ValueError: '%s'", line)
+
+            vals = val_string.split()
+
+            # @todo: don't do this here...
+            if nyx_to_enzo_dict.has_key(param):
+                param_name = nyx_to_enzo_dict[param]
+                vals = map(parameter_type_dict[param_name], vals)
+                if len(vals) == 1:
+                    self.parameters[param_name] = vals[0]
+                else:
+                    # don't know why this is special
+                    if param_name == "RefineBy":
+                        self.parameters[param_name] = vals[0]
+                    else:
+                        self.parameters[param_name] = vals
+
+            elif param.startswith("geometry.prob_hi"):
+                self.domain_right_edge = na.array([float(i) for i in vals])
+            elif param.startswith("geometry.prob_lo"):
+                self.domain_left_edge = na.array([float(i) for i in vals])
+            elif param.startswith("particles.write_in_plotfile"):
+                self.use_particles = boxlib_bool_to_int(vals[0])
+
+        # aliases we need
+        self.parameters["TopGridRank"] = len(self.parameters["TopGridDimensions"])
+        self.dimensionality = self.parameters["TopGridRank"]
+        self.domain_dimensions = self.parameters["TopGridDimensions"]
+        self.refine_by = self.parameters.get("RefineBy", 2)  # default to 2 when not specified
+
+        if self.parameters.has_key("ComovingCoordinates") \
+           and self.parameters["ComovingCoordinates"]:
+            self.cosmological_simulation = 1
+            self.omega_lambda = self.parameters["CosmologyOmegaLambdaNow"]
+            self.omega_matter = self.parameters["CosmologyOmegaMatterNow"]
+            self.hubble_constant = self.parameters["CosmologyHubbleConstantNow"]
+
+            # So broken. We will fix this in the new Nyx output format
+            a_file = open(os.path.join(self.path, "comoving_a"))
+            line = a_file.readline().strip()
+            a_file.close()
+            self.parameters["CosmologyCurrentRedshift"] = 1 / float(line) - 1
+            self.cosmological_scale_factor = float(line)
+
+            # alias
+            self.current_redshift = self.parameters["CosmologyCurrentRedshift"]
+
+        else:
+            # @todo: automatic defaults
+            self.current_redshift = self.omega_lambda = self.omega_matter = \
+                self.hubble_constant = self.cosmological_simulation = 0.0
+
+    def _parse_header_file(self):
+        """
+        Parses the BoxLib header file to get any parameters stored there.
+        Hierarchy information is read out of this file in NyxHierarchy.
+
+        Currently, only Time is read here.
+
+        """
+        # @todo: header filename option? probably not.
+        header_file = open(os.path.join(self.path, "Header"))
+        lines = header_file.readlines()  # hopefully this is small
+        header_file.close()
+
+        n_fields = int(lines[1])  # this could change
+        self.current_time = float(lines[3 + n_fields])  # very fragile
+
+    def _parse_fparameter_file(self):
+        """
+        Parses the fortran parameter file for Nyx. Most of it is unused, but
+        this is where mu (mean mass per particle, in units of m_hydrogen) is
+        kept, along with the cosmological variables.
+
+        """
+        # @todo: delete after new Nyx output
+        lines = open(self.fparameter_file_path).readlines()
+        for line in lines:
+            if line.count("=") == 1:
+                nyx_param, val_string = map(strip, line.split("="))
+
+                # Check if we care about this param. If so, translate it.
+                if nyx_to_enzo_dict.has_key(nyx_param):
+                    param = nyx_to_enzo_dict[nyx_param]
+                else:
+                    continue
+
+                # parse vals string and correct for fortran double syntax
+                vals = val_string.split()
+                if val_string.count("'") == 0:  # must be floats
+                    vals = map(float, [val.replace('D', 'e').replace('d', 'e')
+                                       for val in vals])
+
+                # single element or array?
+                if len(vals) == 1:
+                    self.parameters[param] = vals[0]
+                else:
+                    self.parameters[param] = vals
+
+    def _set_units(self):
+        """
+        Generates the conversion to various physical _units based on the
+        parameter file.
+
+        """
+        self.units = {}
+        self.time_units = {}
+
+        if len(self.parameters) == 0:  # don't think this is possible, but safe
+            self._parse_parameter_file()
+
+        # Masses are always in $ M_{\odot} $
+        self.units["particle_mass"] = 1.989e33
+
+        mylog.warning("Length units: setting 1.0 = 1.0 Mpc.")
+        self.units.update(mpc_conversion)
+        self.units["density"] = self.units["particle_mass"]/(self.units["cm"])**3
+        self.units["particle_mass_density"] = self.units["density"]
+        self.units["Density"] = 1
+
+        # @todo: enzo-isms
+        mylog.warning("Time units: setting 1.0 = Mpc/km s ~ 10^12 yr .")
+        self.time_units["s"] = 1.0 / 3.08568025e19
+        self.conversion_factors["Time"] = 1.0 / 3.08568025e19
+
+        # velocities are not comoving!
+        # Nyx is in km/s so we need to convert to cm/s, hence 1e5
+        cf = 1e5 * (self.cosmological_scale_factor)
+        for ax in "xyz":
+            self.units["particle_velocity_%s" % ax] = cf
+
+        # misc
+        # unknown keys convert with a factor of 1.0; seed with the factors set
+        # above so they are not clobbered
+        self.conversion_factors = defaultdict(lambda: 1.0,
+                                              self.conversion_factors)
+        self.time_units["1"] = 1
+        self.units["1"] = 1.0
+        self.units["unitary"] = 1.0 / (self.domain_right_edge -
+                                       self.domain_left_edge).max()
+
+        # time
+        seconds = self.time_units["s"]
+        self.time_units["days"] = seconds / (3600 * 24.0)
+        self.time_units["years"] = seconds / (3600 * 24.0 * 365)
+
+        # not the most useful right now, but someday
+        for key in nyx_particle_field_names:
+            self.conversion_factors[key] = 1.0
+
+    def _setup_nounits_units(self):
+        z = 0
+
+    def _localize(self, f, default):
+        if f is None:
+            return os.path.join(self.directory, default)
+        return f


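With these pieces in place, a Nyx plotfile directory should load like any other frontend. A hedged sketch (all paths are hypothetical; "inputs" and "probin" are picked up relative to the working directory by default):

    from yt.frontends.nyx.api import NyxStaticOutput

    pf = NyxStaticOutput("plt00113")  # hypothetical BoxLib plotfile directory
    pf.h  # triggers NyxHierarchy construction and header parsing
    print pf.current_time, pf.current_redshift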
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/yt/frontends/nyx/definitions.py	Thu Aug 11 13:36:43 2011 -0600
@@ -0,0 +1,91 @@
+"""
+Definitions specific to Nyx
+
+Author: Casey W. Stark <caseywstark at gmail.com>
+Affiliation: UC Berkeley
+Author: J. S. Oishi <jsoishi at gmail.com>
+Affiliation: KIPAC/SLAC/Stanford
+Homepage: http://yt.enzotools.org/
+License:
+  Copyright (C) 2011 Casey W. Stark, J. S. Oishi, Matthew Turk.  All Rights
+  Reserved.
+
+  This file is part of yt.
+
+  yt is free software; you can redistribute it and/or modify
+  it under the terms of the GNU General Public License as published by
+  the Free Software Foundation; either version 3 of the License, or
+  (at your option) any later version.
+
+  This program is distributed in the hope that it will be useful,
+  but WITHOUT ANY WARRANTY; without even the implied warranty of
+  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+  GNU General Public License for more details.
+
+  You should have received a copy of the GNU General Public License
+  along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+"""
+
+from utils import boxlib_bool_to_int
+
+# This gives the type of each parameter we want, used to cast with a ``map()``
+# @todo: get rid of enzo parameters we do not need
+parameter_type_dict = {
+    "CosmologyCurrentRedshift": float,
+    "CosmologyComovingBoxSize": float,
+    "CosmologyOmegaMatterNow": float,
+    "CosmologyOmegaLambdaNow": float,
+    "CosmologyHubbleConstantNow": float,
+    "CosmologyInitialRedshift": float,
+    "DualEnergyFormalismEta1": float,
+    "DualEnergyFormalismEta2": float,
+    "MetaDataString": str,
+    "HydroMethod": int,
+    "DualEnergyFormalism": int,
+    "InitialTime": float,
+    "ComovingCoordinates": boxlib_bool_to_int,
+    "DensityUnits": float,
+    "LengthUnits": float,
+    "LengthUnit": float,
+    "TemperatureUnits": float,
+    "TimeUnits": float,
+    "GravitationalConstant": float,
+    "Gamma": float,
+    "MultiSpecies": int,
+    "CompilerPrecision": str,
+    "CurrentTimeIdentifier": int,
+    "RefineBy": int,
+    "BoundaryConditionName": str,
+    "TopGridRank": int,
+    "TopGridDimensions": int,
+    "EOSSoundSpeed": float,
+    "EOSType": int,
+    "NumberOfParticleAttributes": int,
+}
+
+# Provides translation between parameters in the nyx `inputs` file names to the
+# enzo/yt name expected throughout the code. The key is nyx name, value is
+# enzo/yt equivalent.
+nyx_to_enzo_dict = {
+    "amr.n_cell": "TopGridDimensions",
+    "amr.ref_ratio": "RefineBy",
+    "materials.gamma": "Gamma",
+    "castro.use_comoving": "ComovingCoordinates",
+    "castro.redshift_in": "CosmologyInitialRedshift",
+    "comoving_OmL": "CosmologyOmegaLambdaNow",
+    "comoving_OmM": "CosmologyOmegaMatterNow",
+    "comoving_h": "CosmologyHubbleConstantNow"
+}
+
+# @todo: are these the same mapping as nyx_to_enzo_dict?
+yt_to_nyx_fields_dict = {}
+nyx_to_yt_fields_dict = {}
+
+fab_header_pattern = r"^FAB \(\((\d+), \([0-9 ]+\)\),\(\d+, \(([0-9 ]+)\)\)\)\(\((\d+,\d+,\d+)\) \((\d+,\d+,\d+)\) \((\d+,\d+,\d+)\)\) (\d+)\n"
+
+# need to specify units eventually
+nyx_particle_field_names = ['particle_position_%s' % ax for ax in 'xyz'] + \
+                           ['particle_mass'] +  \
+                           ['particle_velocity_%s' % ax for ax in 'xyz']
+

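The FAB header pattern above can be exercised against a representative header line. A sketch with a made-up, but well-formed, header string:

    import re

    fab_header_pattern = r"^FAB \(\((\d+), \([0-9 ]+\)\),\(\d+, \(([0-9 ]+)\)\)\)\(\((\d+,\d+,\d+)\) \((\d+,\d+,\d+)\) \((\d+,\d+,\d+)\)\) (\d+)\n"
    # hypothetical header of the kind BoxLib writes at the top of a FAB file
    header = "FAB ((8, (64 11 52 0 1 12 0 1023)),(8, (8 7 6 5 4 3 2 1)))((0,0,0) (63,63,63) (0,0,0)) 27\n"
    m = re.search(fab_header_pattern, header)
    bytes_per_real, endian, start, stop, center_type, n_components = m.groups()
    print bytes_per_real, n_components  # "8", "27"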

--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/yt/frontends/nyx/fields.py	Thu Aug 11 13:36:43 2011 -0600
@@ -0,0 +1,153 @@
+"""
+Field specifications for Nyx
+
+Author: Casey W. Stark <caseywstark at gmail.com>
+Affiliation: UC Berkeley
+Author: J. S. Oishi <jsoishi at gmail.com>
+Affiliation: KIPAC/SLAC/Stanford
+Homepage: http://yt.enzotools.org/
+License:
+  Copyright (C) 2011 Casey W. Stark, J. S. Oishi, Matthew Turk.  All Rights
+  Reserved.
+
+  This file is part of yt.
+
+  yt is free software; you can redistribute it and/or modify
+  it under the terms of the GNU General Public License as published by
+  the Free Software Foundation; either version 3 of the License, or
+  (at your option) any later version.
+
+  This program is distributed in the hope that it will be useful,
+  but WITHOUT ANY WARRANTY; without even the implied warranty of
+  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+  GNU General Public License for more details.
+
+  You should have received a copy of the GNU General Public License
+  along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+"""
+
+import yt.data_objects.universal_fields
+
+from yt.data_objects.field_info_container import CodeFieldInfoContainer, \
+    ValidateParameter, ValidateDataField, ValidateProperty, ValidateSpatial, \
+    ValidateGridType
+from yt.utilities.physical_constants import mh, kboltz
+
+class NyxFieldContainer(CodeFieldInfoContainer):
+    """ All nyx-specific fields are stored in here. """
+    _shared_state = {}
+    _field_list = {}
+
+nyx_fields = NyxFieldContainer()
+add_field = nyx_fields.add_field
+add_nyx_field = add_field  # alias for API
+
+# Density
+add_field("density", function=lambda a, b: None, take_log=True,
+          validators=[ValidateDataField("density")],
+          units=r"\rm{g}} / \rm{cm}^3",
+          projected_units =r"\rm{g}} / \rm{cm}^2")
+nyx_fields["density"]._projected_units =r"\rm{g}} / \rm{cm}^2"
+
+add_field("Density", function=lambda a, b: b["density"], take_log=True,
+          units=r"\rm{g}} / \rm{cm}^3",
+          projected_units =r"\rm{g}} / \rm{cm}^2")
+
+# Particle mass in units of $ M_{\odot}
+def _convertParticleMassMsun(data):
+    return (1/1.989e33)
+def _particle_mass_m_sun(field, data):
+    return data["particle_mass"]
+add_field("ParticleMassMsun", function=_particle_mass_m_sun,
+          validators=[ValidateSpatial(0), ValidateDataField("particle_mass")],
+          particle_type=True, convert_function=_convertParticleMassMsun,
+          take_log=True, units=r"\rm{M_{\odot}}")
+
+add_field("Dark_Matter_Density", function=lambda a, b: b["particle_mass_density"], take_log=True,
+          units=r"\rm{g}} / \rm{cm}^3",particle_type=True,
+          projected_units =r"\rm{g}} / \rm{cm}^2")
+
+
+# Energy Density
+# @todo: ``energy_density``
+add_field("total_energy", function=lambda a, b: None, take_log=True,
+          validators=[ValidateDataField("total_energy")],
+          units=r"\rm{M_{\odot}} (\rm{km} / \rm{s})^2")
+
+# Momentum in each dimension.
+# @todo: ``momentum_x``
+add_field("x-momentum", function=lambda a, b: None, take_log=False,
+          validators=[ValidateDataField("x-momentum")],
+          units=r"\rm{M_{\odot}} \rm{km} / \rm{s}")
+add_field("y-momentum", function=lambda a, b: None, take_log=False,
+          validators=[ValidateDataField("y-momentum")],
+          units=r"\rm{M_{\odot}} \rm{km} / \rm{s}")
+add_field("z-momentum", function=lambda a, b: None, take_log=False,
+          validators=[ValidateDataField("z-momentum")],
+          units=r"\rm{M_{\odot}} \rm{km} / \rm{s}")
+
+### Now derived fields
+
+# Velocity fields in each dimension
+# @todo: ``velocity_x``
+def _x_velocity(field, data):
+    """ Generate x-velocity from x-momentum and density. """
+    return data["x-momentum"] / data["density"]
+add_field("x-velocity", function=_x_velocity, take_log=False,
+          units=r"\rm{km} / \rm{s}")
+
+def _y_velocity(field, data):
+    """ Generate y-velocity from y-momentum and density. """
+    return data["y-momentum"] / data["density"]
+add_field("y-velocity", function=_y_velocity, take_log=False,
+          units=r"\rm{km} / \rm{s}")
+
+def _z_velocity(field, data):
+    """ Generate z-velocity from z-momentum and density. """
+    return data["z-momentum"] / data["density"]
+add_field("z-velocity", function=_z_velocity, take_log=False,
+          units=r"\rm{km} / \rm{s}")
+
+# The gas **thermal** energy.
+# @todo: should be called ``gas_energy`` whether it is data or derived
+def _thermal_energy(field, data):
+    """
+    Generate thermal (gas) energy. The Dual Energy Formalism was implemented
+    by Stella, but that is not how this is invoked here, so I'll leave it
+    commented out for now.
+
+    """
+    #if data.pf["DualEnergyFormalism"]:
+    #    return data["Gas_Energy"]
+    #else:
+    return data["Total_Energy"] - 0.5 * data["density"] * (
+                                          data["x-velocity"]**2.0
+                                        + data["y-velocity"]**2.0
+                                        + data["z-velocity"]**2.0 )
+add_field("ThermalEnergy", function=_thermal_energy,
+          units=r"\rm{M_{\odot}} (\rm{km} / \rm{s})^2")
+
+# Gas pressure
+# @todo: eventually figure out a way to detect when using radiation and change
+#        this.
+def _pressure(field, data):
+    """
+    Computed using
+
+    $$ pressure = (\gamma - 1.0) * e$$
+
+    where e is thermal energy density. Note that this will need to be modified
+    when radiation is accounted for.
+
+    """
+    return (data.pf["Gamma"] - 1.0) * data["ThermalEnergy"]
+add_field("Pressure", function=_pressure,
+          units=r"\rm{M_{\odot}} (\rm{km} / \rm{s})^2 / \rm{Mpc}^3")
+
+# Gas temperature
+def _temperature(field, data):
+    return ((data.pf["Gamma"] - 1.0) * data.pf["mu"] * mh *
+            data["ThermalEnergy"] / (kboltz * data["Density"]))
+add_field("Temperature", function=_temperature, take_log=False,
+          units=r"\rm{Kelvin}")
+

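Taken together, these fields form a short dependency chain: momentum over
density gives velocity, total energy minus bulk kinetic energy gives thermal
energy, and pressure and temperature follow from the ideal-gas relations in
the docstrings above. A minimal numpy sketch of that chain outside yt, with
fabricated input arrays and assumed gamma/mu values:

    import numpy as np

    mh, kboltz = 1.67262171e-24, 1.3806504e-16  # proton mass, Boltzmann (cgs)
    gamma, mu = 5.0 / 3.0, 0.6                  # assumed for illustration

    density      = np.array([1.0, 2.0])         # fabricated data
    x_momentum   = np.array([0.5, 1.0])
    total_energy = np.array([3.0, 6.0])

    x_velocity = x_momentum / density
    # y and z terms omitted for brevity
    thermal_energy = total_energy - 0.5 * density * x_velocity**2
    pressure = (gamma - 1.0) * thermal_energy
    temperature = ((gamma - 1.0) * mu * mh * thermal_energy
                   / (kboltz * density))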

--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/yt/frontends/nyx/io.py	Thu Aug 11 13:36:43 2011 -0600
@@ -0,0 +1,141 @@
+"""
+Nyx data-file handling functions (basically a boxlib reader)
+
+Author: Casey W. Stark <caseywstark at gmail.com>
+Affiliation: UC Berkeley
+Author: Matthew Turk <matthewturk at gmail.com>
+Author: J. S. Oishi <jsoishi at gmail.com>
+Affiliation: KIPAC/SLAC/Stanford
+Homepage: http://yt.enzotools.org/
+License:
+  Copyright (C) 2011 Casey W. Stark, Matthew Turk.  All Rights Reserved.
+
+  This file is part of yt.
+
+  yt is free software; you can redistribute it and/or modify
+  it under the terms of the GNU General Public License as published by
+  the Free Software Foundation; either version 3 of the License, or
+  (at your option) any later version.
+
+  This program is distributed in the hope that it will be useful,
+  but WITHOUT ANY WARRANTY; without even the implied warranty of
+  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+  GNU General Public License for more details.
+
+  You should have received a copy of the GNU General Public License
+  along with this program.  If not, see <http://www.gnu.org/licenses/>.
+"""
+
+import os
+import numpy as na
+from yt.utilities.amr_utils import read_castro_particles
+from yt.utilities.io_handler import BaseIOHandler
+
+from definitions import fab_header_pattern, nyx_particle_field_names, \
+                        yt_to_nyx_fields_dict
+
+class IOHandlerNative(BaseIOHandler):
+    """ File handler that can somehow read the native boxlib format. """
+
+    _data_style = "nyx_native"
+
+    def modify(self, field):
+        return field.swapaxes(0, 2)
+
+    def _read_particle_field(self, grid, field):
+        filen = os.path.expanduser(grid.particle_filename)
+        off = grid._particle_offset
+        tr = na.zeros(grid.NumberOfParticles, dtype='float64')
+        read_castro_particles(filen, off,
+                              nyx_particle_field_names.index(field),
+                              len(nyx_particle_field_names), tr)
+        return tr
+
+    def _read_data_set(self, grid, field):
+        """ reads packed multiFABs output by BoxLib in "NATIVE" format. """
+        if field in nyx_particle_field_names:
+            return self._read_particle_field(grid, field)
+        filen = os.path.expanduser(grid.filename[field])
+        off = grid._offset[field]
+        inFile = open(filen, 'rb')
+        inFile.seek(off)
+        header = inFile.readline()
+        header = header.strip()
+
+        """
+        if grid._paranoid:
+            mylog.warn("Castro Native reader: Paranoid read mode.")
+            header_re = re.compile(fab_header_pattern)
+            bytesPerReal, endian, start, stop, centerType, nComponents = \
+                header_re.search(header).groups()
+
+            # we will build up a dtype string, starting with endian.
+            # @todo: this code is ugly.
+            bytesPerReal = int(bytesPerReal)
+            if bytesPerReal == int(endian[0]):
+                dtype = '<'
+            elif bytesPerReal == int(endian[-1]):
+                dtype = '>'
+            else:
+                raise ValueError("FAB header is neither big nor little endian. Perhaps the file is corrupt?")
+
+            dtype += ('f%i' % bytesPerReal)  # always a floating point
+
+            # determine size of FAB
+            start = na.array(map(int, start.split(',')))
+            stop = na.array(map(int, stop.split(',')))
+
+            gridSize = stop - start + 1
+
+            error_count = 0
+            if (start != grid.start).any():
+                print "Paranoia Error: Cell_H and %s do not agree on grid start." % grid.filename
+                error_count += 1
+            if (stop != grid.stop).any():
+                print "Paranoia Error: Cell_H and %s do not agree on grid stop." % grid.filename
+                error_count += 1
+            if (gridSize != grid.ActiveDimensions).any():
+                print "Paranoia Error: Cell_H and %s do not agree on grid dimensions." % grid.filename
+                error_count += 1
+            if bytesPerReal != grid.hierarchy._bytesPerReal:
+                print "Paranoia Error: Cell_H and %s do not agree on bytes per real number." % grid.filename
+                error_count += 1
+            if (bytesPerReal == grid.hierarchy._bytesPerReal and dtype != grid.hierarchy._dtype):
+                print "Paranoia Error: Cell_H and %s do not agree on endianness." % grid.filename
+                error_count += 1
+
+            if error_count > 0:
+                raise RuntimeError("Paranoia unveiled %i differences between Cell_H and %s." % (error_count, grid.filename))
+        else:
+        """
+        start = grid.start_index
+        stop = grid.stop_index
+        dtype = grid.hierarchy._dtype
+        bytesPerReal = grid.hierarchy._bytesPerReal
+
+        nElements = grid.ActiveDimensions.prod()
+
+        # one field has nElements * bytesPerReal bytes and is located
+        # nElements * bytesPerReal * field_index from the offset location
+        if field in yt_to_nyx_fields_dict:
+            fieldname = yt_to_nyx_fields_dict[field]
+        else:
+            fieldname = field
+        field_index = grid.field_indexes[fieldname]
+        inFile.seek(int(nElements*bytesPerReal*field_index),1)
+        field = na.fromfile(inFile, count=nElements, dtype=dtype)
+        field = field.reshape(grid.ActiveDimensions[::-1]).swapaxes(0,2)
+
+        # @todo: we can/should also check against the max and min in the header
+        # file
+
+        inFile.close()
+        return field
+
+    def _read_data_slice(self, grid, field, axis, coord):
+        # wishful thinking?
+        sl = [slice(None), slice(None), slice(None)]
+        sl[axis] = slice(coord, coord + 1)
+        #sl = tuple(reversed(sl))
+        return self._read_data_set(grid, field)[tuple(sl)]

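For reference, fab_header_pattern (from definitions.py above) captures six
groups from a FAB header: bytes per real, the endianness descriptor, the
start, stop, and centering index triples, and the component count (the names
follow the commented-out paranoid reader). A quick sketch against a
fabricated but well-formed header line:

    import re
    from yt.frontends.nyx.definitions import fab_header_pattern

    header = ("FAB ((8, (64 11 52 0 1 12 0 1011)),(8, (8 7 6 5 4 3 2 1)))"
              "((0,0,0) (63,63,63) (0,0,0)) 27\n")
    m = re.match(fab_header_pattern, header)
    bytes_per_real, endian, start, stop, center, n_components = m.groups()
    # bytes_per_real == '8', start == '0,0,0', stop == '63,63,63',
    # n_components == '27'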

--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/yt/frontends/nyx/setup.py	Thu Aug 11 13:36:43 2011 -0600
@@ -0,0 +1,12 @@
+#!/usr/bin/env python
+import setuptools
+import os, sys, os.path
+
+def configuration(parent_package='', top_path=None):
+    from numpy.distutils.misc_util import Configuration
+    config = Configuration('nyx', parent_package, top_path)
+    config.make_config_py()  # installs __config__.py
+    #config.make_svn_version_py()
+    return config


--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/yt/frontends/nyx/utils.py	Thu Aug 11 13:36:43 2011 -0600
@@ -0,0 +1,36 @@
+"""
+Utilities for dealing with Nyx data
+
+Author: Casey W. Stark <caseywstark at gmail.com>
+Affiliation: UC Berkeley
+Homepage: http://yt.enzotools.org/
+License:
+  Copyright (C) 2011 Casey W. Stark, Matthew Turk.  All Rights Reserved.
+
+  This file is part of yt.
+
+  yt is free software; you can redistribute it and/or modify
+  it under the terms of the GNU General Public License as published by
+  the Free Software Foundation; either version 3 of the License, or
+  (at your option) any later version.
+
+  This program is distributed in the hope that it will be useful,
+  but WITHOUT ANY WARRANTY; without even the implied warranty of
+  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+  GNU General Public License for more details.
+
+  You should have received a copy of the GNU General Public License
+  along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+"""
+
+def boxlib_bool_to_int(v):
+    try:
+        return int(v)
+    except ValueError:
+        pass
+    v = v.upper().strip()
+    if v[0] == 'T':
+        return 1
+    elif v[0] == 'F':
+        return 0
+    raise ValueError("Cannot interpret '%s' as a BoxLib boolean." % v)

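Usage is forgiving about case and surrounding whitespace, and plain integers
pass straight through; a few illustrative calls:

    from yt.frontends.nyx.utils import boxlib_bool_to_int

    boxlib_bool_to_int("1")        # -> 1, via int()
    boxlib_bool_to_int("TRUE")     # -> 1
    boxlib_bool_to_int(" false ")  # -> 0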

--- a/yt/frontends/orion/data_structures.py	Tue Jun 21 23:24:28 2011 -0400
+++ b/yt/frontends/orion/data_structures.py	Thu Aug 11 13:36:43 2011 -0600
@@ -476,8 +476,9 @@
         pfn = os.path.join(pfname)
         if not os.path.exists(pfn): return False
         castro = any(("castro." in line for line in open(pfn)))
+        nyx = any(("nyx." in line for line in open(pfn)))
         maestro = os.path.exists(os.path.join(pname, "job_info"))
-        orion = (not castro) and (not maestro)
+        orion = (not castro) and (not maestro) and (not nyx)
         return orion
         
     def _parse_parameter_file(self):


--- a/yt/frontends/orion/setup.py	Tue Jun 21 23:24:28 2011 -0400
+++ b/yt/frontends/orion/setup.py	Thu Aug 11 13:36:43 2011 -0600
@@ -8,5 +8,5 @@
     from numpy.distutils.misc_util import Configuration
     config = Configuration('orion',parent_package,top_path)
     config.make_config_py() # installs __config__.py
-    config.make_svn_version_py()
+    #config.make_svn_version_py()
     return config


--- a/yt/frontends/ramses/setup.py	Tue Jun 21 23:24:28 2011 -0400
+++ b/yt/frontends/ramses/setup.py	Thu Aug 11 13:36:43 2011 -0600
@@ -13,5 +13,5 @@
         depends=glob.glob("yt/frontends/ramses/ramses_headers/*.hh")
         )
     config.make_config_py() # installs __config__.py
-    config.make_svn_version_py()
+    #config.make_svn_version_py()
     return config


--- a/yt/frontends/setup.py	Tue Jun 21 23:24:28 2011 -0400
+++ b/yt/frontends/setup.py	Thu Aug 11 13:36:43 2011 -0600
@@ -5,10 +5,11 @@
     from numpy.distutils.misc_util import Configuration
     config = Configuration('frontends',parent_package,top_path)
     config.make_config_py() # installs __config__.py
-    config.make_svn_version_py()
+    #config.make_svn_version_py()
     config.add_subpackage("chombo")
     config.add_subpackage("enzo")
     config.add_subpackage("flash")
+    config.add_subpackage("nyx")
     config.add_subpackage("orion")
     config.add_subpackage("ramses")
     config.add_subpackage("tiger")


--- a/yt/frontends/stream/setup.py	Tue Jun 21 23:24:28 2011 -0400
+++ b/yt/frontends/stream/setup.py	Thu Aug 11 13:36:43 2011 -0600
@@ -8,5 +8,5 @@
     from numpy.distutils.misc_util import Configuration
     config = Configuration('stream',parent_package,top_path)
     config.make_config_py() # installs __config__.py
-    config.make_svn_version_py()
+    #config.make_svn_version_py()
     return config


--- a/yt/frontends/tiger/setup.py	Tue Jun 21 23:24:28 2011 -0400
+++ b/yt/frontends/tiger/setup.py	Thu Aug 11 13:36:43 2011 -0600
@@ -8,5 +8,5 @@
     from numpy.distutils.misc_util import Configuration
     config = Configuration('tiger',parent_package,top_path)
     config.make_config_py() # installs __config__.py
-    config.make_svn_version_py()
+    #config.make_svn_version_py()
     return config


--- a/yt/gui/opengl_widgets/setup.py	Tue Jun 21 23:24:28 2011 -0400
+++ b/yt/gui/opengl_widgets/setup.py	Thu Aug 11 13:36:43 2011 -0600
@@ -8,5 +8,5 @@
     from numpy.distutils.misc_util import Configuration
     config = Configuration('opengl_widgets',parent_package,top_path)
     config.make_config_py() # installs __config__.py
-    config.make_svn_version_py()
+    #config.make_svn_version_py()
     return config


--- a/yt/gui/reason/bottle_mods.py	Tue Jun 21 23:24:28 2011 -0400
+++ b/yt/gui/reason/bottle_mods.py	Thu Aug 11 13:36:43 2011 -0600
@@ -38,9 +38,6 @@
 route_watchers = []
 payloads = []
 
-orig_stdout = sys.stdout
-orig_stderr = sys.stderr
-
 def preroute(future_route, *args, **kwargs):
     def router(func):
         route_functions[future_route] = (args, kwargs, func)
@@ -153,8 +150,7 @@
             continue
             w._route_prefix = token
     repl.activate()
-    while not repl.execution_thread.queue.empty():
-        time.sleep(1)
+    repl.execution_thread.wait()
     print
     print
     print "============================================================================="


--- a/yt/gui/reason/extdirect_repl.py	Tue Jun 21 23:24:28 2011 -0400
+++ b/yt/gui/reason/extdirect_repl.py	Thu Aug 11 13:36:43 2011 -0600
@@ -108,8 +108,8 @@
         while 1:
             #print "Checking for a queue ..."
             try:
-                task = self.queue.get(True, 10)
-            except (Queue.Full, Queue.Empty):
+                task = self.queue.get(True, 1)
+            except Queue.Empty:
                 if self.repl.stopped: return
                 continue
             #print "Received the task", task
@@ -122,9 +122,12 @@
                 new_code = self.repl._add_widget(
                     task['name'], task['widget_data_name'])
                 #print "Got this command:", new_code
-                self.repl.execute(new_code, hide=True)
+                self.execute_one(new_code, hide=True)
                 #print "Executed!"
 
+    def wait(self):
+        self.queue.join()
+
     def execute_one(self, code, hide):
         self.repl.executed_cell_texts.append(code)
 
@@ -135,13 +138,13 @@
             print "====================                ===================="
             print result
             print "========================================================"
-        if hide: return
-        self.repl.payload_handler.add_payload(
-            {'type': 'cell_results',
-             'output': result,
-             'input': highlighter(code),
-             'raw_input': code},
-            )
+        if not hide:
+            self.repl.payload_handler.add_payload(
+                {'type': 'cell_results',
+                 'output': result,
+                 'input': highlighter(code),
+                 'raw_input': code},
+                )
 
 def deliver_image(im):
     if hasattr(im, 'read'):

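The new wait() leans on Queue.join(), which blocks until every item ever put
on the queue has been matched by a task_done() call from the consumer; this
presumes the execution thread marks each task done after running it. A
minimal sketch of that contract, detached from the repl machinery (Python 2
spelling, matching this code):

    import Queue  # renamed 'queue' in Python 3
    import threading

    q = Queue.Queue()

    def worker():
        while True:
            task = q.get()
            try:
                pass  # ... execute the task here ...
            finally:
                q.task_done()  # join() unblocks only once every get()
                               # is paired with a task_done()

    t = threading.Thread(target=worker)
    t.daemon = True
    t.start()

    q.put("some task")
    q.join()  # returns when the worker has finished everything queued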

--- a/yt/gui/reason/html/js/reason.js	Tue Jun 21 23:24:28 2011 -0400
+++ b/yt/gui/reason/html/js/reason.js	Thu Aug 11 13:36:43 2011 -0600
@@ -55,7 +55,9 @@
 }
 
 var repl_input = new Ext.FormPanel({
+    title: 'YT Input',
     url: 'push',
+    flex: 0.2,
     layout: 'fit',
     padding: 5,
     height: '100%',
@@ -133,7 +135,7 @@
     title: 'YT Output',
     id: 'output_container',
     autoScroll: true,
-    flex: 0.7,
+    flex: 0.8,
     items: []
 });
 


--- a/yt/gui/reason/pannable_map.py	Tue Jun 21 23:24:28 2011 -0400
+++ b/yt/gui/reason/pannable_map.py	Thu Aug 11 13:36:43 2011 -0600
@@ -79,9 +79,12 @@
                                     self.pf.domain_right_edge[1],
                                     dd*DW[0] / (64*256),
                                     dd*DW[0])
-        cmi = na.log10(cmi)
-        cma = na.log10(cma)
-        to_plot = apply_colormap(na.log10(frb[self.field]), color_bounds = (cmi, cma))
+        if self.pf.field_info[self.field].take_log:
+            cmi = na.log10(cmi)
+            cma = na.log10(cma)
+            to_plot = apply_colormap(na.log10(frb[self.field]), color_bounds = (cmi, cma))
+        else:
+            to_plot = apply_colormap(frb[self.field], color_bounds = (cmi, cma))
         rv = write_png_to_string(to_plot)
         return rv
 


--- a/yt/gui/reason/setup.py	Tue Jun 21 23:24:28 2011 -0400
+++ b/yt/gui/reason/setup.py	Thu Aug 11 13:36:43 2011 -0600
@@ -6,5 +6,5 @@
     from numpy.distutils.misc_util import Configuration
     config = Configuration('reason',parent_package,top_path)
     config.make_config_py() # installs __config__.py
-    config.make_svn_version_py()
+    #config.make_svn_version_py()
     return config


--- a/yt/gui/setup.py	Tue Jun 21 23:24:28 2011 -0400
+++ b/yt/gui/setup.py	Thu Aug 11 13:36:43 2011 -0600
@@ -9,5 +9,5 @@
     config.add_subpackage('traited_explorer')
     config.add_subpackage('reason')
     config.make_config_py() # installs __config__.py
-    config.make_svn_version_py()
+    #config.make_svn_version_py()
     return config


--- a/yt/gui/traited_explorer/setup.py	Tue Jun 21 23:24:28 2011 -0400
+++ b/yt/gui/traited_explorer/setup.py	Thu Aug 11 13:36:43 2011 -0600
@@ -6,5 +6,5 @@
     from numpy.distutils.misc_util import Configuration
     config = Configuration('traited_explorer',parent_package,top_path)
     config.make_config_py() # installs __config__.py
-    config.make_svn_version_py()
+    #config.make_svn_version_py()
     return config


--- a/yt/mods.py	Tue Jun 21 23:24:28 2011 -0400
+++ b/yt/mods.py	Thu Aug 11 13:36:43 2011 -0600
@@ -14,12 +14,12 @@
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3 of the License, or
   (at your option) any later version.
-  
+
   This program is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.
-  
+
   You should have received a copy of the GNU General Public License
   along with this program.  If not, see <http://www.gnu.org/licenses/>.
 """
@@ -60,6 +60,9 @@
 from yt.frontends.castro.api import \
     CastroStaticOutput, CastroFieldInfo, add_castro_field
 
+from yt.frontends.nyx.api import \
+    NyxStaticOutput, nyx_fields, add_nyx_field
+
 from yt.frontends.orion.api import \
     OrionStaticOutput, OrionFieldInfo, add_orion_field
 


--- a/yt/setup.py	Tue Jun 21 23:24:28 2011 -0400
+++ b/yt/setup.py	Thu Aug 11 13:36:43 2011 -0600
@@ -13,5 +13,5 @@
     config.add_subpackage('utilities')
     config.add_subpackage('visualization')
     config.make_config_py()
-    config.make_svn_version_py()
+    #config.make_svn_version_py()
     return config


--- a/yt/utilities/_amr_utils/misc_utilities.pyx	Tue Jun 21 23:24:28 2011 -0400
+++ b/yt/utilities/_amr_utils/misc_utilities.pyx	Thu Aug 11 13:36:43 2011 -0600
@@ -73,3 +73,38 @@
                 break
         if inside == 1: mask[i] = 1
         else: mask[i] = 0
+
+@cython.boundscheck(False)
+@cython.wraparound(False)
+@cython.cdivision(True)
+def find_values_at_point(np.ndarray[np.float64_t, ndim=1] point,
+                         np.ndarray[np.float64_t, ndim=2] left_edges,
+                         np.ndarray[np.float64_t, ndim=2] right_edges,
+                         np.ndarray[np.int32_t, ndim=2] dimensions,
+                         field_names, grid_objects):
+    # This iterates in order, first to last, and then returns with the first
+    # one in which the point is located; this means if you order from highest
+    # level to lowest, you will find the correct grid without consulting child
+    # masking.  Note also that we will do a few relatively slow operations on
+    # strings and whatnot, but they should not be terribly slow.
+    cdef int ind[3], gi, fi
+    cdef int nf = len(field_names)
+    cdef np.float64_t dds
+    cdef np.ndarray[np.float64_t, ndim=3] field
+    cdef np.ndarray[np.float64_t, ndim=1] rv = np.zeros(nf, dtype='float64')
+    for gi in range(left_edges.shape[0]):
+        if not ((left_edges[gi,0] < point[0] < right_edges[gi,0])
+            and (left_edges[gi,1] < point[1] < right_edges[gi,1])
+            and (left_edges[gi,2] < point[2] < right_edges[gi,2])):
+            continue
+        # We found our grid!
+        for fi in range(3):
+            dds = ((right_edges[gi,fi] - left_edges[gi,fi])/
+                   (<np.float64_t> dimensions[gi,fi]))
+            ind[fi] = <int> ((point[fi] - left_edges[gi,fi])/dds)
+        grid = grid_objects[gi]
+        for fi in range(nf):
+            field = grid[field_names[fi]]
+            rv[fi] = field[ind[0], ind[1], ind[2]]
+        return rv
+    raise KeyError

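Because find_values_at_point returns at the first enclosing grid, the caller
should pass grids ordered finest-to-coarsest so the most refined values win.
A hypothetical driver, assuming the era's hierarchy attributes
grid_left_edge, grid_right_edge, grid_dimensions, and grids:

    import numpy as np
    from yt.utilities.amr_utils import find_values_at_point

    def values_at_point(pf, point, fields):
        # Finest grids first, so the first hit is the most refined one.
        order = np.argsort([g.Level for g in pf.h.grids])[::-1]
        return find_values_at_point(np.array(point, dtype="float64"),
                                    pf.h.grid_left_edge[order],
                                    pf.h.grid_right_edge[order],
                                    pf.h.grid_dimensions[order].astype("int32"),
                                    fields, pf.h.grids[order])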

--- a/yt/utilities/amr_kdtree/amr_kdtree.py	Tue Jun 21 23:24:28 2011 -0400
+++ b/yt/utilities/amr_kdtree/amr_kdtree.py	Thu Aug 11 13:36:43 2011 -0600
@@ -813,8 +813,8 @@
         current_node.r_corner = r_corner
         # current_node.owner = my_rank
         current_node.id = 0
-        par_tree_depth = long(na.log2(nprocs))
-
+        par_tree_depth = int(na.log2(nprocs))
+        anprocs = 2**par_tree_depth
         while current_node is not None:
             # If we don't have any grids, that means we are revisiting
             # a dividing node, and there is nothing to be done.
@@ -825,8 +825,8 @@
 
             # This is where all the domain decomposition occurs.  
             if ((current_node.id + 1)>>par_tree_depth) == 1:
-                # There are nprocs nodes that meet this criteria
-                if (current_node.id+1-nprocs) is my_rank:
+                # There are anprocs nodes that meet this criteria
+                if (current_node.id+1-anprocs) == my_rank:
                     # I own this shared node
                     self.my_l_corner = current_node.l_corner
                     self.my_r_corner = current_node.r_corner
@@ -973,11 +973,11 @@
         if image is not None:
             self.image = image
         rounds = int(na.log2(nprocs))
-
+        anprocs = 2**rounds
         my_node = tree
         my_node_id = 0
         my_node.owner = 0
-        path = na.binary_repr(nprocs+my_rank)
+        path = na.binary_repr(anprocs+my_rank)
         for i in range(rounds):
             my_node.left_child.owner = my_node.owner
             my_node.right_child.owner = my_node.owner + 2**(rounds-(i+1))

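The anprocs bookkeeping rounds the process count down to the nearest power of
two, so the binary-tree ownership logic no longer assumes nprocs is itself a
power of two. The arithmetic, for an illustrative nprocs of 6:

    import numpy as na  # matching the module's numpy alias

    nprocs = 6
    par_tree_depth = int(na.log2(nprocs))  # 2
    anprocs = 2 ** par_tree_depth          # 4, largest power of two <= nprocs
    # A rank's path through the tree comes from its binary representation,
    # offset by anprocs; for my_rank = 3:
    path = na.binary_repr(anprocs + 3)     # '111'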

--- a/yt/utilities/answer_testing/setup.py	Tue Jun 21 23:24:28 2011 -0400
+++ b/yt/utilities/answer_testing/setup.py	Thu Aug 11 13:36:43 2011 -0600
@@ -8,5 +8,5 @@
     from numpy.distutils.misc_util import Configuration
     config = Configuration('answer_testing',parent_package,top_path)
     config.make_config_py() # installs __config__.py
-    config.make_svn_version_py()
+    #config.make_svn_version_py()
     return config


--- a/yt/utilities/command_line.py	Tue Jun 21 23:24:28 2011 -0400
+++ b/yt/utilities/command_line.py	Thu Aug 11 13:36:43 2011 -0600
@@ -85,7 +85,7 @@
                    help="Width in specified units"),
     unit    = dict(short="-u", long="--unit",
                    action="store", type="string",
-                   dest="unit", default='1',
+                   dest="unit", default='unitary',
                    help="Desired units"),
     center  = dict(short="-c", long="--center",
                    action="store", type="float",
@@ -529,7 +529,13 @@
         import yt.utilities.bottle as bottle
         bottle.debug(True)
         if opts.host is not None:
-            bottle.run(server='rocket', host=opts.host)
+            colonpl = opts.host.find(":")
+            if colonpl >= 0:
+                port = int(opts.host.split(":")[-1])
+                opts.host = opts.host[:colonpl]
+            else:
+                port = 8080
+            bottle.run(server='rocket', host=opts.host, port=port)
         else:
             bottle.run(server='rocket')
 

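The host option now accepts an optional ':port' suffix, falling back to 8080
when none is given. Restating the parsing as a standalone helper (parse_host
is hypothetical, not part of the commit):

    def parse_host(host, default_port=8080):
        colonpl = host.find(":")
        if colonpl >= 0:
            return host[:colonpl], int(host.split(":")[-1])
        return host, default_port

    parse_host("localhost:8104")  # -> ('localhost', 8104)
    parse_host("localhost")       # -> ('localhost', 8080)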

--- a/yt/utilities/data_point_utilities.c	Tue Jun 21 23:24:28 2011 -0400
+++ b/yt/utilities/data_point_utilities.c	Thu Aug 11 13:36:43 2011 -0600
@@ -1,5 +1,5 @@
 /************************************************************************
-* Copyright (C) 2007-2011 Matthew Turk.  All Rights Reserved.
+* Copyright (C) 2007-2009 Matthew Turk.  All Rights Reserved.
 *
 * This file is part of yt.
 *
@@ -880,7 +880,7 @@
     npy_int64 gxs, gys, gzs, gxe, gye, gze;
     npy_int64 cxs, cys, czs, cxe, cye, cze;
     npy_int64 ixs, iys, izs, ixe, iye, ize;
-    int gxi, gyi, gzi, cxi, cyi, czi;
+    npy_int64 gxi, gyi, gzi, cxi, cyi, czi;
     npy_int64 cdx, cdy, cdz;
     npy_int64 dw[3];
     int i;
@@ -1014,17 +1014,17 @@
         ci = (cxi % dw[0]);
         ci = (ci < 0) ? ci + dw[0] : ci;
         if ( ci < gxs*refratio || ci >= gxe*refratio) continue;
-        gxi = ((int) (ci / refratio)) - gxs;
+        gxi = floor(ci / refratio) - gxs;
         for(cyi=cys;cyi<=cye;cyi++) {
             cj = cyi % dw[1];
             cj = (cj < 0) ? cj + dw[1] : cj;
             if ( cj < gys*refratio || cj >= gye*refratio) continue;
-            gyi = ((int) (cj / refratio)) - gys;
+            gyi = floor(cj / refratio) - gys;
             for(czi=czs;czi<=cze;czi++) {
                 ck = czi % dw[2];
                 ck = (ck < 0) ? ck + dw[2] : ck;
                 if ( ck < gzs*refratio || ck >= gze*refratio) continue;
-                gzi = ((int) (ck / refratio)) - gzs;
+                gzi = floor(ck / refratio) - gzs;
                     if ((ll) || (*(npy_int32*)PyArray_GETPTR3(mask, gxi,gyi,gzi) > 0)) 
                 {
                 for(n=0;n<n_fields;n++){
@@ -1214,75 +1214,43 @@
     cye = (cys + cdy - 1);
     cze = (czs + cdz - 1);
 
+    /* It turns out that C89 doesn't define a mechanism for choosing the sign
+       of the remainder.
+    */
     int x_loc, y_loc; // For access into the buffer
-
-    /* We check here if the domain is important or not.
-       If it's not, then, well, we get to use the fast version. */
-    if (dw[0] == dw[1] == dw[2] == 0) {
-      for(gxi=gxs,cxi=gxs*refratio;gxi<gxe;gxi++,cxi+=refratio) {
-        for(gyi=gys,cyi=gys*refratio;gyi<gye;gyi++,cyi+=refratio) {
-          for(gzi=gzs,czi=gzs*refratio;gzi<gze;gzi++,czi+=refratio) {
-            if ((refratio!=1) &&
-                (*(npy_int32*)PyArray_GETPTR3(mask, gxi,gyi,gzi)==0)) continue;
-            switch (axis) {
-              case 0: x_loc = cyi-cys; y_loc = czi-czs; break;
-              case 1: x_loc = cxi-cxs; y_loc = czi-czs; break;
-              case 2: x_loc = cxi-cys; y_loc = cyi-cys; break;
-            }
-            //fprintf(stderr, "%d %d %d %d %d\n", x_loc, y_loc, gxi, gyi, gzi);
-            for(ri=0;ri<refratio;ri++){
-              for(rj=0;rj<refratio;rj++){
-                for(n=0;n<n_fields;n++){
-                  for(n=0;n<n_fields;n++){
-                    *(npy_float64*) PyArray_GETPTR2(c_data[n], x_loc+ri, y_loc+rj)
-                      +=  *(npy_float64*) PyArray_GETPTR3(g_data[n],
-                          gxi-gxs, gyi-gys, gzi-gzs) * dls[n];
-                  }
-                }
-              }
-            }
-            total+=1;
-          }
-        }
-      }
-    } else {
-      /* Gotta go the slow route. */
-      for(cxi=gxs*refratio;cxi<=cxe;cxi++) {
-        /* It turns out that C89 doesn't define a mechanism for choosing the sign
-           of the remainder.
-         */
+    for(cxi=cxs;cxi<=cxe;cxi++) {
         ci = (cxi % dw[0]);
         ci = (ci < 0) ? ci + dw[0] : ci;
-        if ( ci >= gxe*refratio) break;
+        if ( ci < gxs*refratio || ci >= gxe*refratio) continue;
         gxi = floor(ci / refratio) - gxs;
-        for(cyi=gys*refratio;cyi<=cye;cyi++) {
-          cj = cyi % dw[1];
-          cj = (cj < 0) ? cj + dw[1] : cj;
-          if ( cj >= gye*refratio) break;
-          gyi = floor(cj / refratio) - gys;
-          for(czi=gzs*refratio;czi<=cze;czi++) {
-            ck = czi % dw[2];
-            ck = (ck < 0) ? ck + dw[2] : ck;
-            if ( ck >= gze*refratio) break;
-            gzi = floor(ck / refratio) - gzs;
-            if (refratio == 1 || *(npy_int32*)PyArray_GETPTR3(mask, gxi,gyi,gzi) > 0)
-            {
-              switch (axis) {
-                case 0: x_loc = cyi-cys; y_loc = czi-czs; break;
-                case 1: x_loc = cxi-cxs; y_loc = czi-czs; break;
-                case 2: x_loc = cxi-cys; y_loc = cyi-cys; break;
-              }
-              for(n=0;n<n_fields;n++){
-                *(npy_float64*) PyArray_GETPTR2(c_data[n], x_loc, y_loc)
-                  +=  *(npy_float64*) PyArray_GETPTR3(g_data[n], gxi, gyi, gzi) 
-                  * dls[n] / refratio;
-              }
-              total += 1;
+        for(cyi=cys;cyi<=cye;cyi++) {
+            cj = cyi % dw[1];
+            cj = (cj < 0) ? cj + dw[1] : cj;
+            if ( cj < gys*refratio || cj >= gye*refratio) continue;
+            gyi = floor(cj / refratio) - gys;
+            for(czi=czs;czi<=cze;czi++) {
+                ck = czi % dw[2];
+                ck = (ck < 0) ? ck + dw[2] : ck;
+                if ( ck < gzs*refratio || ck >= gze*refratio) continue;
+                gzi = floor(ck / refratio) - gzs;
+                    if (refratio == 1 || *(npy_int32*)PyArray_GETPTR3(mask, gxi,gyi,gzi) > 0)
+                {
+                switch (axis) {
+                  case 0: x_loc = cyi-cys; y_loc = czi-czs; break;
+                  case 1: x_loc = cxi-cxs; y_loc = czi-czs; break;
+                  case 2: x_loc = cxi-cys; y_loc = cyi-cys; break;
+                }
+                for(n=0;n<n_fields;n++){
+                    *(npy_float64*) PyArray_GETPTR2(c_data[n], x_loc, y_loc)
+                    +=  *(npy_float64*) PyArray_GETPTR3(g_data[n], gxi, gyi, gzi) 
+                        * dls[n] / refratio;
+                }
+                total += 1;
+                }
             }
-          }
         }
-      }
     }
+
     Py_DECREF(g_start);
     Py_DECREF(c_start);
     Py_DECREF(g_dims);


--- a/yt/utilities/parallel_tools/parallel_analysis_interface.py	Tue Jun 21 23:24:28 2011 -0400
+++ b/yt/utilities/parallel_tools/parallel_analysis_interface.py	Thu Aug 11 13:36:43 2011 -0600
@@ -1067,7 +1067,7 @@
                 ncols, size = data.shape
         ncols = MPI.COMM_WORLD.allreduce(ncols, op=MPI.MAX)
         if size == 0:
-            data = na.empty((ncols,0), dtype='float64') # This only works for
+            data = na.zeros((ncols,0), dtype='float64') # This only works for
         size = data.shape[-1]
         sizes = na.zeros(MPI.COMM_WORLD.size, dtype='int64')
         outsize = na.array(size, dtype='int64')


--- a/yt/utilities/parameter_file_storage.py	Tue Jun 21 23:24:28 2011 -0400
+++ b/yt/utilities/parameter_file_storage.py	Thu Aug 11 13:36:43 2011 -0600
@@ -33,7 +33,7 @@
     parallel_simple_proxy
 
 output_type_registry = {}
-_field_names = ('hash','bn','fp','tt','ctid','class_name','last_seen')
+_field_names = ('hash', 'bn', 'fp', 'tt', 'ctid', 'class_name', 'last_seen')
 
 class NoParameterShelf(Exception):
     pass
@@ -49,6 +49,14 @@
         return "%s" % self.name
 
 class ParameterFileStore(object):
+    """
+    This class is designed to be a semi-persistent storage for parameter
+    files.  By identifying each parameter file with a unique hash, objects
+    can be stored independently of parameter files -- when an object is
+    loaded, the parameter file is as well, based on the hash.  To keep
+    storage manageable, only a few hundred entries are retained in cache.
+
+    """
 
     _shared_state = {}
     _distributed = True
@@ -60,13 +68,11 @@
         self.__dict__ = cls._shared_state
         return self
 
-    def __init__(self, in_memory = False):
+    def __init__(self, in_memory=False):
         """
-        This class is designed to be a semi-persistent storage for parameter
-        files.  By identifying each parameter file with a unique hash, objects
-        can be stored independently of parameter files -- when an object is
-        loaded, the parameter file is as well, based on the hash.  For
-        storage concerns, only a few hundred will be retained in cache.
+        Create the parameter file database if yt is configured to store
+        parameter files.  Otherwise, use read-only settings.
+
         """
         if ytcfg.getboolean("yt", "StoreParameterFiles"):
             self._read_only = False
@@ -92,29 +98,23 @@
         # these will be broadcast
 
     def _get_db_name(self):
-        base_file_name = ytcfg.get("yt","ParameterFileStore")
+        base_file_name = ytcfg.get("yt", "ParameterFileStore")
         if not os.access(os.path.expanduser("~/"), os.W_OK):
             return os.path.abspath(base_file_name)
         return os.path.expanduser("~/.yt/%s" % base_file_name)
 
     def get_pf_hash(self, hash):
-        """
-        This returns a parameter file based on a hash.
-        """
+        """ This returns a parameter file based on a hash. """
         return self._convert_pf(self._records[hash])
 
     def get_pf_ctid(self, ctid):
-        """
-        This returns a parameter file based on a CurrentTimeIdentifier.
-        """
+        """ This returns a parameter file based on a CurrentTimeIdentifier. """
         for h in self._records:
             if self._records[h]['ctid'] == ctid:
                 return self._convert_pf(self._records[h])
 
     def _adapt_pf(self, pf):
-        """
-        This turns a parameter file into a CSV entry
-        """
+        """ This turns a parameter file into a CSV entry. """
         return dict(bn=pf.basename,
                     fp=pf.fullpath,
                     tt=pf.current_time,
@@ -123,9 +123,7 @@
                     last_seen=pf._instantiated)
 
     def _convert_pf(self, pf_dict):
-        """
-        This turns a CSV entry into a parameter file 
-        """
+        """ This turns a CSV entry into a parameter file. """
         bn = pf_dict['bn']
         fp = pf_dict['fp']
         fn = os.path.join(fp, bn)
@@ -145,7 +143,7 @@
     def check_pf(self, pf):
         """
         This will ensure that the parameter file (*pf*) handed to it is
-        recorded in the storage unit.  In doing so, it will update path 
+        recorded in the storage unit.  In doing so, it will update path
         and "last_seen" information.
         """
         hash = pf._hash()
@@ -160,9 +158,7 @@
             self.insert_pf(pf)
 
     def insert_pf(self, pf):
-        """
-        This will insert a new *pf* and flush the database to disk.
-        """
+        """ This will insert a new *pf* and flush the database to disk. """
         self._records[pf._hash()] = self._adapt_pf(pf)
         self.flush_db()
 
@@ -176,16 +172,13 @@
         self.flush_db()
 
     def flush_db(self):
-        """
-        This flushes the storage to disk.
-        """
+        """ This flushes the storage to disk. """
         if self._read_only: return
         self._write_out()
         self.read_db()
 
     def get_recent(self, n=10):
-        recs = sorted(self._records.values(),
-                      key = lambda a: -a['last_seen'])[:n]
+        recs = sorted(self._records.values(), key=lambda a: -a['last_seen'])[:n]
         return recs
 
     @parallel_simple_proxy
@@ -204,10 +197,8 @@
 
     @parallel_simple_proxy
     def read_db(self):
-        """
-        This will read the storage device from disk.
-        """
-        f=open(self._get_db_name(), 'rb')
+        """ This will read the storage device from disk. """
+        f = open(self._get_db_name(), 'rb')
         vals = csv.DictReader(f, _field_names)
         db = {}
         for v in vals:


--- a/yt/utilities/setup.py	Tue Jun 21 23:24:28 2011 -0400
+++ b/yt/utilities/setup.py	Thu Aug 11 13:36:43 2011 -0600
@@ -184,5 +184,5 @@
         define_macros = [("HAVE_XLOCALE_H", True)]
         )
     config.make_config_py() # installs __config__.py
-    config.make_svn_version_py()
+    #config.make_svn_version_py()
     return config


--- a/yt/visualization/fixed_resolution.py	Tue Jun 21 23:24:28 2011 -0400
+++ b/yt/visualization/fixed_resolution.py	Thu Aug 11 13:36:43 2011 -0600
@@ -29,6 +29,7 @@
     y_dict, \
     axis_names
 import _MPL
+import numpy as na
 
 class FixedResolutionBuffer(object):
     def __init__(self, data_source, bounds, buff_size, antialias = True,
@@ -96,15 +97,16 @@
         self.periodic = periodic
 
         # Handle periodicity, just in case
-        DLE = self.pf.domain_left_edge
-        DRE = self.pf.domain_right_edge
-        DD = float(self.periodic)*(DRE - DLE)
-        axis = self.data_source.axis
-        xax = x_dict[axis]
-        yax = y_dict[axis]
-        self._period = (DD[xax], DD[yax])
-        self._edges = ( (DLE[xax], DRE[xax]), (DLE[yax], DRE[yax]) )
-
+        if self.data_source.axis < 3:
+            DLE = self.pf.domain_left_edge
+            DRE = self.pf.domain_right_edge
+            DD = float(self.periodic)*(DRE - DLE)
+            axis = self.data_source.axis
+            xax = x_dict[axis]
+            yax = y_dict[axis]
+            self._period = (DD[xax], DD[yax])
+            self._edges = ( (DLE[xax], DRE[xax]), (DLE[yax], DRE[yax]) )
+
     def __getitem__(self, item):
         if item in self.data: return self.data[item]
         mylog.info("Making a fixed resolution buffer of %d by %d" % \


--- a/yt/visualization/image_panner/setup.py	Tue Jun 21 23:24:28 2011 -0400
+++ b/yt/visualization/image_panner/setup.py	Thu Aug 11 13:36:43 2011 -0600
@@ -8,5 +8,5 @@
     from numpy.distutils.misc_util import Configuration
     config = Configuration('image_panner',parent_package,top_path)
     config.make_config_py() # installs __config__.py
-    config.make_svn_version_py()
+    #config.make_svn_version_py()
     return config


--- a/yt/visualization/plot_modifications.py	Tue Jun 21 23:24:28 2011 -0400
+++ b/yt/visualization/plot_modifications.py	Thu Aug 11 13:36:43 2011 -0600
@@ -60,13 +60,17 @@
 
 class VelocityCallback(PlotCallback):
     _type_name = "velocity"
-    def __init__(self, factor=16):
+    def __init__(self, factor=16, scale=None, scale_units=None):
         """
         Adds a 'quiver' plot of velocity to the plot, skipping all but
         every *factor* datapoint
+        *scale* is the data units per arrow length unit using *scale_units* 
+        (see matplotlib.axes.Axes.quiver for more info)
         """
         PlotCallback.__init__(self)
         self.factor = factor
+        self.scale  = scale
+        self.scale_units = scale_units
 
     def __call__(self, plot):
         # Instantiation of these is cheap
@@ -77,7 +81,7 @@
         else:
             xv = "%s-velocity" % (x_names[plot.data.axis])
             yv = "%s-velocity" % (y_names[plot.data.axis])
-            qcb = QuiverCallback(xv, yv, self.factor)
+            qcb = QuiverCallback(xv, yv, self.factor, self.scale, self.scale_units)
         return qcb(plot)
 
 class MagFieldCallback(PlotCallback):
@@ -102,16 +106,20 @@
 
 class QuiverCallback(PlotCallback):
     _type_name = "quiver"
-    def __init__(self, field_x, field_y, factor):
+    def __init__(self, field_x, field_y, factor, scale, scale_units):
         """
         Adds a 'quiver' plot to any plot, using the *field_x* and *field_y*
-        from the associated data, skipping every *factor* datapoints.
+        from the associated data, skipping every *factor* datapoints.
+        *scale* is the data units per arrow length unit using *scale_units*
+        (see matplotlib.axes.Axes.quiver for more info)
         """
         PlotCallback.__init__(self)
         self.field_x = field_x
         self.field_y = field_y
         self.bv_x = self.bv_y = 0
         self.factor = factor
+        self.scale = scale
+        self.scale_units = scale_units
 
     def __call__(self, plot):
         x0, x1 = plot.xlim
@@ -137,7 +145,7 @@
                            (x0, x1, y0, y1),).transpose()
         X = na.mgrid[0:plot.image._A.shape[0]-1:nx*1j]# + 0.5*factor
         Y = na.mgrid[0:plot.image._A.shape[1]-1:ny*1j]# + 0.5*factor
-        plot._axes.quiver(X,Y, pixX, pixY)
+        plot._axes.quiver(X,Y, pixX, pixY, scale=self.scale, scale_units=self.scale_units)
         plot._axes.set_xlim(xx0,xx1)
         plot._axes.set_ylim(yy0,yy1)
         plot._axes.hold(False)

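With scale and scale_units threaded through to Axes.quiver, arrow lengths can
be pinned to a fixed data scale. A sketch in the PlotCollection idiom of this
era (dataset name and scale value are made up):

    from yt.mods import *

    pf = load("RedshiftOutput0005")  # hypothetical dataset
    pc = PlotCollection(pf, center=[0.5, 0.5, 0.5])
    p = pc.add_slice("Density", 0)
    # scale is data units per arrow-length unit; scale_units goes straight
    # through to matplotlib's Axes.quiver.
    p.modify["velocity"](factor=16, scale=1e7, scale_units="xy")
    pc.save("quiver_scaled")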

--- a/yt/visualization/setup.py	Tue Jun 21 23:24:28 2011 -0400
+++ b/yt/visualization/setup.py	Thu Aug 11 13:36:43 2011 -0600
@@ -7,6 +7,6 @@
     config.add_subpackage("image_panner")
     config.add_subpackage("volume_rendering")
     config.make_config_py() # installs __config__.py
-    config.make_svn_version_py()
+    #config.make_svn_version_py()
     config.add_extension("_MPL", "_MPL.c", libraries=["m"])
     return config


--- a/yt/visualization/volume_rendering/setup.py	Tue Jun 21 23:24:28 2011 -0400
+++ b/yt/visualization/volume_rendering/setup.py	Thu Aug 11 13:36:43 2011 -0600
@@ -10,5 +10,5 @@
     from numpy.distutils.misc_util import Configuration
     config = Configuration('volume_rendering',parent_package,top_path)
     config.make_config_py() # installs __config__.py
-    config.make_svn_version_py()
+    #config.make_svn_version_py()
     return config


http://bitbucket.org/yt_analysis/yt/changeset/b1f1760f477f/
changeset:   b1f1760f477f
branch:      geometry_handling
user:        MatthewTurk
date:        2011-08-14 01:20:44
summary:     Converted the ray grid selection into Cython, opening the
'geometry_handling' branch to manage converting more operations into general
geometric forms.
affected #:  2 files (-1 bytes)

--- a/yt/data_objects/data_containers.py	Thu Aug 11 12:26:38 2011 -0600
+++ b/yt/data_objects/data_containers.py	Sat Aug 13 17:20:44 2011 -0600
@@ -538,12 +538,10 @@
         return (self.px, self.py)
 
     def _get_list_of_grids(self):
-        # This bugs me, but we will give the tie to the LeftEdge
-        y = na.where( (self.px >=  self.pf.hierarchy.grid_left_edge[:,self.px_ax])
-                    & (self.px < self.pf.hierarchy.grid_right_edge[:,self.px_ax])
-                    & (self.py >=  self.pf.hierarchy.grid_left_edge[:,self.py_ax])
-                    & (self.py < self.pf.hierarchy.grid_right_edge[:,self.py_ax]))
-        self._grids = self.hierarchy.grids[y]
+        gi = au.ortho_ray_grids(self, 
+                self.hierarchy.grid_left_edges,
+                self.hierarchy.grid_right_edges)
+        self._grids = self.hierarchy.grids[gi.astype("bool")]
 
     def _get_data_from_grid(self, grid, field):
         # We are orthogonal, so we can feel free to make assumptions
@@ -613,31 +611,10 @@
         #self._refresh_data()
 
     def _get_list_of_grids(self):
-        # Get the value of the line at each LeftEdge and RightEdge
-        LE = self.pf.h.grid_left_edge
-        RE = self.pf.h.grid_right_edge
-        p = na.zeros(self.pf.h.num_grids, dtype='bool')
-        # Check left faces first
-        for i in range(3):
-            i1 = (i+1) % 3
-            i2 = (i+2) % 3
-            vs = self._get_line_at_coord(LE[:,i], i)
-            p = p | ( ( (LE[:,i1] <= vs[:,i1]) & (RE[:,i1] >= vs[:,i1]) ) \
-                    & ( (LE[:,i2] <= vs[:,i2]) & (RE[:,i2] >= vs[:,i2]) ) )
-            vs = self._get_line_at_coord(RE[:,i], i)
-            p = p | ( ( (LE[:,i1] <= vs[:,i1]) & (RE[:,i1] >= vs[:,i1]) ) \
-                    & ( (LE[:,i2] <= vs[:,i2]) & (RE[:,i2] >= vs[:,i2]) ) )
-        p = p | ( na.all( LE <= self.start_point, axis=1 ) 
-                & na.all( RE >= self.start_point, axis=1 ) )
-        p = p | ( na.all( LE <= self.end_point,   axis=1 ) 
-                & na.all( RE >= self.end_point,   axis=1 ) )
-        self._grids = self.hierarchy.grids[p]
-
-    def _get_line_at_coord(self, v, index):
-        # t*self.vec + self.start_point = self.end_point
-        t = (v - self.start_point[index])/self.vec[index]
-        t = t.reshape((t.shape[0],1))
-        return self.start_point + t*self.vec
+        gi = au.ray_grids(self,
+                self.hierarchy.grid_left_edges,
+                self.hierarchy.grid_right_edges)
+        self._grids = self.hierarchy.grids[gi.astype("bool")]
 
     def _get_data_from_grid(self, grid, field):
         mask = na.logical_and(self._get_cut_mask(grid),


--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/yt/utilities/_amr_utils/geometry_utils.pyx	Sat Aug 13 17:20:44 2011 -0600
@@ -0,0 +1,100 @@
+"""
+Simple integrators for the radiative transfer equation
+
+Author: Matthew Turk <matthewturk at gmail.com>
+Affiliation: Columbia University
+Homepage: http://yt.enzotools.org/
+License:
+  Copyright (C) 2011 Matthew Turk.  All Rights Reserved.
+
+  This file is part of yt.
+
+  yt is free software; you can redistribute it and/or modify
+  it under the terms of the GNU General Public License as published by
+  the Free Software Foundation; either version 3 of the License, or
+  (at your option) any later version.
+
+  This program is distributed in the hope that it will be useful,
+  but WITHOUT ANY WARRANTY; without even the implied warranty of
+  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+  GNU General Public License for more details.
+
+  You should have received a copy of the GNU General Public License
+  along with this program.  If not, see <http://www.gnu.org/licenses/>.
+"""
+
+import numpy as np
+cimport numpy as np
+cimport cython
+from stdlib cimport malloc, free, abs
+
+def ortho_ray_grids(dobj, np.ndarray[np.float64_t, ndim=2] left_edges,
+                          np.ndarray[np.float64_t, ndim=2] right_edges):
+    cdef int i
+    cdef int ng = left_edges.shape[0]
+    cdef int px_ax = dobj.px_ax
+    cdef int py_ax = dobj.py_ax
+    cdef np.float64_t px = dobj.px
+    cdef np.float64_t py = dobj.py
+    cdef np.ndarray[np.int32_t, ndim=1] gridi = np.zeros(ng, dtype='int32')
+    for i in range(ng):
+        if (    (px >= left_edges[i, px_ax])
+            and (px < right_edges[i, px_ax])
+            and (py >= left_edges[i, py_ax])
+            and (py < right_edges[i, py_ax])):
+            gridi[i] = 1
+    return gridi
+
+def ray_grids(dobj, np.ndarray[np.float64_t, ndim=2] left_edges,
+                    np.ndarray[np.float64_t, ndim=2] right_edges):
+    cdef int i, ax
+    cdef int i1, i2
+    cdef int ng = left_edges.shape[0]
+    cdef np.ndarray[np.int32_t, ndim=1] gridi = np.zeros(ng, dtype='int32')
+    cdef np.float64_t vs[3], t, p0[3], p1[3], v[3]
+    for i in range(3):
+        p0[i] = dobj.start_point[i]
+        p1[i] = dobj.end_point[i]
+        v[i] = dobj.vec[i]
+    # We check first to see if at any point, the ray intersects a grid face
+    for gi in range(ng):
+        for ax in range(3):
+            i1 = (ax+1) % 3
+            i2 = (ax+2) % 3
+            t = (left_edges[gi,ax] - p0[ax])/v[ax]
+            for i in range(3):
+                vs[i] = t * v[i] + p0[i]
+            if left_edges[gi,i1] <= vs[i1] and \
+               right_edges[gi,i1] >= vs[i1] and \
+               left_edges[gi,i2] <= vs[i2] and \
+               right_edges[gi,i2] >= vs[i2]:
+                gridi[gi] = 1
+                break
+            t = (right_edges[gi,ax] - p0[ax])/v[ax]
+            for i in range(3):
+                vs[i] = t * v[i] + p0[i]
+            if left_edges[gi,i1] <= vs[i1] and \
+               right_edges[gi,i1] >= vs[i1] and \
+               left_edges[gi,i2] <= vs[i2] and \
+               right_edges[gi,i2] >= vs[i2]:
+                gridi[gi] = 1
+                break
+        if gridi[gi] == 1: continue
+        # if the point is fully enclosed, we count the grid
+        if left_edges[gi,0] <= p0[0] and \
+           right_edges[gi,0] >= p0[0] and \
+           left_edges[gi,1] <= p0[1] and \
+           right_edges[gi,1] >= p0[1] and \
+           left_edges[gi,2] <= p0[2] and \
+           right_edges[gi,2] >= p0[2]:
+            gridi[gi] = 1
+            continue
+        # likewise for the end point
+        if left_edges[gi,0] <= p1[0] and \
+           right_edges[gi,0] >= p1[0] and \
+           left_edges[gi,1] <= p1[1] and \
+           right_edges[gi,1] >= p1[1] and \
+           left_edges[gi,2] <= p1[2] and \
+           right_edges[gi,2] >= p1[2]:
+            gridi[gi] = 1
+            continue
+    return gridi

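A note on consumption: both helpers return an int32 0/1 mask per grid, so
callers (as in data_containers.py above) cast to a boolean array before
fancy-indexing the hierarchy's grid list. A small illustration:

    import numpy as np
    gridi = np.array([0, 1, 1, 0], dtype="int32")  # e.g. output of ray_grids
    grids = np.array(["g0", "g1", "g2", "g3"], dtype=object)
    grids[gridi.astype("bool")]  # -> array(['g1', 'g2'], dtype=object)
    # Plain integer indexing, grids[gridi], would instead pick g0/g1 per entry.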

http://bitbucket.org/yt_analysis/yt/changeset/ef6ca4c413b0/
changeset:   ef6ca4c413b0
branch:      deliberate_fields
user:        MatthewTurk
date:        2011-08-14 01:47:02
summary:     Fixing enzo to allow double-loads; similar fixes will need to be added to other
functions.
affected #:  2 files (-1 bytes)

--- a/yt/data_objects/hierarchy.py	Thu Aug 11 13:36:43 2011 -0600
+++ b/yt/data_objects/hierarchy.py	Sat Aug 13 17:47:02 2011 -0600
@@ -129,12 +129,10 @@
     def _setup_unknown_fields(self):
         known_fields = self.parameter_file._fieldinfo_known
         for field in self.field_list:
-            if field in self.parameter_file.field_info:
-                ff = self.parameter_file.field_info[field]
-                # By allowing a backup, we don't mandate that it's found in our
-                # current field info.  This means we'll instead simply override
-                # it.
-                self.parameter_file.field_info.pop(field, None)
+            # By allowing a backup, we don't mandate that it's found in our
+            # current field info.  This means we'll instead simply override
+            # it.
+            ff = self.parameter_file.field_info.pop(field, None)
             if field not in known_fields:
                 mylog.info("Adding unknown field %s to list of fields", field)
                 cf = None


--- a/yt/frontends/enzo/data_structures.py	Thu Aug 11 13:36:43 2011 -0600
+++ b/yt/frontends/enzo/data_structures.py	Sat Aug 13 17:47:02 2011 -0600
@@ -664,8 +664,13 @@
         if self["TopGridRank"] == 1: self._setup_1d()
         elif self["TopGridRank"] == 2: self._setup_2d()
 
-        self.field_info = FieldInfoContainer.create_with_fallback(
-                            self._fieldinfo_fallback)
+        if getattr(self, "field_info", None) is None:
+            # The setting up of fields occurs in the hierarchy, which is only
+            # instantiated once.  So we have to double check to make sure that,
+            # in the event of double-loads of a parameter file, we do not blow
+            # away the existing field_info.
+            self.field_info = FieldInfoContainer.create_with_fallback(
+                                self._fieldinfo_fallback)
 
     def _setup_1d(self):
         self._hierarchy_class = EnzoHierarchy1D


http://bitbucket.org/yt_analysis/yt/changeset/d26fd8c81eea/
changeset:   d26fd8c81eea
branch:      deliberate_fields
user:        MatthewTurk
date:        2011-08-14 01:51:17
summary:     Slightly refactor the field container creation.
affected #:  14 files (-1 bytes)

--- a/yt/data_objects/static_output.py	Sat Aug 13 17:47:02 2011 -0600
+++ b/yt/data_objects/static_output.py	Sat Aug 13 17:51:17 2011 -0600
@@ -35,6 +35,8 @@
     ParameterFileStore, \
     NoParameterShelf, \
     output_type_registry
+from yt.data_objects.field_info_container import \
+    FieldInfoContainer, NullFunc
 
 # We want to support the movie format in the future.
 # When such a thing comes to pass, I'll move all the stuff that is contant up
@@ -96,6 +98,8 @@
                 pass
         self.print_key_parameters()
 
+        self.create_field_info()
+
     def __reduce__(self):
         args = (self._hash(),)
         return (_reconstruct_pf, args)
@@ -189,6 +193,17 @@
                 v = getattr(self, a)
                 mylog.info("Parameters: %-25s = %s", a, v)
 
+    def create_field_info(self):
+        if getattr(self, "field_info", None) is None:
+            # The setting up of fields occurs in the hierarchy, which is only
+            # instantiated once.  So we have to double check to make sure that,
+            # in the event of double-loads of a parameter file, we do not blow
+            # away the existing field_info.
+            self.field_info = FieldInfoContainer.create_with_fallback(
+                                self._fieldinfo_fallback)
+
+
+
 def _reconstruct_pf(*args, **kwargs):
     pfs = ParameterFileStore()
     pf = pfs.get_pf_hash(*args)


--- a/yt/frontends/art/data_structures.py	Sat Aug 13 17:47:02 2011 -0600
+++ b/yt/frontends/art/data_structures.py	Sat Aug 13 17:51:17 2011 -0600
@@ -115,7 +115,6 @@
     
     def __init__(self, pf, data_style='art'):
         self.data_style = data_style
-        self.field_info = ARTFieldContainer()
         self.parameter_file = weakref.proxy(pf)
         # for now, the hierarchy file is the parameter file!
         self.hierarchy_filename = self.parameter_file.parameter_filename
@@ -370,8 +369,6 @@
         StaticOutput.__init__(self, filename, data_style)
         self.storage_filename = storage_filename
         
-        self.field_info = FieldInfoContainer.create_with_fallback(
-                            self._fieldinfo_fallback)
         self.dimensionality = 3
         self.refine_by = 2
         self.parameters["HydroMethod"] = 'art'


--- a/yt/frontends/castro/data_structures.py	Sat Aug 13 17:47:02 2011 -0600
+++ b/yt/frontends/castro/data_structures.py	Sat Aug 13 17:51:17 2011 -0600
@@ -123,7 +123,6 @@
     def __init__(self, pf, data_style='castro_native'):
         super(CastroHierarchy, self).__init__(self, pf, self.data_style)
 
-        self.field_info = CastroFieldContainer()
         self.field_indexes = {}
         self.parameter_file = weakref.proxy(pf)
         header_filename = os.path.join(pf.fullplotdir, 'Header')
@@ -538,9 +537,6 @@
 
         self.fparameters = {}
 
-        self.field_info = FieldInfoContainer.create_with_fallback(
-                              self._fieldinfo_fallback)
-
         # These should maybe not be hardcoded?
         ### TODO: this.
         self.parameters["HydroMethod"] = 'castro' # always PPM DE


--- a/yt/frontends/chombo/data_structures.py	Sat Aug 13 17:47:02 2011 -0600
+++ b/yt/frontends/chombo/data_structures.py	Sat Aug 13 17:51:17 2011 -0600
@@ -94,7 +94,6 @@
         self.domain_left_edge = pf.domain_left_edge # need these to determine absolute grid locations
         self.domain_right_edge = pf.domain_right_edge # need these to determine absolute grid locations
         self.data_style = data_style
-        self.field_info = ChomboFieldContainer()
         self.field_indexes = {}
         self.parameter_file = weakref.proxy(pf)
         # for now, the hierarchy file is the parameter file!
@@ -184,8 +183,6 @@
         self.ini_filename = ini_filename
         StaticOutput.__init__(self,filename,data_style)
         self.storage_filename = storage_filename
-        self.field_info = FieldInfoContainer.create_with_fallback(
-                            self._fieldinfo_fallback)
         
     def _set_units(self):
         """


--- a/yt/frontends/enzo/data_structures.py	Sat Aug 13 17:47:02 2011 -0600
+++ b/yt/frontends/enzo/data_structures.py	Sat Aug 13 17:51:17 2011 -0600
@@ -664,14 +664,6 @@
         if self["TopGridRank"] == 1: self._setup_1d()
         elif self["TopGridRank"] == 2: self._setup_2d()
 
-        if getattr(self, "field_info", None) is None:
-            # The setting up of fields occurs in the hierarchy, which is only
-            # instantiated once.  So we have to double check to make sure that,
-            # in the event of double-loads of a parameter file, we do not blow
-            # away the exising field_info.
-            self.field_info = FieldInfoContainer.create_with_fallback(
-                                self._fieldinfo_fallback)
-
     def _setup_1d(self):
         self._hierarchy_class = EnzoHierarchy1D
         self._fieldinfo_fallback = Enzo1DFieldInfo
@@ -908,9 +900,6 @@
 
         StaticOutput.__init__(self, "InMemoryParameterFile", self._data_style)
 
-        self.field_info = FieldInfoContainer.create_with_fallback(
-                            self._fieldinfo_fallback)
-
     def _parse_parameter_file(self):
         enzo = self._obtain_enzo()
         self.basename = "cycle%08i" % (


--- a/yt/frontends/flash/data_structures.py	Sat Aug 13 17:47:02 2011 -0600
+++ b/yt/frontends/flash/data_structures.py	Sat Aug 13 17:51:17 2011 -0600
@@ -63,7 +63,6 @@
     
     def __init__(self,pf,data_style='chombo_hdf5'):
         self.data_style = data_style
-        self.field_info = FLASHFieldContainer()
         self.field_indexes = {}
         self.parameter_file = weakref.proxy(pf)
         # for now, the hierarchy file is the parameter file!
@@ -183,8 +182,6 @@
         StaticOutput.__init__(self, filename, data_style)
         self.storage_filename = storage_filename
 
-        self.field_info = FieldInfoContainer.create_with_fallback(
-                            self._fieldinfo_fallback)
         # These should be explicitly obtained from the file, but for now that
         # will wait until a reorganization of the source tree and better
         # generalization.


--- a/yt/frontends/gadget/data_structures.py	Sat Aug 13 17:47:02 2011 -0600
+++ b/yt/frontends/gadget/data_structures.py	Sat Aug 13 17:51:17 2011 -0600
@@ -71,7 +71,6 @@
     grid = GadgetGrid
 
     def __init__(self, pf, data_style='gadget_hdf5'):
-        self.field_info = GadgetFieldContainer()
         self.filename = pf.filename
         self.directory = os.path.dirname(pf.filename)
         self.data_style = data_style
@@ -147,8 +146,6 @@
 
     def __init__(self, filename,storage_filename=None) :
         self.storage_filename = storage_filename
-        self.field_info = FieldInfoContainer.create_with_fallback(
-                            self._fieldinfo_fallback)
         self.filename = filename
         
         StaticOutput.__init__(self, filename, 'gadget_infrastructure')


--- a/yt/frontends/gdf/data_structures.py	Sat Aug 13 17:47:02 2011 -0600
+++ b/yt/frontends/gdf/data_structures.py	Sat Aug 13 17:51:17 2011 -0600
@@ -149,8 +149,6 @@
         StaticOutput.__init__(self, filename, data_style)
         self._handle = h5py.File(self.filename, "r")
         self.storage_filename = storage_filename
-        self.field_info = FieldInfoContainer.create_with_fallback(
-                            self._fieldinfo_fallback)
         self._handle.close()
         del self._handle
         


--- a/yt/frontends/maestro/data_structures.py	Sat Aug 13 17:47:02 2011 -0600
+++ b/yt/frontends/maestro/data_structures.py	Sat Aug 13 17:51:17 2011 -0600
@@ -121,7 +121,6 @@
 class MaestroHierarchy(AMRHierarchy):
     grid = MaestroGrid
     def __init__(self, pf, data_style='maestro'):
-        self.field_info = pf.field_info
         self.field_indexes = {}
         self.parameter_file = weakref.proxy(pf)
         header_filename = os.path.join(pf.fullplotdir,'Header')
@@ -444,8 +443,6 @@
         # this is the unit of time; NOT the current time
         self.parameters["Time"] = 1 # second
 
-        self.field_info = FieldInfoContainer.create_with_fallback(
-                            self._fieldinfo_fallback)
         self._parse_header_file()
 
 


--- a/yt/frontends/nyx/data_structures.py	Sat Aug 13 17:47:02 2011 -0600
+++ b/yt/frontends/nyx/data_structures.py	Sat Aug 13 17:51:17 2011 -0600
@@ -118,7 +118,6 @@
     grid = NyxGrid
 
     def __init__(self, pf, data_style="nyx_native"):
-        self.field_info = NyxFieldContainer()
         self.field_indexes = {}
         self.parameter_file = weakref.proxy(pf)
         self.directory = pf.path
@@ -563,9 +562,6 @@
         # ``self.print_key_parameters()``
         StaticOutput.__init__(self, plotname.rstrip("/"), data_style=data_style)
 
-        # @todo: field pruning should happen here
-        self.field_info = self._fieldinfo_class()
-
         # @todo: check all of these and hopefully factor out of the constructor.
         # These should maybe not be hardcoded?
         self.parameters["HydroMethod"] = "nyx"  # always PPM DE


--- a/yt/frontends/orion/data_structures.py	Sat Aug 13 17:47:02 2011 -0600
+++ b/yt/frontends/orion/data_structures.py	Sat Aug 13 17:51:17 2011 -0600
@@ -117,7 +117,6 @@
 class OrionHierarchy(AMRHierarchy):
     grid = OrionGrid
     def __init__(self, pf, data_style='orion_native'):
-        self.field_info = OrionFieldContainer()
         self.field_indexes = {}
         self.parameter_file = weakref.proxy(pf)
         header_filename = os.path.join(pf.fullplotdir,'Header')
@@ -446,8 +445,6 @@
 
         StaticOutput.__init__(self, plotname.rstrip("/"),
                               data_style='orion_native')
-        self.field_info = FieldInfoContainer.create_with_fallback(
-                            self._fieldinfo_fallback)
 
         # These should maybe not be hardcoded?
         self.parameters["HydroMethod"] = 'orion' # always PPM DE


--- a/yt/frontends/ramses/data_structures.py	Sat Aug 13 17:47:02 2011 -0600
+++ b/yt/frontends/ramses/data_structures.py	Sat Aug 13 17:51:17 2011 -0600
@@ -108,7 +108,6 @@
     
     def __init__(self,pf,data_style='ramses'):
         self.data_style = data_style
-        self.field_info = RAMSESFieldContainer()
         self.parameter_file = weakref.proxy(pf)
         # for now, the hierarchy file is the parameter file!
         self.hierarchy_filename = self.parameter_file.parameter_filename
@@ -266,9 +265,6 @@
         StaticOutput.__init__(self, filename, data_style)
         self.storage_filename = storage_filename
 
-        self.field_info = FieldInfoContainer.create_with_fallback(
-                            self._fieldinfo_fallback)
-
     def __repr__(self):
         return self.basename.rsplit(".", 1)[0]
         


--- a/yt/frontends/stream/data_structures.py	Sat Aug 13 17:47:02 2011 -0600
+++ b/yt/frontends/stream/data_structures.py	Sat Aug 13 17:51:17 2011 -0600
@@ -257,7 +257,6 @@
         self.stream_handler = stream_handler
         StaticOutput.__init__(self, "InMemoryParameterFile", self._data_style)
 
-        self.field_info = self._fieldinfo_class()
         self.units = {}
         self.time_units = {}
 


--- a/yt/mods.py	Sat Aug 13 17:47:02 2011 -0600
+++ b/yt/mods.py	Sat Aug 13 17:51:17 2011 -0600
@@ -60,8 +60,8 @@
 from yt.frontends.castro.api import \
     CastroStaticOutput, CastroFieldInfo, add_castro_field
 
-from yt.frontends.nyx.api import \
-    NyxStaticOutput, nyx_fields, add_nyx_field
+#from yt.frontends.nyx.api import \
+#    NyxStaticOutput, nyx_fields, add_nyx_field
 
 from yt.frontends.orion.api import \
     OrionStaticOutput, OrionFieldInfo, add_orion_field


http://bitbucket.org/yt_analysis/yt/changeset/1b685b6bfbf2/
changeset:   1b685b6bfbf2
branch:      yt
user:        MatthewTurk
date:        2011-09-05 05:23:46
summary:     Initial import of the peewee ORM, and a first pass at a
PeeWee/SQLite3 datastore that interoperates with the Enzo data store.  Still
needs further testing, but preliminary results are good.
affected #:  4 files (-1 bytes)
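
The datastore introduced below keys its rows on the dataset UUID and talks
to SQLite through the vendored module's small model API.  A minimal sketch
of that API, using only calls that appear in this changeset (the model name
and the in-memory database are illustrative, not part of the commit):

    import yt.utilities.peewee as peewee

    db = peewee.SqliteDatabase(":memory:")   # illustrative; the real store
                                             # opens the on-disk pf database

    class Output(peewee.Model):
        dset_uuid = peewee.TextField()
        redshift = peewee.FloatField()
        class Meta:
            database = db
            db_table = "outputs"

    db.connect()
    db.create_table(Output)
    Output.insert(dset_uuid="abc123", redshift=0.5).execute()
    print(Output.get(dset_uuid="abc123").redshift)   # 0.5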

--- a/yt/convenience.py	Sat Sep 03 08:46:55 2011 -0400
+++ b/yt/convenience.py	Sun Sep 04 23:23:46 2011 -0400
@@ -32,8 +32,7 @@
 from yt.funcs import *
 from yt.config import ytcfg
 from yt.utilities.parameter_file_storage import \
-    output_type_registry, \
-    EnzoRunDatabase
+    output_type_registry
 
 def all_pfs(basedir='.', skip=None, max_depth=1, name_spec="*.hierarchy", **kwargs):
     """
@@ -90,15 +89,6 @@
     if len(candidates) == 1:
         return output_type_registry[candidates[0]](*args, **kwargs)
     if len(candidates) == 0:
-        if ytcfg.get("yt", "enzo_db") != '' \
-           and len(args) == 1 \
-           and isinstance(args[0], types.StringTypes):
-            erdb = EnzoRunDatabase()
-            fn = erdb.find_uuid(args[0])
-            n = "EnzoStaticOutput"
-            if n in output_type_registry \
-               and output_type_registry[n]._is_valid(fn):
-                return output_type_registry[n](fn)
         mylog.error("Couldn't figure out output type for %s", args[0])
         return None
     mylog.error("Multiple output type candidates for %s:", args[0])


--- a/yt/data_objects/static_output.py	Sat Sep 03 08:46:55 2011 -0400
+++ b/yt/data_objects/static_output.py	Sun Sep 04 23:23:46 2011 -0400
@@ -104,6 +104,8 @@
         return self.basename
 
     def _hash(self):
+        if "MetaDataDatasetUUID" in self.parameters:
+            return self["MetaDataDatasetUUID"]
         s = "%s;%s;%s" % (self.basename,
             self.current_time, self.unique_identifier)
         try:


--- a/yt/utilities/parameter_file_storage.py	Sat Sep 03 08:46:55 2011 -0400
+++ b/yt/utilities/parameter_file_storage.py	Sun Sep 04 23:23:46 2011 -0400
@@ -32,6 +32,8 @@
 from yt.utilities.parallel_tools.parallel_analysis_interface import \
     parallel_simple_proxy
 
+import yt.utilities.peewee as peewee
+
 output_type_registry = {}
 _field_names = ('hash', 'bn', 'fp', 'tt', 'ctid', 'class_name', 'last_seen')
 
@@ -48,6 +50,20 @@
     def __repr__(self):
         return "%s" % self.name
 
+_field_spec = dict(
+    dset_uuid = peewee.TextField(),
+    output_type = peewee.TextField(),
+    pf_path = peewee.TextField(),
+    creation_time = peewee.IntegerField(),
+    last_seen_time = peewee.IntegerField(),
+    simulation_uuid = peewee.TextField(),
+    redshift = peewee.FloatField(),
+    time = peewee.FloatField(),
+    topgrid0 = peewee.IntegerField(),
+    topgrid1 = peewee.IntegerField(),
+    topgrid2 = peewee.IntegerField(),
+)
+
 class ParameterFileStore(object):
     """
     This class is designed to be a semi-persistent storage for parameter
@@ -62,6 +78,7 @@
     _distributed = True
     _processing = False
     _owner = 0
+    conn = None
 
     def __new__(cls, *p, **k):
         self = object.__new__(cls, *p, **k)
@@ -77,7 +94,6 @@
         if ytcfg.getboolean("yt", "StoreParameterFiles"):
             self._read_only = False
             self.init_db()
-            self._records = self.read_db()
         else:
             self._read_only = True
             self._records = {}
@@ -93,9 +109,18 @@
             if not os.path.isdir(dbdir): os.mkdir(dbdir)
         except OSError:
             raise NoParameterShelf()
-        open(dbn, 'ab') # make sure it exists, allow to close
-        # Now we read in all our records and return them
-        # these will be broadcast
+        self.conn = peewee.SqliteDatabase(dbn)
+        class SimulationOutputsMeta:
+            database = self.conn
+            db_table = "simulation_outputs"
+        _field_spec["Meta"] = SimulationOutputsMeta
+        self.output_model = type(
+            "SimulationOutputs",
+            (peewee.Model,),
+            _field_spec,
+        )
+        self.output_model._meta.pk_name = "dset_uuid"
+        self.conn.connect()
 
     def _get_db_name(self):
         base_file_name = ytcfg.get("yt", "ParameterFileStore")
@@ -105,39 +130,23 @@
 
     def get_pf_hash(self, hash):
         """ This returns a parameter file based on a hash. """
-        return self._convert_pf(self._records[hash])
+        output = self.output_model.get(dset_uuid = hash)
+        return self._convert_pf(output)
 
-    def get_pf_ctid(self, ctid):
-        """ This returns a parameter file based on a CurrentTimeIdentifier. """
-        for h in self._records:
-            if self._records[h]['ctid'] == ctid:
-                return self._convert_pf(self._records[h])
-
-    def _adapt_pf(self, pf):
-        """ This turns a parameter file into a CSV entry. """
-        return dict(bn=pf.basename,
-                    fp=pf.fullpath,
-                    tt=pf.current_time,
-                    ctid=pf.unique_identifier,
-                    class_name=pf.__class__.__name__,
-                    last_seen=pf._instantiated)
-
-    def _convert_pf(self, pf_dict):
-        """ This turns a CSV entry into a parameter file. """
-        bn = pf_dict['bn']
-        fp = pf_dict['fp']
-        fn = os.path.join(fp, bn)
-        class_name = pf_dict['class_name']
-        if class_name not in output_type_registry:
-            raise UnknownStaticOutputType(class_name)
+    def _convert_pf(self, inst):
+        """ This turns a model into a parameter file. """
+        fn = inst.pf_path
+        if inst.output_type not in output_type_registry:
+            raise UnknownStaticOutputType(inst.output_type)
         mylog.info("Checking %s", fn)
         if os.path.exists(fn):
-            pf = output_type_registry[class_name](os.path.join(fp, bn))
+            pf = output_type_registry[inst.output_type](fn)
         else:
             raise IOError
         # This next one is to ensure that we manually update the last_seen
         # record *now*, for during write_out.
-        self._records[pf._hash()]['last_seen'] = pf._instantiated
+        self.output_model.update(last_seen_time = pf._instantiated).where(
+            dset_uuid = inst.dset_uuid).execute()
         return pf
 
     def check_pf(self, pf):
@@ -146,86 +155,34 @@
         recorded in the storage unit.  In doing so, it will update path
         and "last_seen" information.
         """
-        hash = pf._hash()
-        if hash not in self._records:
+        q = self.output_model.select().where(dset_uuid = pf._hash())
+        q.execute()
+        if q.count() == 0:
             self.insert_pf(pf)
             return
-        pf_dict = self._records[hash]
-        self._records[hash]['last_seen'] = pf._instantiated
-        if pf_dict['bn'] != pf.basename \
-          or pf_dict['fp'] != pf.fullpath:
-            self.wipe_hash(hash)
-            self.insert_pf(pf)
+        # Otherwise we update
+        self.output_model.update(
+            last_seen_time = pf._instantiated,
+            pf_path = os.path.join(pf.fullpath, pf.basename)
+        ).where(
+            dset_uuid = pf._hash()).execute(
+        )
 
     def insert_pf(self, pf):
         """ This will insert a new *pf* and flush the database to disk. """
-        self._records[pf._hash()] = self._adapt_pf(pf)
-        self.flush_db()
-
-    def wipe_hash(self, hash):
-        """
-        This removes a *hash* corresponding to a parameter file from the
-        storage.
-        """
-        if hash not in self._records: return
-        del self._records[hash]
-        self.flush_db()
-
-    def flush_db(self):
-        """ This flushes the storage to disk. """
-        if self._read_only: return
-        self._write_out()
-        self.read_db()
-
-    def get_recent(self, n=10):
-        recs = sorted(self._records.values(), key=lambda a: -a['last_seen'])[:n]
-        return recs
-
-    @parallel_simple_proxy
-    def _write_out(self):
-        if self._read_only: return
-        fn = self._get_db_name()
-        f = open("%s.tmp" % fn, 'wb')
-        w = csv.DictWriter(f, _field_names)
-        maxn = ytcfg.getint("yt","MaximumStoredPFs") # number written
-        for h,v in islice(sorted(self._records.items(),
-                          key=lambda a: -a[1]['last_seen']), 0, maxn):
-            v['hash'] = h
-            w.writerow(v)
-        f.close()
-        os.rename("%s.tmp" % fn, fn)
-
-    @parallel_simple_proxy
-    def read_db(self):
-        """ This will read the storage device from disk. """
-        f = open(self._get_db_name(), 'rb')
-        vals = csv.DictReader(f, _field_names)
-        db = {}
-        for v in vals:
-            db[v.pop('hash')] = v
-            if v['last_seen'] is None:
-                v['last_seen'] = 0.0
-            else: v['last_seen'] = float(v['last_seen'])
-        return db
-
-class ObjectStorage(object):
-    pass
-
-class EnzoRunDatabase(object):
-    conn = None
-
-    def __init__(self, path = None):
-        if path is None:
-            path = ytcfg.get("yt", "enzo_db")
-            if len(path) == 0: raise Runtime
-        import sqlite3
-        self.conn = sqlite3.connect(path)
-
-    def find_uuid(self, u):
-        cursor = self.conn.execute(
-            "select pf_path from enzo_outputs where dset_uuid = '%s'" % (
-                u))
-        # It's a 'unique key'
-        result = cursor.fetchone()
-        if result is None: return None
-        return result[0]
+        q = self.output_model.insert(
+                    dset_uuid = pf._hash(),
+                    output_type = pf.__class__.__name__,
+                    pf_path = os.path.join(
+                        pf.fullpath, pf.basename),
+                    creation_time = pf.parameters.get(
+                        "CurrentTimeIdentifier", 0), # Get os.stat
+                    last_seen_time = pf._instantiated,
+                    simulation_uuid = pf.parameters.get(
+                        "SimulationUUID", ""), # NULL
+                    redshift = pf.current_redshift,
+                    time = pf.current_time,
+                    topgrid0 = pf.domain_dimensions[0],
+                    topgrid1 = pf.domain_dimensions[1],
+                    topgrid2 = pf.domain_dimensions[2])
+        q.execute()


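Note how init_db() above builds its model class dynamically with type():
the schema lives in a plain dict (_field_spec) and the Meta inner class can
capture the live connection at runtime.  A standalone sketch of the same
trick (all names here are illustrative):

    import yt.utilities.peewee as peewee

    db = peewee.SqliteDatabase(":memory:")

    class OutputsMeta:
        database = db
        db_table = "simulation_outputs_sketch"

    field_spec = dict(
        dset_uuid = peewee.TextField(),
        redshift = peewee.FloatField(),
        Meta = OutputsMeta,
    )
    # type() routes through the same metaclass machinery as a normal
    # class statement, so fields and Meta are picked up as usual
    OutputModel = type("OutputModel", (peewee.Model,), field_spec)
    OutputModel._meta.pk_name = "dset_uuid"   # key rows on the UUID

    db.connect()
    db.create_table(OutputModel)
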
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/yt/utilities/peewee.py	Sun Sep 04 23:23:46 2011 -0400
@@ -0,0 +1,1425 @@
+#     (\
+#     (  \  /(o)\     caw!
+#     (   \/  ()/ /)
+#      (   `;.))'".) 
+#       `(/////.-'
+#    =====))=))===() 
+#      ///'       
+#     //
+#    '
+from datetime import datetime
+import logging
+import os
+import re
+import time
+
+try:
+    import sqlite3
+except ImportError:
+    sqlite3 = None
+
+try:
+    import psycopg2
+except ImportError:
+    psycopg2 = None
+
+try:
+    import MySQLdb as mysql
+except ImportError:
+    mysql = None
+
+class ImproperlyConfigured(Exception):
+    # raised when no supported database driver can be imported
+    pass
+
+if sqlite3 is None and psycopg2 is None and mysql is None:
+    raise ImproperlyConfigured('Either sqlite3, psycopg2 or MySQLdb must be installed')
+
+
+DATABASE_NAME = os.environ.get('PEEWEE_DATABASE', 'peewee.db')
+logger = logging.getLogger('peewee.logger')
+
+
+class BaseAdapter(object):
+    """
+    The various subclasses of `BaseAdapter` provide a bridge between the high-
+    level `Database` abstraction and the underlying python libraries like
+    psycopg2.  It also provides a way to unify the pythonic field types with
+    the underlying column types used by the database engine.
+    
+    The `BaseAdapter` provides two types of mappings:    
+    - mapping between filter operations and their database equivalents
+    - mapping between basic field types and their database column types
+    
+    The `BaseAdapter` also is the mechanism used by the `Database` class to:
+    - handle connections with the database
+    - extract information from the database cursor
+    """
+    operations = {'eq': '= %s'}
+    interpolation = '%s'
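+    # subclasses extend 'operations': a lookup such as redshift__gte=1.0
+    # splits on '__' and resolves 'gte' here to its SQL fragment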
+    
+    def get_field_types(self):
+        field_types = {
+            'integer': 'INTEGER',
+            'float': 'REAL',
+            'decimal': 'NUMERIC',
+            'string': 'VARCHAR',
+            'text': 'TEXT',
+            'datetime': 'DATETIME',
+            'primary_key': 'INTEGER',
+            'foreign_key': 'INTEGER',
+            'boolean': 'SMALLINT',
+        }
+        field_types.update(self.get_field_overrides())
+        return field_types
+    
+    def get_field_overrides(self):
+        return {}
+    
+    def connect(self, database, **kwargs):
+        raise NotImplementedError
+    
+    def close(self, conn):
+        conn.close()
+    
+    def lookup_cast(self, lookup, value):
+        if lookup in ('contains', 'icontains'):
+            return '%%%s%%' % value
+        elif lookup in ('startswith', 'istartswith'):
+            return '%s%%' % value
+        return value
+    
+    def last_insert_id(self, cursor, model):
+        return cursor.lastrowid
+    
+    def rows_affected(self, cursor):
+        return cursor.rowcount
+
+
+class SqliteAdapter(BaseAdapter):
+    # note the sqlite library uses a non-standard interpolation string
+    operations = {
+        'lt': '< ?',
+        'lte': '<= ?',
+        'gt': '> ?',
+        'gte': '>= ?',
+        'eq': '= ?',
+        'ne': '!= ?', # watch yourself with this one
+        'in': 'IN (%s)', # special-case to list q-marks
+        'is': 'IS ?',
+        'icontains': "LIKE ? ESCAPE '\\'", # surround param with %'s
+        'contains': "GLOB ?", # surround param with *'s
+        'istartswith': "LIKE ? ESCAPE '\\'",
+        'startswith': "GLOB ?",
+    }
+    interpolation = '?'
+    
+    def connect(self, database, **kwargs):
+        return sqlite3.connect(database, **kwargs)
+    
+    def lookup_cast(self, lookup, value):
+        if lookup == 'contains':
+            return '*%s*' % value
+        elif lookup == 'icontains':
+            return '%%%s%%' % value
+        elif lookup == 'startswith':
+            return '%s*' % value
+        elif lookup == 'istartswith':
+            return '%s%%' % value
+        return value
+
+
+class PostgresqlAdapter(BaseAdapter):
+    operations = {
+        'lt': '< %s',
+        'lte': '<= %s',
+        'gt': '> %s',
+        'gte': '>= %s',
+        'eq': '= %s',
+        'ne': '!= %s', # watch yourself with this one
+        'in': 'IN (%s)', # special-case to list q-marks
+        'is': 'IS %s',
+        'icontains': 'ILIKE %s', # surround param with %'s
+        'contains': 'LIKE %s', # surround param with *'s
+        'istartswith': 'ILIKE %s',
+        'startswith': 'LIKE %s',
+    }
+        
+    def connect(self, database, **kwargs):
+        return psycopg2.connect(database=database, **kwargs)
+    
+    def get_field_overrides(self):
+        return {
+            'primary_key': 'SERIAL',
+            'datetime': 'TIMESTAMP'
+        }
+    
+    def last_insert_id(self, cursor, model):
+        cursor.execute("SELECT CURRVAL('\"%s_%s_seq\"')" % (
+            model._meta.db_table, model._meta.pk_name))
+        return cursor.fetchone()[0]
+    
+
+class MySQLAdapter(BaseAdapter):
+    operations = {
+        'lt': '< %s',
+        'lte': '<= %s',
+        'gt': '> %s',
+        'gte': '>= %s',
+        'eq': '= %s',
+        'ne': '!= %s', # watch yourself with this one
+        'in': 'IN (%s)', # special-case to list q-marks
+        'is': 'IS %s',
+        'icontains': 'LIKE %s', # surround param with %'s
+        'contains': 'LIKE BINARY %s', # surround param with *'s
+        'istartswith': 'LIKE %s',
+        'startswith': 'LIKE BINARY %s',
+    }
+
+    def connect(self, database, **kwargs):
+        return mysql.connect(db=database, **kwargs)
+
+    def get_field_overrides(self):
+        return {
+            'primary_key': 'integer AUTO_INCREMENT',
+            'boolean': 'bool',
+            'float': 'double precision',
+            'text': 'longtext',
+        }
+
+
+class Database(object):
+    """
+    A high-level api for working with the supported database engines.  `Database`
+    provides a wrapper around some of the functions performed by the `Adapter`,
+    in addition providing support for:
+    - execution of SQL queries
+    - creating and dropping tables and indexes
+    """
+    def __init__(self, adapter, database, **connect_kwargs):
+        self.adapter = adapter
+        self.database = database
+        self.connect_kwargs = connect_kwargs
+    
+    def connect(self):
+        self.conn = self.adapter.connect(self.database, **self.connect_kwargs)
+    
+    def close(self):
+        self.adapter.close(self.conn)
+    
+    def execute(self, sql, params=None, commit=False):
+        cursor = self.conn.cursor()
+        res = cursor.execute(sql, params or ())
+        if commit:
+            self.conn.commit()
+        logger.debug((sql, params))
+        return cursor
+    
+    def last_insert_id(self, cursor, model):
+        return self.adapter.last_insert_id(cursor, model)
+    
+    def rows_affected(self, cursor):
+        return self.adapter.rows_affected(cursor)
+    
+    def column_for_field(self, db_field):
+        try:
+            return self.adapter.get_field_types()[db_field]
+        except KeyError:
+            raise AttributeError('Unknown field type: "%s", valid types are: %s' % (
+                db_field, ', '.join(self.adapter.get_field_types().keys())))
+    
+    def create_table(self, model_class):
+        framing = "CREATE TABLE %s (%s);"
+        columns = []
+
+        for field in model_class._meta.fields.values():
+            columns.append(field.to_sql())
+
+        query = framing % (model_class._meta.db_table, ', '.join(columns))
+        
+        self.execute(query, commit=True)
+    
+    def create_index(self, model_class, field, unique=False):
+        framing = 'CREATE %(unique)s INDEX %(model)s_%(field)s ON %(model)s(%(field)s);'
+        
+        if field not in model_class._meta.fields:
+            raise AttributeError(
+                'Field %s not on model %s' % (field, model_class)
+            )
+        
+        unique_expr = ternary(unique, 'UNIQUE', '')
+        
+        query = framing % {
+            'unique': unique_expr,
+            'model': model_class._meta.db_table,
+            'field': field
+        }
+        
+        self.execute(query, commit=True)
+    
+    def drop_table(self, model_class, fail_silently=False):
+        framing = fail_silently and 'DROP TABLE IF EXISTS %s;' or 'DROP TABLE %s;'
+        self.execute(framing % model_class._meta.db_table, commit=True)
+    
+    def get_indexes_for_table(self, table):
+        raise NotImplementedError
+
+
+class SqliteDatabase(Database):
+    def __init__(self, database, **connect_kwargs):
+        super(SqliteDatabase, self).__init__(SqliteAdapter(), database, **connect_kwargs)
+    
+    def get_indexes_for_table(self, table):
+        res = self.execute('PRAGMA index_list(%s);' % table)
+        rows = sorted([(r[1], r[2] == 1) for r in res.fetchall()])
+        return rows
+
+
+class PostgresqlDatabase(Database):
+    def __init__(self, database, **connect_kwargs):
+        super(PostgresqlDatabase, self).__init__(PostgresqlAdapter(), database, **connect_kwargs)
+    
+    def get_indexes_for_table(self, table):
+        res = self.execute("""
+            SELECT c2.relname, i.indisprimary, i.indisunique
+            FROM pg_catalog.pg_class c, pg_catalog.pg_class c2, pg_catalog.pg_index i
+            WHERE c.relname = %s AND c.oid = i.indrelid AND i.indexrelid = c2.oid
+            ORDER BY i.indisprimary DESC, i.indisunique DESC, c2.relname""", (table,))
+        return sorted([(r[0], r[1]) for r in res.fetchall()])
+
+class MySQLDatabase(Database):
+    def __init__(self, database, **connect_kwargs):
+        super(MySQLDatabase, self).__init__(MySQLAdapter(), database, **connect_kwargs)
+    
+    def get_indexes_for_table(self, table):
+        res = self.execute('SHOW INDEXES IN %s;' % table)
+        rows = sorted([(r[2], r[1] == 0) for r in res.fetchall()])
+        return rows
+
+
+class QueryResultWrapper(object):
+    """
+    Provides an iterator over the results of a raw Query, additionally doing
+    two things:
+    - converts rows from the database into model instances
+    - ensures that multiple iterations do not result in multiple queries
+    """
+    def __init__(self, model, cursor):
+        self.model = model
+        self.cursor = cursor
+        self._result_cache = []
+        self._populated = False
+    
+    def model_from_rowset(self, model_class, row_dict):
+        instance = model_class()
+        for attr, value in row_dict.iteritems():
+            if attr in instance._meta.fields:
+                field = instance._meta.fields[attr]
+                setattr(instance, attr, field.python_value(value))
+            else:
+                setattr(instance, attr, value)
+        return instance    
+    
+    def _row_to_dict(self, row, result_cursor):
+        return dict((result_cursor.description[i][0], value)
+            for i, value in enumerate(row))
+    
+    def __iter__(self):
+        if not self._populated:
+            return self
+        else:
+            return iter(self._result_cache)
+    
+    def next(self):
+        row = self.cursor.fetchone()
+        if row:
+            row_dict = self._row_to_dict(row, self.cursor)
+            instance = self.model_from_rowset(self.model, row_dict)
+            self._result_cache.append(instance)
+            return instance
+        else:
+            self._populated = True
+            raise StopIteration
+
+
+# create
+class DoesNotExist(Exception):
+    pass
+
+
+# semantic wrappers for ordering the results of a `SelectQuery`
+def asc(f):
+    return (f, 'ASC')
+
+def desc(f):
+    return (f, 'DESC')
+
+# wrappers for performing aggregation in a `SelectQuery`
+def Count(f, alias='count'):
+    return ('COUNT', f, alias)
+
+def Max(f, alias='max'):
+    return ('MAX', f, alias)
+
+def Min(f, alias='min'):
+    return ('MIN', f, alias)
+
+# decorator for query methods to indicate that they change the state of the
+# underlying data structures
+def returns_clone(func):
+    def inner(self, *args, **kwargs):
+        clone = self.clone()
+        res = func(clone, *args, **kwargs)
+        return clone
+    return inner
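+# e.g. q.where(redshift__gt=0.0) returns the modified clone and leaves the
+# original query object untouched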
+
+# helpers
+ternary = lambda cond, t, f: (cond and [t] or [f])[0]
+
+
+class Node(object):
+    def __init__(self, connector='AND'):
+        self.connector = connector
+        self.children = []
+        self.negated = False
+    
+    def connect(self, rhs, connector):
+        if isinstance(rhs, Q):
+            if connector == self.connector:
+                self.children.append(rhs)
+                return self
+            else:
+                p = Node(connector)
+                p.children = [self, rhs]
+                return p
+        elif isinstance(rhs, Node):
+            p = Node(connector)
+            p.children = [self, rhs]
+            return p
+    
+    def __or__(self, rhs):
+        return self.connect(rhs, 'OR')
+
+    def __and__(self, rhs):
+        return self.connect(rhs, 'AND')
+    
+    def __invert__(self):
+        self.negated = not self.negated
+        return self
+    
+    def __unicode__(self):
+        query = []
+        nodes = []
+        for child in self.children:
+            if isinstance(child, Q):
+                query.append(unicode(child))
+            elif isinstance(child, Node):
+                nodes.append('(%s)' % unicode(child))
+        query.extend(nodes)
+        connector = ' %s ' % self.connector
+        query = connector.join(query)
+        if self.negated:
+            query = 'NOT %s' % query
+        return query
+    
+
+class Q(object):
+    def __init__(self, **kwargs):
+        self.query = kwargs
+        self.parent = None
+        self.negated = False
+    
+    def connect(self, connector):
+        if self.parent is None:
+            self.parent = Node(connector)
+            self.parent.children.append(self)
+    
+    def __or__(self, rhs):
+        self.connect('OR')
+        return self.parent | rhs
+    
+    def __and__(self, rhs):
+        self.connect('AND')
+        return self.parent & rhs
+    
+    def __invert__(self):
+        self.negated = not self.negated
+        return self
+    
+    def __unicode__(self):
+        bits = ['%s = %s' % (k, v) for k, v in self.query.items()]
+        if len(self.query.items()) > 1:
+            connector = ' AND '
+            expr = '(%s)' % connector.join(bits)
+        else:
+            expr = bits[0]
+        if self.negated:
+            expr = 'NOT %s' % expr
+        return expr
+
+
+def parseq(*args, **kwargs):
+    node = Node()
+    
+    for piece in args:
+        if isinstance(piece, (Q, Node)):
+            node.children.append(piece)
+        else:
+            raise TypeError('Unknown object: %s' % piece)
+
+    if kwargs:
+        node.children.append(Q(**kwargs))
+
+    return node
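+# e.g. parseq(Q(a=1) | Q(b=2), c=3) groups the two Q objects under OR and
+# ANDs in the keyword lookup, mirroring the .where() call signature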
+
+
+class EmptyResultException(Exception):
+    pass
+
+
+class BaseQuery(object):
+    query_separator = '__'
+    requires_commit = True
+    force_alias = False
+    
+    def __init__(self, model):
+        self.model = model
+        self.query_context = model
+        self.database = self.model._meta.database
+        self.operations = self.database.adapter.operations
+        self.interpolation = self.database.adapter.interpolation
+        
+        self._dirty = True
+        self._where = {}
+        self._joins = []
+    
+    def clone(self):
+        raise NotImplementedError
+    
+    def lookup_cast(self, lookup, value):
+        return self.database.adapter.lookup_cast(lookup, value)
+    
+    def parse_query_args(self, model, **query):
+        parsed = {}
+        for lhs, rhs in query.iteritems():
+            if self.query_separator in lhs:
+                lhs, op = lhs.rsplit(self.query_separator, 1)
+            else:
+                op = 'eq'
+            
+            try:
+                field = model._meta.get_field_by_name(lhs)
+            except AttributeError:
+                field = model._meta.get_related_field_by_name(lhs)
+                if field is None:
+                    raise
+                if isinstance(rhs, Model):
+                    rhs = rhs.get_pk()
+            
+            if op == 'in':
+                if isinstance(rhs, SelectQuery):
+                    lookup_value = rhs
+                    operation = 'IN (%s)'
+                else:
+                    if not rhs:
+                        raise EmptyResultException
+                    lookup_value = [field.db_value(o) for o in rhs]
+                    operation = self.operations[op] % \
+                        (','.join([self.interpolation for v in lookup_value]))
+            elif op == 'is':
+                if rhs is not None:
+                    raise ValueError('__is lookups only accept None')
+                operation = 'IS NULL'
+                lookup_value = []
+            else:
+                lookup_value = field.db_value(rhs)
+                operation = self.operations[op]
+            
+            parsed[field.name] = (operation, self.lookup_cast(op, lookup_value))
+        
+        return parsed
+    
+    @returns_clone
+    def where(self, *args, **kwargs):
+        self._where.setdefault(self.query_context, [])
+        self._where[self.query_context].append(parseq(*args, **kwargs))
+
+    @returns_clone
+    def join(self, model, join_type=None, on=None):
+        if self.query_context._meta.rel_exists(model):
+            self._joins.append((model, join_type, on))
+            self.query_context = model
+        else:
+            raise AttributeError('No foreign key found between %s and %s' % \
+                (self.query_context.__name__, model.__name__))
+
+    @returns_clone
+    def switch(self, model):
+        if model == self.model:
+            self.query_context = model
+            return
+
+        for klass, join_type, on in self._joins:
+            if model == klass:
+                self.query_context = model
+                return
+        raise AttributeError('You must JOIN on %s' % model.__name__)
+    
+    def use_aliases(self):
+        return len(self._joins) > 0 or self.force_alias
+
+    def combine_field(self, alias, field_name):
+        if alias:
+            return '%s.%s' % (alias, field_name)
+        return field_name
+    
+    def compile_where(self):
+        alias_count = 0
+        alias_map = {}
+
+        alias_required = self.use_aliases()
+
+        joins = list(self._joins)
+        if self._where or len(joins):
+            joins.insert(0, (self.model, None, None))
+        
+        where_with_alias = []
+        where_data = []
+        computed_joins = []
+
+        for i, (model, join_type, on) in enumerate(joins):
+            if alias_required:
+                alias_count += 1
+                alias_map[model] = 't%d' % alias_count
+            else:
+                alias_map[model] = ''
+            
+            if i > 0:
+                from_model = joins[i-1][0]
+                field = from_model._meta.get_related_field_for_model(model, on)
+                if field:
+                    left_field = field.name
+                    right_field = model._meta.pk_name
+                else:
+                    field = from_model._meta.get_reverse_related_field_for_model(model, on)
+                    left_field = from_model._meta.pk_name
+                    right_field = field.name
+                
+                if join_type is None:
+                    if field.null and model not in self._where:
+                        join_type = 'LEFT OUTER'
+                    else:
+                        join_type = 'INNER'
+                
+                computed_joins.append(
+                    '%s JOIN %s AS %s ON %s = %s' % (
+                        join_type,
+                        model._meta.db_table,
+                        alias_map[model],
+                        self.combine_field(alias_map[from_model], left_field),
+                        self.combine_field(alias_map[model], right_field),
+                    )
+                )
+        
+        for (model, join_type, on) in joins:
+            if model in self._where:
+                for node in self._where[model]:
+                    query, data = self.parse_node(node, model, alias_map)
+                    where_with_alias.append(query)
+                    where_data.extend(data)
+        
+        return computed_joins, where_with_alias, where_data, alias_map
+    
+    def convert_where_to_params(self, where_data):
+        flattened = []
+        for clause in where_data:
+            if isinstance(clause, (tuple, list)):
+                flattened.extend(clause)
+            else:
+                flattened.append(clause)
+        return flattened
+    
+    def parse_node(self, node, model, alias_map):
+        query = []
+        query_data = []
+        nodes = []
+        for child in node.children:
+            if isinstance(child, Q):
+                parsed, data = self.parse_q(child, model, alias_map)
+                query.append(parsed)
+                query_data.extend(data)
+            elif isinstance(child, Node):
+                parsed, data = self.parse_node(child, model, alias_map)
+                query.append('(%s)' % parsed)
+                query_data.extend(data)
+        query.extend(nodes)
+        connector = ' %s ' % node.connector
+        query = connector.join(query)
+        if node.negated:
+            query = 'NOT (%s)' % query
+        return query, query_data
+    
+    def parse_q(self, q, model, alias_map):
+        query = []
+        query_data = []
+        parsed = self.parse_query_args(model, **q.query)
+        for (name, lookup) in parsed.iteritems():
+            operation, value = lookup
+            if isinstance(value, SelectQuery):
+                sql, value = self.convert_subquery(value)
+                operation = operation % sql
+
+            query_data.append(value)
+            
+            combined = self.combine_field(alias_map[model], name)
+            query.append('%s %s' % (combined, operation))
+        
+        if len(query) > 1:
+            query = '(%s)' % (' AND '.join(query))
+        else:
+            query = query[0]
+        
+        if q.negated:
+            query = 'NOT %s' % query
+        
+        return query, query_data
+
+    def convert_subquery(self, subquery):
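+        # rewrite the subquery to select only its (aliased) primary key so
+        # it can be embedded in an IN (...) clause, then restore its state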
+        subquery.query, orig_query = subquery.model._meta.pk_name, subquery.query
+        subquery.force_alias, orig_alias = True, subquery.force_alias
+        sql, data = subquery.sql()
+        subquery.query = orig_query
+        subquery.force_alias = orig_alias
+        return sql, data
+    
+    def raw_execute(self):
+        query, params = self.sql()
+        return self.database.execute(query, params, self.requires_commit)
+
+
+class RawQuery(BaseQuery):
+    def __init__(self, model, query, *params):
+        self._sql = query
+        self._params = list(params)
+        super(RawQuery, self).__init__(model)
+    
+    def sql(self):
+        return self._sql, self._params
+    
+    def execute(self):
+        return QueryResultWrapper(self.model, self.raw_execute())
+    
+    def join(self):
+        raise AttributeError('Raw queries do not support joining programmatically')
+    
+    def where(self):
+        raise AttributeError('Raw queries do not support querying programmatically')
+    
+    def switch(self):
+        raise AttributeError('Raw queries do not support switching contexts')
+    
+    def __iter__(self):
+        return self.execute()
+
+
+class SelectQuery(BaseQuery):
+    requires_commit = False
+    
+    def __init__(self, model, query=None):
+        self.query = query or '*'
+        self._group_by = []
+        self._having = []
+        self._order_by = []
+        self._pagination = None # return all by default
+        self._distinct = False
+        self._qr = None
+        super(SelectQuery, self).__init__(model)
+    
+    def clone(self):
+        query = SelectQuery(self.model, self.query)
+        query.query_context = self.query_context
+        query._group_by = list(self._group_by)
+        query._having = list(self._having)
+        query._order_by = list(self._order_by)
+        query._pagination = self._pagination and tuple(self._pagination) or None
+        query._distinct = self._distinct
+        query._qr = self._qr
+        query._where = dict(self._where)
+        query._joins = list(self._joins)
+        return query
+    
+    @returns_clone
+    def paginate(self, page_num, paginate_by=20):
+        self._pagination = (page_num, paginate_by)
+    
+    def count(self):
+        tmp_pagination = self._pagination
+        self._pagination = None
+        
+        tmp_query = self.query
+        
+        if self.use_aliases():
+            self.query = 'COUNT(t1.%s)' % (self.model._meta.pk_name)
+        else:
+            self.query = 'COUNT(%s)' % (self.model._meta.pk_name)
+        
+        res = self.database.execute(*self.sql())
+        
+        self.query = tmp_query
+        self._pagination = tmp_pagination
+        
+        return res.fetchone()[0]
+    
+    @returns_clone
+    def group_by(self, clause):
+        model = self.query_context
+        
+        if isinstance(clause, basestring):
+            fields = (clause,)
+        elif isinstance(clause, (list, tuple)):
+            fields = clause
+        elif issubclass(clause, Model):
+            model = clause
+            fields = clause._meta.get_field_names()
+        
+        self._group_by.append((model, fields))
+    
+    @returns_clone
+    def having(self, clause):
+        self._having.append(clause)
+    
+    @returns_clone
+    def distinct(self):
+        self._distinct = True
+    
+    @returns_clone
+    def order_by(self, field_or_string):
+        if isinstance(field_or_string, tuple):
+            field_or_string, ordering = field_or_string
+        else:
+            ordering = 'ASC'
+        
+        self._order_by.append(
+            (self.query_context, field_or_string, ordering)
+        )
+
+    def parse_select_query(self, alias_map):
+        if isinstance(self.query, basestring):
+            if self.query in ('*', self.model._meta.pk_name) and self.use_aliases():
+                return '%s.%s' % (alias_map[self.model], self.query)
+            return self.query
+        elif isinstance(self.query, dict):
+            qparts = []
+            aggregates = []
+            for model, cols in self.query.iteritems():
+                alias = alias_map.get(model, '')
+                for col in cols:
+                    if isinstance(col, tuple):
+                        func, col, col_alias = col
+                        aggregates.append('%s(%s) AS %s' % \
+                            (func, self.combine_field(alias, col), col_alias)
+                        )
+                    else:
+                        qparts.append(self.combine_field(alias, col))
+            return ', '.join(qparts + aggregates)
+        else:
+            raise TypeError('Unknown type encountered parsing select query')
+    
+    def sql(self):
+        joins, where, where_data, alias_map = self.compile_where()
+        
+        table = self.model._meta.db_table
+
+        params = []
+        group_by = []
+        
+        if self.use_aliases():
+            table = '%s AS %s' % (table, alias_map[self.model])
+            for model, clause in self._group_by:
+                alias = alias_map[model]
+                for field in clause:
+                    group_by.append(self.combine_field(alias, field))
+        else:
+            group_by = [c[1] for c in self._group_by]
+
+        parsed_query = self.parse_select_query(alias_map)
+        
+        if self._distinct:
+            sel = 'SELECT DISTINCT'
+        else:
+            sel = 'SELECT'
+        
+        select = '%s %s FROM %s' % (sel, parsed_query, table)
+        joins = '\n'.join(joins)
+        where = ' AND '.join(where)
+        group_by = ', '.join(group_by)
+        having = ' AND '.join(self._having)
+        
+        order_by = []
+        for piece in self._order_by:
+            model, field, ordering = piece
+            if self.use_aliases() and field in model._meta.fields:
+                field = '%s.%s' % (alias_map[model], field)
+            order_by.append('%s %s' % (field, ordering))
+        
+        pieces = [select]
+        
+        if joins:
+            pieces.append(joins)
+        if where:
+            pieces.append('WHERE %s' % where)
+            params.extend(self.convert_where_to_params(where_data))
+        
+        if group_by:
+            pieces.append('GROUP BY %s' % group_by)
+        if having:
+            pieces.append('HAVING %s' % having)
+        if order_by:
+            pieces.append('ORDER BY %s' % ', '.join(order_by))
+        if self._pagination:
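+            # e.g. paginate(3, 20) renders as 'LIMIT 20 OFFSET 40'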
+            page, paginate_by = self._pagination
+            if page > 0:
+                page -= 1
+            pieces.append('LIMIT %d OFFSET %d' % (paginate_by, page * paginate_by))
+        
+        return ' '.join(pieces), params
+    
+    def execute(self):
+        if self._dirty or not self._qr:
+            try:
+                self._qr = QueryResultWrapper(self.model, self.raw_execute())
+                self._dirty = False
+                return self._qr
+            except EmptyResultException:
+                return iter([])
+        else:
+            # call the __iter__ method directly
+            return iter(self._qr)
+    
+    def __iter__(self):
+        return self.execute()
+
+
+class UpdateQuery(BaseQuery):
+    def __init__(self, model, **kwargs):
+        self.update_query = kwargs
+        super(UpdateQuery, self).__init__(model)
+    
+    def clone(self):
+        query = UpdateQuery(self.model, **self.update_query)
+        query._where = dict(self._where)
+        query._joins = list(self._joins)
+        return query
+    
+    def parse_update(self):
+        sets = {}
+        for k, v in self.update_query.iteritems():
+            try:
+                field = self.model._meta.get_field_by_name(k)
+            except AttributeError:
+                field = self.model._meta.get_related_field_by_name(k)
+                if field is None:
+                    raise
+            
+            sets[field.name] = field.db_value(v)
+        
+        return sets
+    
+    def sql(self):
+        joins, where, where_data, alias_map = self.compile_where()
+        set_statement = self.parse_update()
+
+        params = []
+        update_params = []
+
+        for k, v in set_statement.iteritems():
+            params.append(v)
+            update_params.append('%s=%s' % (k, self.interpolation))
+        
+        update = 'UPDATE %s SET %s' % (
+            self.model._meta.db_table, ', '.join(update_params))
+        where = ' AND '.join(where)
+        
+        pieces = [update]
+        
+        if where:
+            pieces.append('WHERE %s' % where)
+            params.extend(self.convert_where_to_params(where_data))
+        
+        return ' '.join(pieces), params
+    
+    def join(self, *args, **kwargs):
+        raise AttributeError('Update queries do not support JOINs in sqlite')
+    
+    def execute(self):
+        result = self.raw_execute()
+        return self.database.rows_affected(result)
+
+
+class DeleteQuery(BaseQuery):
+    def clone(self):
+        query = DeleteQuery(self.model)
+        query._where = dict(self._where)
+        query._joins = list(self._joins)
+        return query
+    
+    def sql(self):
+        joins, where, where_data, alias_map = self.compile_where()
+
+        params = []
+        
+        delete = 'DELETE FROM %s' % (self.model._meta.db_table)
+        where = ' AND '.join(where)
+        
+        pieces = [delete]
+        
+        if where:
+            pieces.append('WHERE %s' % where)
+            params.extend(self.convert_where_to_params(where_data))
+        
+        return ' '.join(pieces), params
+    
+    def join(self, *args, **kwargs):
+        raise AttributeError('Delete queries do not support JOINs in sqlite')
+    
+    def execute(self):
+        result = self.raw_execute()
+        return self.database.rows_affected(result)
+
+
+class InsertQuery(BaseQuery):
+    def __init__(self, model, **kwargs):
+        self.insert_query = kwargs
+        super(InsertQuery, self).__init__(model)
+    
+    def parse_insert(self):
+        cols = []
+        vals = []
+        for k, v in self.insert_query.iteritems():
+            field = self.model._meta.get_field_by_name(k)
+            cols.append(k)
+            vals.append(field.db_value(v))
+        
+        return cols, vals
+    
+    def sql(self):
+        cols, vals = self.parse_insert()
+        
+        insert = 'INSERT INTO %s (%s) VALUES (%s)' % (
+            self.model._meta.db_table,
+            ','.join(cols),
+            ','.join(self.interpolation for v in vals)
+        )
+        
+        return insert, vals
+    
+    def where(self, *args, **kwargs):
+        raise AttributeError('Insert queries do not support WHERE clauses')
+    
+    def join(self, *args, **kwargs):
+        raise AttributeError('Insert queries do not support JOINs')
+    
+    def execute(self):
+        result = self.raw_execute()
+        return self.database.last_insert_id(result, self.model)
+
+
+class Field(object):
+    db_field = ''
+    default = None
+    field_template = "%(column_type)s%(nullable)s"
+
+    def get_attributes(self):
+        return {}
+    
+    def __init__(self, null=False, db_index=False, *args, **kwargs):
+        self.null = null
+        self.db_index = db_index
+        self.attributes = self.get_attributes()
+        self.default = kwargs.get('default', None)
+        
+        kwargs['nullable'] = ternary(self.null, '', ' NOT NULL')
+        self.attributes.update(kwargs)
+    
+    def add_to_class(self, klass, name):
+        self.name = name
+        self.model = klass
+        setattr(klass, name, None)
+    
+    def render_field_template(self):
+        col_type = self.model._meta.database.column_for_field(self.db_field)
+        self.attributes['column_type'] = col_type
+        return self.field_template % self.attributes
+    
+    def to_sql(self):
+        rendered = self.render_field_template()
+        return '%s %s' % (self.name, rendered)
+    
+    def null_wrapper(self, value, default=None):
+        if (self.null and value is None) or default is None:
+            return value
+        return value or default
+    
+    def db_value(self, value):
+        return value
+    
+    def python_value(self, value):
+        return value
+    
+    def lookup_value(self, lookup_type, value):
+        return self.db_value(value)
+
+
+class CharField(Field):
+    db_field = 'string'
+    field_template = '%(column_type)s(%(max_length)d)%(nullable)s'
+    
+    def get_attributes(self):
+        return {'max_length': 255}
+    
+    def db_value(self, value):
+        if self.null and value is None:
+            return value
+        value = value or ''
+        return value[:self.attributes['max_length']]
+    
+    def lookup_value(self, lookup_type, value):
+        if lookup_type == 'contains':
+            return '*%s*' % self.db_value(value)
+        elif lookup_type == 'icontains':
+            return '%%%s%%' % self.db_value(value)
+        else:
+            return self.db_value(value)
+    
+
+class TextField(Field):
+    db_field = 'text'
+    
+    def db_value(self, value):
+        return self.null_wrapper(value, '')
+    
+    def lookup_value(self, lookup_type, value):
+        if lookup_type == 'contains':
+            return '*%s*' % self.db_value(value)
+        elif lookup_type == 'icontains':
+            return '%%%s%%' % self.db_value(value)
+        else:
+            return self.db_value(value)
+
+
+class DateTimeField(Field):
+    db_field = 'datetime'
+    
+    def python_value(self, value):
+        if isinstance(value, basestring):
+            value = value.rsplit('.', 1)[0]
+            return datetime(*time.strptime(value, '%Y-%m-%d %H:%M:%S')[:6])
+        return value
+
+
+class IntegerField(Field):
+    db_field = 'integer'
+    
+    def db_value(self, value):
+        return self.null_wrapper(value, 0)
+    
+    def python_value(self, value):
+        if value is not None:
+            return int(value)
+
+
+class BooleanField(IntegerField):
+    db_field = 'boolean'
+    
+    def db_value(self, value):
+        if value:
+            return 1
+        return 0
+    
+    def python_value(self, value):
+        return bool(value)
+
+
+class FloatField(Field):
+    db_field = 'float'
+    
+    def db_value(self, value):
+        return self.null_wrapper(value, 0.0)
+    
+    def python_value(self, value):
+        if value is not None:
+            return float(value)
+
+
+class PrimaryKeyField(IntegerField):
+    db_field = 'primary_key'
+    field_template = "%(column_type)s NOT NULL PRIMARY KEY"
+
+
+class ForeignRelatedObject(object):    
+    def __init__(self, to, name):
+        self.field_name = name
+        self.to = to
+        self.cache_name = '_cache_%s' % name
+    
+    def __get__(self, instance, instance_type=None):
+        if not getattr(instance, self.cache_name, None):
+            id = getattr(instance, self.field_name, 0)
+            qr = self.to.select().where(**{self.to._meta.pk_name: id}).execute()
+            setattr(instance, self.cache_name, qr.next())
+        return getattr(instance, self.cache_name)
+    
+    def __set__(self, instance, obj):
+        assert isinstance(obj, self.to), "Cannot assign %s, invalid type" % obj
+        setattr(instance, self.field_name, obj.get_pk())
+        setattr(instance, self.cache_name, obj)
+
+
+class ReverseForeignRelatedObject(object):
+    def __init__(self, related_model, name):
+        self.field_name = name
+        self.related_model = related_model
+    
+    def __get__(self, instance, instance_type=None):
+        query = {self.field_name: instance.get_pk()}
+        qr = self.related_model.select().where(**query)
+        return qr
+
+
+class ForeignKeyField(IntegerField):
+    db_field = 'foreign_key'
+    field_template = '%(column_type)s%(nullable)s REFERENCES %(to_table)s (%(to_pk)s)'
+    
+    def __init__(self, to, null=False, related_name=None, *args, **kwargs):
+        self.to = to
+        self.related_name = related_name
+        kwargs.update({
+            'to_table': to._meta.db_table,
+            'to_pk': to._meta.pk_name
+        })
+        super(ForeignKeyField, self).__init__(null=null, *args, **kwargs)
+    
+    def add_to_class(self, klass, name):
+        self.descriptor = name
+        self.name = name + '_id'
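+        # a field declared as e.g. 'blog' stores its raw id in 'blog_id',
+        # while the 'blog' attribute resolves to the related instance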
+        self.model = klass
+        
+        if self.related_name is None:
+            self.related_name = klass._meta.db_table + '_set'
+        
+        klass._meta.rel_fields[name] = self.name
+        setattr(klass, self.descriptor, ForeignRelatedObject(self.to, self.name))
+        setattr(klass, self.name, None)
+        
+        reverse_rel = ReverseForeignRelatedObject(klass, self.name)
+        setattr(self.to, self.related_name, reverse_rel)
+    
+    def lookup_value(self, lookup_type, value):
+        if isinstance(value, Model):
+            return value.get_pk()
+        return value or None
+    
+    def db_value(self, value):
+        if isinstance(value, Model):
+            return value.get_pk()
+        return value
+
+
+# define a default database object in the module scope
+database = SqliteDatabase(DATABASE_NAME)
+
+
+class BaseModelOptions(object):
+    def __init__(self, model_class, options=None):
+        # configurable options
+        options = options or {'database': database}
+        for k, v in options.items():
+            setattr(self, k, v)
+        
+        self.rel_fields = {}
+        self.fields = {}
+        self.model_class = model_class
+    
+    def get_field_names(self):
+        fields = [self.pk_name]
+        fields.extend([f for f in sorted(self.fields.keys()) if f != self.pk_name])
+        return fields
+    
+    def get_field_by_name(self, name):
+        if name in self.fields:
+            return self.fields[name]
+        raise AttributeError('Field named %s not found' % name)
+    
+    def get_related_field_by_name(self, name):
+        if name in self.rel_fields:
+            return self.fields[self.rel_fields[name]]
+    
+    def get_related_field_for_model(self, model, name=None):
+        for field in self.fields.values():
+            if isinstance(field, ForeignKeyField) and field.to == model:
+                if name is None or name == field.name or name == field.descriptor:
+                    return field
+    
+    def get_reverse_related_field_for_model(self, model, name=None):
+        for field in model._meta.fields.values():
+            if isinstance(field, ForeignKeyField) and field.to == self.model_class:
+                if name is None or name == field.name or name == field.descriptor:
+                    return field
+    
+    def rel_exists(self, model):
+        return self.get_related_field_for_model(model) or \
+               self.get_reverse_related_field_for_model(model)
+
+
+class BaseModel(type):
+    inheritable_options = ['database']
+    
+    def __new__(cls, name, bases, attrs):
+        cls = super(BaseModel, cls).__new__(cls, name, bases, attrs)
+
+        attr_dict = {}
+        meta = attrs.pop('Meta', None)
+        if meta:
+            attr_dict = meta.__dict__
+        
+        for b in bases:
+            base_meta = getattr(b, '_meta', None)
+            if not base_meta:
+                continue
+            
+            for (k, v) in base_meta.__dict__.items():
+                if k in cls.inheritable_options and k not in attr_dict:
+                    attr_dict[k] = v
+        
+        _meta = BaseModelOptions(cls, attr_dict)
+        
+        if not hasattr(_meta, 'db_table'):
+            _meta.db_table = re.sub('[^a-z]+', '_', cls.__name__.lower())
+
+        setattr(cls, '_meta', _meta)
+        
+        _meta.pk_name = None
+
+        for name, attr in cls.__dict__.items():
+            if isinstance(attr, Field):
+                attr.add_to_class(cls, name)
+                _meta.fields[attr.name] = attr
+                if isinstance(attr, PrimaryKeyField):
+                    _meta.pk_name = attr.name
+        
+        if _meta.pk_name is None:
+            _meta.pk_name = 'id'
+            pk = PrimaryKeyField()
+            pk.add_to_class(cls, _meta.pk_name)
+            _meta.fields[_meta.pk_name] = pk
+
+        _meta.model_name = cls.__name__
+                
+        if hasattr(cls, '__unicode__'):
+            setattr(cls, '__repr__', lambda self: '<%s: %s>' % (
+                _meta.model_name, self.__unicode__()))
+
+        exception_class = type('%sDoesNotExist' % _meta.model_name, (DoesNotExist,), {})
+        cls.DoesNotExist = exception_class
+        
+        return cls
+
+
+class Model(object):
+    __metaclass__ = BaseModel
+    
+    def __init__(self, *args, **kwargs):
+        for k, v in kwargs.items():
+            setattr(self, k, v)
+    
+    def __eq__(self, other):
+        return other.__class__ == self.__class__ and \
+               self.get_pk() and \
+               other.get_pk() == self.get_pk()
+    
+    def get_field_dict(self):
+        def get_field_val(field):
+            field_value = getattr(self, field.name)
+            if not self.get_pk() and field_value is None and field.default is not None:
+                if callable(field.default):
+                    field_value = field.default()
+                else:
+                    field_value = field.default
+                setattr(self, field.name, field_value)
+            return (field.name, field_value)
+        
+        pairs = map(get_field_val, self._meta.fields.values())
+        return dict(pairs)
+    
+    @classmethod
+    def create_table(cls):
+        cls._meta.database.create_table(cls)
+        
+        for field_name, field_obj in cls._meta.fields.items():
+            if isinstance(field_obj, PrimaryKeyField):
+                cls._meta.database.create_index(cls, field_obj.name, True)
+            elif isinstance(field_obj, ForeignKeyField):
+                cls._meta.database.create_index(cls, field_obj.name)
+            elif field_obj.db_index:
+                cls._meta.database.create_index(cls, field_obj.name)
+    
+    @classmethod
+    def drop_table(cls, fail_silently=False):
+        cls._meta.database.drop_table(cls, fail_silently)
+    
+    @classmethod
+    def select(cls, query=None):
+        return SelectQuery(cls, query)
+    
+    @classmethod
+    def update(cls, **query):
+        return UpdateQuery(cls, **query)
+    
+    @classmethod
+    def insert(cls, **query):
+        return InsertQuery(cls, **query)
+    
+    @classmethod
+    def delete(cls, **query):
+        return DeleteQuery(cls, **query)
+    
+    @classmethod
+    def raw(cls, sql, *params):
+        return RawQuery(cls, sql, *params)
+
+    @classmethod
+    def create(cls, **query):
+        inst = cls(**query)
+        inst.save()
+        return inst
+
+    @classmethod
+    def get_or_create(cls, **query):
+        try:
+            inst = cls.get(**query)
+        except cls.DoesNotExist:
+            inst = cls.create(**query)
+        return inst
+    
+    @classmethod            
+    def get(cls, *args, **kwargs):
+        query = cls.select().where(*args, **kwargs).paginate(1, 1)
+        try:
+            return query.execute().next()
+        except StopIteration:
+            raise cls.DoesNotExist('instance matching query does not exist:\nSQL: %s\nPARAMS: %s' % (
+                query.sql()
+            ))
+    
+    def get_pk(self):
+        return getattr(self, self._meta.pk_name, None)
+    
+    def save(self):
+        field_dict = self.get_field_dict()
+        field_dict.pop(self._meta.pk_name)
+        if self.get_pk():
+            update = self.update(
+                **field_dict
+            ).where(**{self._meta.pk_name: self.get_pk()})
+            update.execute()
+        else:
+            insert = self.insert(**field_dict)
+            new_pk = insert.execute()
+            setattr(self, self._meta.pk_name, new_pk)
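
A minimal usage sketch of the Model/Field API committed above.  The Blog and
Entry models are hypothetical names, and the behavior is inferred from this
changeset's code rather than tested:

    from datetime import datetime

    class Blog(Model):
        # No explicit PrimaryKeyField, so BaseModel adds an 'id' pk.
        created = DateTimeField()
        active = BooleanField()

    class Entry(Model):
        # Stored as entry.blog_id; the default related_name gives
        # Blog.entry_set as the reverse accessor.
        blog = ForeignKeyField(Blog)
        rating = FloatField()

    Blog.create_table()    # create_table() also indexes pk and FK columns
    Entry.create_table()

    b = Blog.create(created=datetime.now(), active=True)
    e = Entry.create(blog=b, rating=4.5)
    print e.blog.active                   # ForeignRelatedObject caches b
    for entry in b.entry_set.execute():   # reverse FK returns a SelectQuery
        print entry.rating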


http://bitbucket.org/yt_analysis/yt/changeset/afb375fb513c/
changeset:   afb375fb513c
branch:      yt
user:        samskillman
date:        2011-09-08 01:21:17
summary:     Some work on the GDF data format frontend.
affected #:  6 files (-1 bytes)

--- a/yt/frontends/gdf/api.py	Sat Sep 03 11:48:19 2011 -0600
+++ b/yt/frontends/gdf/api.py	Wed Sep 07 17:21:17 2011 -0600
@@ -29,14 +29,15 @@
 """
 
 from .data_structures import \
-      ChomboGrid, \
-      ChomboHierarchy, \
-      ChomboStaticOutput
+      GDFGrid, \
+      GDFHierarchy, \
+      GDFStaticOutput
 
 from .fields import \
-      ChomboFieldContainer, \
-      ChomboFieldInfo, \
-      add_chombo_field
+      GDFFieldContainer, \
+      GDFFieldInfo, \
+      add_gdf_field
 
 from .io import \
-      IOHandlerChomboHDF5
+      IOHandlerGDFHDF5
+


--- a/yt/frontends/gdf/data_structures.py	Sat Sep 03 11:48:19 2011 -0600
+++ b/yt/frontends/gdf/data_structures.py	Wed Sep 07 17:21:17 2011 -0600
@@ -24,6 +24,9 @@
   along with this program.  If not, see <http://www.gnu.org/licenses/>.
 """
 
+import h5py
+import numpy as na
+import weakref
 from yt.funcs import *
 from yt.data_objects.grid_patch import \
            AMRGridPatch
@@ -33,6 +36,7 @@
            StaticOutput
 
 from .fields import GDFFieldContainer
+import pdb
 
 class GDFGrid(AMRGridPatch):
     _id_offset = 0
@@ -58,6 +62,7 @@
             self.dds = na.array((RE-LE)/self.ActiveDimensions)
         if self.pf.dimensionality < 2: self.dds[1] = 1.0
         if self.pf.dimensionality < 3: self.dds[2] = 1.0
+        # pdb.set_trace()
         self.data['dx'], self.data['dy'], self.data['dz'] = self.dds
 
 class GDFHierarchy(AMRHierarchy):
@@ -66,6 +71,7 @@
     
     def __init__(self, pf, data_style='grid_data_format'):
         self.parameter_file = weakref.proxy(pf)
+        self.data_style = data_style
         # for now, the hierarchy file is the parameter file!
         self.hierarchy_filename = self.parameter_file.parameter_filename
         self.directory = os.path.dirname(self.hierarchy_filename)
@@ -78,8 +84,7 @@
         pass
 
     def _detect_fields(self):
-        ncomp = int(self._fhandle['/'].attrs['num_components'])
-        self.field_list = [c[1] for c in self._fhandle['/'].attrs.listitems()[-ncomp:]]
+        self.field_list = self._fhandle['field_types'].keys()
     
     def _setup_classes(self):
         dd = self._get_data_reader_dict()
@@ -87,9 +92,7 @@
         self.object_types.sort()
 
     def _count_grids(self):
-        self.num_grids = 0
-        for lev in self._levels:
-            self.num_grids += self._fhandle[lev]['Processors'].len()
+        self.num_grids = self._fhandle['/grid_parent_id'].shape[0]
         
     def _parse_hierarchy(self):
         f = self._fhandle # shortcut
@@ -98,24 +101,22 @@
         # 'Chombo_global'
         levels = f.listnames()[1:]
         self.grids = []
-        i = 0
-        for lev in levels:
-            level_number = int(re.match('level_(\d+)',lev).groups()[0])
-            boxes = f[lev]['boxes'].value
-            dx = f[lev].attrs['dx']
-            for level_id, box in enumerate(boxes):
-                si = na.array([box['lo_%s' % ax] for ax in 'ijk'])
-                ei = na.array([box['hi_%s' % ax] for ax in 'ijk'])
-                pg = self.grid(len(self.grids),self,level=level_number,
-                               start = si, stop = ei)
-                self.grids.append(pg)
-                self.grids[-1]._level_id = level_id
-                self.grid_left_edge[i] = dx*si.astype(self.float_type)
-                self.grid_right_edge[i] = dx*(ei.astype(self.float_type) + 1)
-                self.grid_particle_count[i] = 0
-                self.grid_dimensions[i] = ei - si + 1
-                i += 1
+        for i, grid in enumerate(f['data'].keys()):
+            self.grids.append(self.grid(i, self, f['grid_level'][i],
+                                        f['grid_left_index'][i],
+                                        f['grid_dimensions'][i]))
+            self.grids[-1]._level_id = f['grid_level'][i]
+
+        dx = (self.parameter_file.domain_right_edge-
+              self.parameter_file.domain_left_edge)/self.parameter_file.domain_dimensions
+        dx = dx/self.parameter_file.refine_by**(f['grid_level'][:])
+
+        self.grid_left_edge = self.parameter_file.domain_left_edge + dx*f['grid_left_index'][:]
+        self.grid_dimensions = f['grid_dimensions'][:]
+        self.grid_right_edge = self.grid_left_edge + dx*self.grid_dimensions
+        self.grid_particle_count = f['grid_particle_count'][:]
         self.grids = na.array(self.grids, dtype='object')
+        # pdb.set_trace()
 
     def _populate_grid_objects(self):
         for g in self.grids:
@@ -147,11 +148,9 @@
     def __init__(self, filename, data_style='grid_data_format',
                  storage_filename = None):
         StaticOutput.__init__(self, filename, data_style)
-        self._handle = h5py.File(self.filename, "r")
         self.storage_filename = storage_filename
+        self.filename = filename
         self.field_info = self._fieldinfo_class()
-        self._handle.close()
-        del self._handle
         
     def _set_units(self):
         """
@@ -168,16 +167,20 @@
         self.time_units['years'] = seconds / (365*3600*24.0)
         self.time_units['days']  = seconds / (3600*24.0)
         # This should be improved.
+        self._handle = h5py.File(self.parameter_filename, "r")
         for field_name in self._handle["/field_types"]:
-            self.units[field_name] = self._handle["/%s/field_to_cgs" % field_name]
-
+            self.units[field_name] = self._handle["/field_types/%s" % field_name].attrs['field_to_cgs']
+        del self._handle
+        
     def _parse_parameter_file(self):
+        self._handle = h5py.File(self.parameter_filename, "r")
         sp = self._handle["/simulation_parameters"].attrs
         self.domain_left_edge = sp["domain_left_edge"][:]
         self.domain_right_edge = sp["domain_right_edge"][:]
-        self.refine_by = sp["refine_by"][:]
-        self.dimensionality = sp["dimensionality"][:]
-        self.current_time = sp["current_time"][:]
+        self.domain_dimensions = sp["domain_dimensions"][:]
+        self.refine_by = sp["refine_by"]
+        self.dimensionality = sp["dimensionality"]
+        self.current_time = sp["current_time"]
         self.unique_identifier = sp["unique_identifier"]
         self.cosmological_simulation = sp["cosmological_simulation"]
         if sp["num_ghost_zones"] != 0: raise RuntimeError
@@ -191,7 +194,8 @@
         else:
             self.current_redshift = self.omega_lambda = self.omega_matter = \
                 self.hubble_constant = self.cosmological_simulation = 0.0
-        
+        del self._handle
+            
     @classmethod
     def _is_valid(self, *args, **kwargs):
         try:


--- a/yt/frontends/gdf/fields.py	Sat Sep 03 11:48:19 2011 -0600
+++ b/yt/frontends/gdf/fields.py	Wed Sep 07 17:21:17 2011 -0600
@@ -1,5 +1,5 @@
 """
-Chombo-specific fields
+GDF-specific fields
 
 Author: J. S. Oishi <jsoishi at gmail.com>
 Affiliation: KIPAC/SLAC/Stanford
@@ -32,82 +32,45 @@
     ValidateGridType
 import yt.data_objects.universal_fields
 
-class ChomboFieldContainer(CodeFieldInfoContainer):
+class GDFFieldContainer(CodeFieldInfoContainer):
     _shared_state = {}
     _field_list = {}
-ChomboFieldInfo = ChomboFieldContainer()
-add_chombo_field = ChomboFieldInfo.add_field
+GDFFieldInfo = GDFFieldContainer()
+add_gdf_field = GDFFieldInfo.add_field
 
-add_field = add_chombo_field
+add_field = add_gdf_field
 
 add_field("density", function=lambda a,b: None, take_log=True,
           validators = [ValidateDataField("density")],
           units=r"\rm{g}/\rm{cm}^3")
 
-ChomboFieldInfo["density"]._projected_units =r"\rm{g}/\rm{cm}^2"
+GDFFieldInfo["density"]._projected_units =r"\rm{g}/\rm{cm}^2"
 
-add_field("X-momentum", function=lambda a,b: None, take_log=False,
-          validators = [ValidateDataField("X-Momentum")],
-          units=r"",display_name=r"B_x")
-ChomboFieldInfo["X-momentum"]._projected_units=r""
+add_field("specific_energy", function=lambda a,b: None, take_log=True,
+          validators = [ValidateDataField("specific_energy")],
+          units=r"\rm{erg}/\rm{g}")
 
-add_field("Y-momentum", function=lambda a,b: None, take_log=False,
-          validators = [ValidateDataField("Y-Momentum")],
-          units=r"",display_name=r"B_y")
-ChomboFieldInfo["Y-momentum"]._projected_units=r""
+add_field("velocity_x", function=lambda a,b: None, take_log=True,
+          validators = [ValidateDataField("velocity_x")],
+          units=r"\rm{cm}/\rm{s}")
 
-add_field("Z-momentum", function=lambda a,b: None, take_log=False,
-          validators = [ValidateDataField("Z-Momentum")],
-          units=r"",display_name=r"B_z")
-ChomboFieldInfo["Z-momentum"]._projected_units=r""
+add_field("velocity_y", function=lambda a,b: None, take_log=True,
+          validators = [ValidateDataField("velocity_y")],
+          units=r"\rm{cm}/\rm{s}")
 
-add_field("X-magnfield", function=lambda a,b: None, take_log=False,
-          validators = [ValidateDataField("X-Magnfield")],
-          units=r"",display_name=r"B_x")
-ChomboFieldInfo["X-magnfield"]._projected_units=r""
+add_field("velocity_z", function=lambda a,b: None, take_log=True,
+          validators = [ValidateDataField("velocity_z")],
+          units=r"\rm{cm}/\rm{s}")
 
-add_field("Y-magnfield", function=lambda a,b: None, take_log=False,
-          validators = [ValidateDataField("Y-Magnfield")],
-          units=r"",display_name=r"B_y")
-ChomboFieldInfo["Y-magnfield"]._projected_units=r""
+add_field("mag_field_x", function=lambda a,b: None, take_log=True,
+          validators = [ValidateDataField("mag_field_x")],
+          units=r"\rm{cm}/\rm{s}")
 
-add_field("Z-magnfield", function=lambda a,b: None, take_log=False,
-          validators = [ValidateDataField("Z-Magnfield")],
-          units=r"",display_name=r"B_z")
-ChomboFieldInfo["Z-magnfield"]._projected_units=r""
+add_field("mag_field_y", function=lambda a,b: None, take_log=True,
+          validators = [ValidateDataField("mag_field_y")],
+          units=r"\rm{cm}/\rm{s}")
 
-def _MagneticEnergy(field,data):
-    return (data["X-magnfield"]**2 +
-            data["Y-magnfield"]**2 +
-            data["Z-magnfield"]**2)/2.
-add_field("MagneticEnergy", function=_MagneticEnergy, take_log=True,
-          units=r"",display_name=r"B^2/8\pi")
-ChomboFieldInfo["MagneticEnergy"]._projected_units=r""
-
-def _xVelocity(field, data):
-    """generate x-velocity from x-momentum and density
-
-    """
-    return data["X-momentum"]/data["density"]
-add_field("x-velocity",function=_xVelocity, take_log=False,
-          units=r'\rm{cm}/\rm{s}')
-
-def _yVelocity(field,data):
-    """generate y-velocity from y-momentum and density
-
-    """
-    #try:
-    #    return data["xvel"]
-    #except KeyError:
-    return data["Y-momentum"]/data["density"]
-add_field("y-velocity",function=_yVelocity, take_log=False,
-          units=r'\rm{cm}/\rm{s}')
-
-def _zVelocity(field,data):
-    """generate z-velocity from z-momentum and density
-
-    """
-    return data["Z-momentum"]/data["density"]
-add_field("z-velocity",function=_zVelocity, take_log=False,
-          units=r'\rm{cm}/\rm{s}')
+add_field("mag_field_z", function=lambda a,b: None, take_log=True,
+          validators = [ValidateDataField("mag_field_z")],
+          units=r"\rm{cm}/\rm{s}")
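
An aside: the mag_field_* entries above reuse the \rm{cm}/\rm{s} velocity
units, which looks like a copy-and-paste slip rather than an intentional
choice.  Registering another on-disk GDF field follows the same pattern; a
sketch (the temperature field and its units are hypothetical, not part of
this commit):

    add_field("temperature", function=lambda a,b: None, take_log=True,
              validators = [ValidateDataField("temperature")],
              units=r"\rm{K}")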
     


--- a/yt/frontends/gdf/io.py	Sat Sep 03 11:48:19 2011 -0600
+++ b/yt/frontends/gdf/io.py	Wed Sep 07 17:21:17 2011 -0600
@@ -25,44 +25,48 @@
 """
 from yt.utilities.io_handler import \
            BaseIOHandler
+import h5py
 
-class IOHandlerChomboHDF5(BaseIOHandler):
-    _data_style = "chombo_hdf5"
+class IOHandlerGDFHDF5(BaseIOHandler):
+    _data_style = "grid_data_format"
     _offset_string = 'data:offsets=0'
     _data_string = 'data:datatype=0'
 
     def _field_dict(self,fhandle):
-        ncomp = int(fhandle['/'].attrs['num_components'])
-        temp =  fhandle['/'].attrs.listitems()[-ncomp:]
-        val, keys = zip(*temp)
-        val = [int(re.match('component_(\d+)',v).groups()[0]) for v in val]
+        keys = fhandle['field_types'].keys()
+        val = fhandle['field_types'].keys()
+        # ncomp = int(fhandle['/'].attrs['num_components'])
+        # temp =  fhandle['/'].attrs.listitems()[-ncomp:]
+        # val, keys = zip(*temp)
+        # val = [int(re.match('component_(\d+)',v).groups()[0]) for v in val]
         return dict(zip(keys,val))
         
     def _read_field_names(self,grid):
         fhandle = h5py.File(grid.filename,'r')
-        ncomp = int(fhandle['/'].attrs['num_components'])
-
-        return [c[1] for c in f['/'].attrs.listitems()[-ncomp:]]
+        return fhandle['field_types'].keys()
     
     def _read_data_set(self,grid,field):
         fhandle = h5py.File(grid.hierarchy.hierarchy_filename,'r')
+        return fhandle['/data/grid_%010i/'%grid.id+field][:]
+        # field_dict = self._field_dict(fhandle)
+        # lstring = 'level_%i' % grid.Level
+        # lev = fhandle[lstring]
+        # dims = grid.ActiveDimensions
+        # boxsize = dims.prod()
+        
+        # grid_offset = lev[self._offset_string][grid._level_id]
+        # start = grid_offset+field_dict[field]*boxsize
+        # stop = start + boxsize
+        # data = lev[self._data_string][start:stop]
 
-        field_dict = self._field_dict(fhandle)
-        lstring = 'level_%i' % grid.Level
-        lev = fhandle[lstring]
-        dims = grid.ActiveDimensions
-        boxsize = dims.prod()
-        
-        grid_offset = lev[self._offset_string][grid._level_id]
-        start = grid_offset+field_dict[field]*boxsize
-        stop = start + boxsize
-        data = lev[self._data_string][start:stop]
-
-        return data.reshape(dims, order='F')
+        # return data.reshape(dims, order='F')
                                           
 
     def _read_data_slice(self, grid, field, axis, coord):
         sl = [slice(None), slice(None), slice(None)]
         sl[axis] = slice(coord, coord + 1)
-        return self._read_data_set(grid,field)[sl]
+        fhandle = h5py.File(grid.hierarchy.hierarchy_filename,'r')
+        return fhandle['/data/grid_%010i/'%grid.id+field][:][sl]
 
+    # return self._read_data_set(grid,field)[sl]
+


--- a/yt/frontends/setup.py	Sat Sep 03 11:48:19 2011 -0600
+++ b/yt/frontends/setup.py	Wed Sep 07 17:21:17 2011 -0600
@@ -6,6 +6,7 @@
     config = Configuration('frontends',parent_package,top_path)
     config.make_config_py() # installs __config__.py
     #config.make_svn_version_py()
+    config.add_subpackage("gdf")
     config.add_subpackage("chombo")
     config.add_subpackage("enzo")
     config.add_subpackage("flash")


--- a/yt/mods.py	Sat Sep 03 11:48:19 2011 -0600
+++ b/yt/mods.py	Wed Sep 07 17:21:17 2011 -0600
@@ -77,6 +77,9 @@
 from yt.frontends.chombo.api import \
     ChomboStaticOutput, ChomboFieldInfo, add_chombo_field
 
+from yt.frontends.gdf.api import \
+    GDFStaticOutput, GDFFieldInfo, add_gdf_field
+
 from yt.frontends.art.api import \
     ARTStaticOutput, ARTFieldInfo, add_art_field
 


http://bitbucket.org/yt_analysis/yt/changeset/431d39a054f7/
changeset:   431d39a054f7
branch:      yt
user:        samskillman
date:        2011-09-08 01:21:50
summary:     Merging.
affected #:  1 file (-1 bytes)

--- a/yt/visualization/eps_writer.py	Wed Sep 07 17:21:17 2011 -0600
+++ b/yt/visualization/eps_writer.py	Wed Sep 07 17:21:50 2011 -0600
@@ -719,7 +719,7 @@
 def multiplot(ncol, nrow, yt_plots=None, images=None, xranges=None,
               yranges=None, xlabels=None, ylabels=None, colorbars=None,
               shrink_cb=0.95, figsize=(8,8), margins=(0,0), titles=None,
-              savefig=None, yt_nocbar=False, bare_axes=False,
+              savefig=None, format="eps", yt_nocbar=False, bare_axes=False,
               cb_flags=None):
     r"""Convenience routine to create a multi-panel figure from yt plots or
     JPEGs.  The images are first placed from the origin, and then
@@ -756,6 +756,8 @@
         Titles that are placed in textboxes in each panel.
     savefig : string
         Name of the saved file without the extension.
+    format : string
+        File format of the figure. eps or pdf accepted.
     yt_nocbar : boolean
         Flag to indicate whether or not colorbars are created.
     bare_axes : boolean
@@ -908,7 +910,7 @@
                                        shrink=shrink_cb)
 
     if savefig != None:
-        d.save_fig(savefig)
+        d.save_fig(savefig, format=format)
 
     return d
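
A usage sketch of the new keyword (the plots list and output name are
hypothetical): this writes the multi-panel figure to four_panel.pdf instead
of the EPS default.

    d = multiplot(2, 2, yt_plots=plots, savefig="four_panel", format="pdf")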
 


http://bitbucket.org/yt_analysis/yt/changeset/5eede6d86687/
changeset:   5eede6d86687
branch:      yt
user:        MatthewTurk
date:        2011-09-09 13:27:11
summary:     Fixing "unitary" units and __repr__ for GDF frontend.
affected #:  1 file (-1 bytes)

--- a/yt/frontends/gdf/data_structures.py	Wed Sep 07 17:21:50 2011 -0600
+++ b/yt/frontends/gdf/data_structures.py	Fri Sep 09 07:27:11 2011 -0400
@@ -162,7 +162,7 @@
             self._parse_parameter_file()
         self.time_units['1'] = 1
         self.units['1'] = 1.0
-        self.units['unitary'] = 1.0 / (self.domain_right_edge - self.domain_right_edge).max()
+        self.units['unitary'] = 1.0 / (self.domain_right_edge - self.domain_left_edge).max()
         seconds = 1
         self.time_units['years'] = seconds / (365*3600*24.0)
         self.time_units['days']  = seconds / (3600*24.0)
@@ -206,4 +206,6 @@
             pass
         return False
 
-
+    def __repr__(self):
+        return self.basename.rsplit(".", 1)[0]
+        
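
The old expression subtracted domain_right_edge from itself, so the
'unitary' conversion factor was 1.0/0.0.  With the fix it is the reciprocal
of the widest domain extent; a small worked example with a hypothetical
domain:

    import numpy as na

    domain_left_edge = na.zeros(3)
    domain_right_edge = na.array([4.0, 2.0, 2.0])
    unitary = 1.0 / (domain_right_edge - domain_left_edge).max()
    # unitary == 0.25, so the full 4-code-unit box width maps to 1.0
    # in 'unitary' units.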


http://bitbucket.org/yt_analysis/yt/changeset/78449a855131/
changeset:   78449a855131
branch:      deliberate_fields
user:        MatthewTurk
date:        2011-09-09 16:33:52
summary:     Merging into deliberate_fields the last couple of weeks of development
affected #:  244 files (-1 bytes)

--- a/MANIFEST.in	Sat Aug 13 17:51:17 2011 -0600
+++ b/MANIFEST.in	Fri Sep 09 10:33:52 2011 -0400
@@ -1,1 +1,3 @@
+include distribute_setup.py
 recursive-include yt/gui/reason/html/ *.html *.png *.ico *.js
+recursive-include yt/ *.pyx *.pxd *.hh *.h README* 


--- a/README	Sat Aug 13 17:51:17 2011 -0600
+++ b/README	Fri Sep 09 10:33:52 2011 -0400
@@ -5,8 +5,8 @@
 
 Full documentation and a user community can be found at:
 
-http://yt.enzotools.org/
-http://yt.enzotools.org/doc/
+http://yt-project.org/
+http://yt-project.org/doc/
 
 If you have used Python before, and are comfortable with installing packages,
 you should find the setup.py script fairly straightforward: simply execute
@@ -20,6 +20,6 @@
 interested in helping out, see the developer documentation in
 doc/how_to_develop_yt.txt or at:
 
-http://yt.enzotools.org/doc/advanced/developing.html
+http://yt-project.org/doc/advanced/developing.html
 
 Enjoy!


--- a/distribute_setup.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/distribute_setup.py	Fri Sep 09 10:33:52 2011 -0400
@@ -46,7 +46,7 @@
             args = [quote(arg) for arg in args]
         return os.spawnl(os.P_WAIT, sys.executable, *args) == 0
 
-DEFAULT_VERSION = "0.6.14"
+DEFAULT_VERSION = "0.6.21"
 DEFAULT_URL = "http://pypi.python.org/packages/source/d/distribute/"
 SETUPTOOLS_FAKED_VERSION = "0.6c11"
 


--- a/doc/README	Sat Aug 13 17:51:17 2011 -0600
+++ b/doc/README	Fri Sep 09 10:33:52 2011 -0400
@@ -1,7 +1,7 @@
 This directory contains the compiled yt documentation.  Development of the
 documentation happens in a mercurial repository, yt-doc, available at:
 
-http://hg.enzotools.org/yt-doc/
+http://hg.yt-project.org/yt-doc/
 
 It's written to be used with Sphinx, a tool designed for writing Python
 documentation.  Sphinx is available at this URL:
@@ -11,11 +11,11 @@
 All of the pre-built HTML files, accessible with any web browser, are available
 in the build/ directory, as well as at:
 
-http://yt.enzotools.org/doc/index.html
+http://yt-project.org/doc/index.html
 
 You can also download a copy of the documentation and unzip it right here:
 
-wget http://yt.enzotools.org/doc/download.zip
+wget http://yt-project.org/doc/download.zip
 unzip download.zip
 
 Then open index.html with your favorite web browser, and be off!


--- a/doc/how_to_develop_yt.txt	Sat Aug 13 17:51:17 2011 -0600
+++ b/doc/how_to_develop_yt.txt	Fri Sep 09 10:33:52 2011 -0400
@@ -31,10 +31,10 @@
 --------------------------
 
 yt is hosted on BitBucket, and you can see all of the yt repositories at
-http://hg.enzotools.org/ .  With the yt installation script you should have a
+http://hg.yt-project.org/ .  With the yt installation script you should have a
 copy of Mercurial.  You can clone the repository like so:
 
-   $ hg clone http://hg.enzotools.org/yt/
+   $ hg clone http://hg.yt-project.org/yt/
 
 You can update to any branch or revision by executing the command:
 
@@ -54,7 +54,7 @@
 You can submit changes a couple different ways, but the easiest is to use the
 "fork" mechanism on BitBucket.  Just go here:
 
-http://hg.enzotools.org/yt/fork
+http://hg.yt-project.org/yt/fork
 
 and you're all set, ready to go.  You'll have to either clone a new copy of the
 repository or edit .hg/hgrc to point to the location of your new fork, first,
@@ -62,7 +62,7 @@
 
 When you're ready to submit them to the main repository, simply go to:
 
-http://hg.enzotools.org/yt/fork
+http://hg.yt-project.org/yt/fork
 
 Make sure you notify "yt_analysis" and put in a little description.  That'll
 notify the core developers that you've got something ready to submit, and we
@@ -131,7 +131,7 @@
 The next commit and all subsequent commits will be contained within that named
 branch.  At this point, add your branch here:
 
-http://yt.enzotools.org/wiki/ExistingBranches
+http://yt-project.org/wiki/ExistingBranches
 
 To merge changes in from another branch, you would execute:
 


--- a/doc/install_script.sh	Sat Aug 13 17:51:17 2011 -0600
+++ b/doc/install_script.sh	Fri Sep 09 10:33:52 2011 -0400
@@ -269,10 +269,10 @@
 
 function get_enzotools
 {
-    echo "Downloading $1 from yt.enzotools.org"
+    echo "Downloading $1 from yt-project.org"
     [ -e $1 ] && return
-    wget -nv "http://yt.enzotools.org/dependencies/$1" || do_exit
-    wget -nv "http://yt.enzotools.org/dependencies/$1.md5" || do_exit
+    wget -nv "http://yt-project.org/dependencies/$1" || do_exit
+    wget -nv "http://yt-project.org/dependencies/$1.md5" || do_exit
     ( which md5sum &> /dev/null ) || return # return if we don't have md5sum
     ( md5sum -c $1.md5 2>&1 ) 1>> ${LOG_FILE} || do_exit
 }
@@ -306,7 +306,7 @@
 get_enzotools mercurial-1.8.1.tar.gz
 get_enzotools ipython-0.10.tar.gz
 get_enzotools h5py-1.3.1.tar.gz
-get_enzotools Cython-0.14.tar.gz
+get_enzotools Cython-0.15.tar.gz
 get_enzotools Forthon-0.8.4.tar.gz
 get_enzotools ext-3.3.2.zip
 get_enzotools ext-slate-110328.zip
@@ -465,11 +465,11 @@
     elif [ ! -e yt-hg ] 
     then
         YT_DIR="$PWD/yt-hg/"
-        ( ${HG_EXEC} --debug clone http://hg.enzotools.org/yt-supplemental/ 2>&1 ) 1>> ${LOG_FILE}
+        ( ${HG_EXEC} --debug clone http://hg.yt-project.org/yt-supplemental/ 2>&1 ) 1>> ${LOG_FILE}
         # Recently the hg server has had some issues with timeouts.  In lieu of
         # a new webserver, we are now moving to a three-stage process.
         # First we clone the repo, but only up to r0.
-        ( ${HG_EXEC} --debug clone http://hg.enzotools.org/yt/ ./yt-hg 2>&1 ) 1>> ${LOG_FILE}
+        ( ${HG_EXEC} --debug clone http://hg.yt-project.org/yt/ ./yt-hg 2>&1 ) 1>> ${LOG_FILE}
         # Now we update to the branch we're interested in.
         ( ${HG_EXEC} -R ${YT_DIR} up -C ${BRANCH} 2>&1 ) 1>> ${LOG_FILE}
     elif [ -e yt-hg ] 
@@ -522,7 +522,7 @@
 [ -n "${OLD_CFLAGS}" ] && export CFLAGS=${OLD_CFLAGS}
 do_setup_py ipython-0.10
 do_setup_py h5py-1.3.1
-do_setup_py Cython-0.14
+do_setup_py Cython-0.15
 [ $INST_FORTHON -eq 1 ] && do_setup_py Forthon-0.8.4
 
 echo "Doing yt update, wiping local changes and updating to branch ${BRANCH}"
@@ -652,7 +652,7 @@
     echo
     echo "For support, see the website and join the mailing list:"
     echo
-    echo "    http://yt.enzotools.org/"
+    echo "    http://yt-project.org/"
     echo "    http://lists.spacepope.org/listinfo.cgi/yt-users-spacepope.org"
     echo
     echo "========================================================================"


--- a/scripts/yt_lodgeit.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/scripts/yt_lodgeit.py	Fri Sep 09 10:33:52 2011 -0400
@@ -4,8 +4,8 @@
     LodgeIt!
     ~~~~~~~~
 
-    A script that pastes stuff into the enzotools pastebin on
-    paste.enztools.org.
+    A script that pastes stuff into the yt-project pastebin on
+    paste.yt-project.org.
 
     Modified (very, very slightly) from the original script by the authors
     below.
@@ -33,7 +33,7 @@
 
 SCRIPT_NAME = os.path.basename(sys.argv[0])
 VERSION = '0.3'
-SERVICE_URL = 'http://paste.enzotools.org/'
+SERVICE_URL = 'http://paste.yt-project.org/'
 SETTING_KEYS = ['author', 'title', 'language', 'private', 'clipboard',
                 'open_browser']
 


--- a/setup.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/setup.py	Fri Sep 09 10:33:52 2011 -0400
@@ -25,7 +25,7 @@
     print "Now attempting to install Cython"
     import pip
     rv = pip.main(["install",
-              "http://yt.enzotools.org/dependencies/Cython-latest.tar.gz"])
+              "http://yt-project.org/dependencies/Cython-latest.tar.gz"])
     if rv == 1:
         print "Unable to install Cython.  Please report this bug to yt-users."
         sys.exit(1)
@@ -81,7 +81,7 @@
 
 import setuptools
 
-VERSION = "2.2dev"
+VERSION = "2.3dev"
 
 if os.path.exists('MANIFEST'): os.remove('MANIFEST')
 
@@ -129,7 +129,7 @@
                        ]},
         author="Matthew J. Turk",
         author_email="matthewturk at gmail.com",
-        url = "http://yt.enzotools.org/",
+        url = "http://yt-project.org/",
         license="GPL-3",
         configuration=configuration,
         zip_safe=False,


--- a/tests/object_field_values.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/tests/object_field_values.py	Fri Sep 09 10:33:52 2011 -0400
@@ -9,6 +9,33 @@
 class FieldHashesDontMatch(RegressionTestException):
     pass
 
+known_objects = {}
+
+def register_object(func):
+    known_objects[func.func_name] = func
+    return func
+
+@register_object
+def centered_sphere(self):
+    center = 0.5*(self.pf.domain_right_edge + self.pf.domain_left_edge)
+    width = (self.pf.domain_right_edge - self.pf.domain_left_edge).max()
+    self.data_object = self.pf.h.sphere(center, width/0.25)
+
+@register_object
+def off_centered_sphere(self):
+    center = 0.5*(self.pf.domain_right_edge + self.pf.domain_left_edge)
+    width = (self.pf.domain_right_edge - self.pf.domain_left_edge).max()
+    self.data_object = self.pf.h.sphere(center - 0.25 * width, width/0.25)
+
+@register_object
+def corner_sphere(self):
+    width = (self.pf.domain_right_edge - self.pf.domain_left_edge).max()
+    self.data_object = self.pf.h.sphere(self.pf.domain_left_edge, width/0.25)
+
+@register_object
+def all_data(self):
+    self.data_object = self.pf.h.all_data()
+
 class YTFieldValuesTest(YTStaticOutputTest):
     def run(self):
         vals = self.data_object[self.field].copy()
@@ -18,42 +45,12 @@
     def compare(self, old_result):
         if self.result != old_result: raise FieldHashesDontMatch
 
-class CenteredSphere(YTFieldValuesTest):
+    def setup(self):
+        YTStaticOutputTest.setup(self)
+        known_objects[self.object_name](self)
 
-    def setup(self):
-        YTFieldValuesTest.setup(self)
-        center = 0.5*(self.pf.domain_right_edge + self.pf.domain_left_edge)
-        width = (self.pf.domain_right_edge - self.pf.domain_left_edge).max()
-        self.data_object = self.pf.h.sphere(center, width/0.25)
+for object_name in known_objects:
+    for field in field_list + particle_field_list:
+        create_test(YTFieldValuesTest, "%s_%s" % (object_name, field),
+                    field = field, object_name = object_name)
 
-for field in field_list + particle_field_list:
-    create_test(CenteredSphere, "centered_sphere_%s" % (field), field = field)
-
-class OffCenteredSphere(YTFieldValuesTest):
-
-    def setup(self):
-        YTFieldValuesTest.setup(self)
-        center = 0.5*(self.pf.domain_right_edge + self.pf.domain_left_edge)
-        width = (self.pf.domain_right_edge - self.pf.domain_left_edge).max()
-        self.data_object = self.pf.h.sphere(center - 0.25 * width, width/0.25)
-
-for field in field_list + particle_field_list:
-    create_test(OffCenteredSphere, "off_centered_sphere_%s" % (field), field = field)
-
-class CornerSphere(YTFieldValuesTest):
-
-    def setup(self):
-        YTFieldValuesTest.setup(self)
-        width = (self.pf.domain_right_edge - self.pf.domain_left_edge).max()
-        self.data_object = self.pf.h.sphere(self.pf.domain_left_edge, width/0.25)
-
-for field in field_list + particle_field_list:
-    create_test(CornerSphere, "corner_sphere_%s" % (field), field = field)
-
-class AllData(YTFieldValuesTest):
-    def setup(self):
-        YTFieldValuesTest.setup(self)
-        self.data_object = self.pf.h.all_data()
-
-for field in field_list + particle_field_list:
-    create_test(AllData, "all_data_%s" % (field), field = field)
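
The refactor above replaces one test subclass per data object with a
registry keyed by function name, and the new loop crosses every registered
object with every field.  Extending the matrix is now a single decorated
function; a sketch (small_sphere is a hypothetical addition, not part of
this commit):

    @register_object
    def small_sphere(self):
        # Any registered function that sets self.data_object is picked up
        # automatically by the create_test loop.
        center = 0.5*(self.pf.domain_right_edge + self.pf.domain_left_edge)
        self.data_object = self.pf.h.sphere(center, 0.1)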


--- a/tests/projections.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/tests/projections.py	Fri Sep 09 10:33:52 2011 -0400
@@ -8,9 +8,9 @@
     for field in field_list:
         create_test(TestProjection, "projection_test_%s_%s" % (axis, field),
                     field = field, axis = axis)
+        create_test(TestProjection, "projection_test_%s_%s_Density" % (axis, field),
+                    field = field, axis = axis, weight_field = "Density")
 
 for field in field_list:
     create_test(TestGasDistribution, "profile_density_test_%s" % field,
                 field_x = "Density", field_y = field)
-
-


--- a/yt/__init__.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/__init__.py	Fri Sep 09 10:33:52 2011 -0400
@@ -6,8 +6,8 @@
 wide-variety of methods.
 
 An ever-growing collection of documentation is also available at
-http://yt.enzotools.org/doc/ . Additionally, there is a
-project site at http://yt.enzotools.org/ with recipes, a wiki, a variety of
+http://yt-project.org/doc/ . Additionally, there is a
+project site at http://yt-project.org/ with recipes, a wiki, a variety of
 ways of peering into the version control, and a bug-reporting system.
 
 YT is divided into several packages.
@@ -62,7 +62,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: KIPAC/SLAC/Stanford
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2007-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/analysis_modules/absorption_spectrum/__init__.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/analysis_modules/absorption_spectrum/__init__.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Britton Smith <brittons at origins.colorado.edu>
 Affiliation: CASA/University of CO, Boulder
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2008-2011 Britton Smith.  All Rights Reserved.
 


--- a/yt/analysis_modules/absorption_spectrum/absorption_line.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/analysis_modules/absorption_spectrum/absorption_line.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Britton Smith <brittonsmith at gmail.com>
 Affiliation: Michigan State University
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2008-2011 Britton Smith.  All Rights Reserved.
 


--- a/yt/analysis_modules/absorption_spectrum/absorption_spectrum.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/analysis_modules/absorption_spectrum/absorption_spectrum.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Britton Smith <brittonsmith at gmail.com>
 Affiliation: Michigan State University
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2008-2011 Britton Smith.  All Rights Reserved.
 


--- a/yt/analysis_modules/absorption_spectrum/api.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/analysis_modules/absorption_spectrum/api.py	Fri Sep 09 10:33:52 2011 -0400
@@ -7,7 +7,7 @@
 Affiliation: KIPAC/SLAC/Stanford
 Author: Britton Smith <brittonsmith at gmail.com>
 Affiliation: MSU
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2010-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/analysis_modules/api.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/analysis_modules/api.py	Fri Sep 09 10:33:52 2011 -0400
@@ -7,7 +7,7 @@
 Affiliation: KIPAC/SLAC/Stanford
 Author: Britton Smith <brittonsmith at gmail.com>
 Affiliation: MSU
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2010-2011 Matthew Turk.  All Rights Reserved.
 
@@ -79,7 +79,6 @@
     ExtractedParameterFile
 
 from .level_sets.api import \
-    GridConsiderationQueue, \
     coalesce_join_tree, \
     identify_contours, \
     Clump, \


--- a/yt/analysis_modules/coordinate_transformation/api.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/analysis_modules/coordinate_transformation/api.py	Fri Sep 09 10:33:52 2011 -0400
@@ -7,7 +7,7 @@
 Affiliation: KIPAC/SLAC/Stanford
 Author: Britton Smith <brittonsmith at gmail.com>
 Affiliation: MSU
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2010-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/analysis_modules/coordinate_transformation/transforms.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/analysis_modules/coordinate_transformation/transforms.py	Fri Sep 09 10:33:52 2011 -0400
@@ -5,7 +5,7 @@
 Affiliation: KIPAC/SLAC/Stanford
 Author: JS Oishi <jsoishi at astro.berkeley.edu>
 Organization: UC Berkeley
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2007-2011 Matthew Turk, J. S. Oishi.  All Rights Reserved.
 


--- a/yt/analysis_modules/halo_finding/api.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/analysis_modules/halo_finding/api.py	Fri Sep 09 10:33:52 2011 -0400
@@ -7,7 +7,7 @@
 Affiliation: KIPAC/SLAC/Stanford
 Author: Britton Smith <brittonsmith at gmail.com>
 Affiliation: MSU
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2010-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/analysis_modules/halo_finding/halo_objects.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/analysis_modules/halo_finding/halo_objects.py	Fri Sep 09 10:33:52 2011 -0400
@@ -5,7 +5,7 @@
 Affiliation: KIPAC/SLAC/Stanford
 Author: Stephen Skory <s at skory.us>
 Affiliation: UCSD Physics/CASS
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2008-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/analysis_modules/halo_finding/parallel_hop/parallel_hop_interface.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/analysis_modules/halo_finding/parallel_hop/parallel_hop_interface.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Stephen Skory <s at skory.us>
 Affiliation: UCSD/CASS
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2008-2011 Stephen Skory.  All Rights Reserved.
 


--- a/yt/analysis_modules/halo_mass_function/api.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/analysis_modules/halo_mass_function/api.py	Fri Sep 09 10:33:52 2011 -0400
@@ -7,7 +7,7 @@
 Affiliation: KIPAC/SLAC/Stanford
 Author: Britton Smith <brittonsmith at gmail.com>
 Affiliation: MSU
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2010-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/analysis_modules/halo_mass_function/halo_mass_function.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/analysis_modules/halo_mass_function/halo_mass_function.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Stephen Skory <s at skory.us>
 Affiliation: UC San Diego / CASS
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2008-2011 Stephen Skory (and others).  All Rights Reserved.
 


--- a/yt/analysis_modules/halo_merger_tree/api.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/analysis_modules/halo_merger_tree/api.py	Fri Sep 09 10:33:52 2011 -0400
@@ -7,7 +7,7 @@
 Affiliation: KIPAC/SLAC/Stanford
 Author: Britton Smith <brittonsmith at gmail.com>
 Affiliation: MSU
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2010-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/analysis_modules/halo_merger_tree/enzofof_merger_tree.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/analysis_modules/halo_merger_tree/enzofof_merger_tree.py	Fri Sep 09 10:33:52 2011 -0400
@@ -6,7 +6,7 @@
 Affiliation: Columbia University
 Author: John H. Wise <jwise at astro.princeton.edu>
 Affiliation: Princeton
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2010-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/analysis_modules/halo_merger_tree/merger_tree.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/analysis_modules/halo_merger_tree/merger_tree.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Stephen Skory <s at skory.us>
 Affiliation: CASS/UC San Diego, CA
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2008-2011 Stephen Skory.  All Rights Reserved.
 


--- a/yt/analysis_modules/halo_profiler/api.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/analysis_modules/halo_profiler/api.py	Fri Sep 09 10:33:52 2011 -0400
@@ -7,7 +7,7 @@
 Affiliation: KIPAC/SLAC/Stanford
 Author: Britton Smith <brittonsmith at gmail.com>
 Affiliation: MSU
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2010-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/analysis_modules/halo_profiler/centering_methods.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/analysis_modules/halo_profiler/centering_methods.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Stephen Skory <s at skory.us>
 Affiliation: CASA/University of CO, Boulder
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2008-2011 Stephen Skory.  All Rights Reserved.
 


--- a/yt/analysis_modules/halo_profiler/halo_filters.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/analysis_modules/halo_profiler/halo_filters.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Britton Smith <brittons at origins.colorado.edu>
 Affiliation: CASA/University of CO, Boulder
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2008-2011 Britton Smith.  All Rights Reserved.
 


--- a/yt/analysis_modules/halo_profiler/multi_halo_profiler.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/analysis_modules/halo_profiler/multi_halo_profiler.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Britton Smith <brittons at origins.colorado.edu>
 Affiliation: CASA/University of CO, Boulder
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2008-2011 Britton Smith.  All Rights Reserved.
 


--- a/yt/analysis_modules/halo_profiler/standard_analysis.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/analysis_modules/halo_profiler/standard_analysis.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: Columbia University
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/analysis_modules/hierarchy_subset/api.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/analysis_modules/hierarchy_subset/api.py	Fri Sep 09 10:33:52 2011 -0400
@@ -7,7 +7,7 @@
 Affiliation: KIPAC/SLAC/Stanford
 Author: Britton Smith <brittonsmith at gmail.com>
 Affiliation: MSU
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2010-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/analysis_modules/hierarchy_subset/hierarchy_subset.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/analysis_modules/hierarchy_subset/hierarchy_subset.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: KIPAC/SLAC/Stanford
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2008-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/analysis_modules/level_sets/api.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/analysis_modules/level_sets/api.py	Fri Sep 09 10:33:52 2011 -0400
@@ -7,7 +7,7 @@
 Affiliation: KIPAC/SLAC/Stanford
 Author: Britton Smith <brittonsmith at gmail.com>
 Affiliation: MSU
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2010-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/analysis_modules/level_sets/clump_tools.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/analysis_modules/level_sets/clump_tools.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: David Collins <dcollins at physics.ucsd.edu>
 Affiliation: Center for Astrophysics and Space Sciences, U C San Diego
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2009 David Collins.  All Rights Reserved.
 


--- a/yt/analysis_modules/level_sets/contour_finder.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/analysis_modules/level_sets/contour_finder.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: KIPAC/SLAC/Stanford
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2007-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/analysis_modules/light_cone/__init__.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/analysis_modules/light_cone/__init__.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Britton Smith <brittons at origins.colorado.edu>
 Affiliation: CASA/University of CO, Boulder
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2008-2011 Britton Smith.  All Rights Reserved.
 


--- a/yt/analysis_modules/light_cone/api.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/analysis_modules/light_cone/api.py	Fri Sep 09 10:33:52 2011 -0400
@@ -7,7 +7,7 @@
 Affiliation: KIPAC/SLAC/Stanford
 Author: Britton Smith <brittonsmith at gmail.com>
 Affiliation: MSU
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2010-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/analysis_modules/light_cone/common_n_volume.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/analysis_modules/light_cone/common_n_volume.py	Fri Sep 09 10:33:52 2011 -0400
@@ -4,7 +4,7 @@
 
 Author: Britton Smith <brittons at origins.colorado.edu>
 Affiliation: CASA/University of CO, Boulder
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2008-2011 Britton Smith.  All Rights Reserved.
 


--- a/yt/analysis_modules/light_cone/halo_mask.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/analysis_modules/light_cone/halo_mask.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Britton Smith <brittons at origins.colorado.edu>
 Affiliation: CASA/University of CO, Boulder
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2008-2011 Britton Smith.  All Rights Reserved.
 


--- a/yt/analysis_modules/light_cone/light_cone.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/analysis_modules/light_cone/light_cone.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Britton Smith <brittons at origins.colorado.edu>
 Affiliation: CASA/University of CO, Boulder
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2008-2011 Britton Smith.  All Rights Reserved.
 
@@ -122,12 +122,20 @@
                                                                 deltaz_min=self.deltaz_min)
 
     def calculate_light_cone_solution(self, seed=None, filename=None):
-        """
-        Create list of projections to be added together to make the light cone.
-        :param seed (int): the seed for the random number generator.  Any light cone solution 
-               can be reproduced by giving the same random seed.  Default: None (each solution 
-               will be distinct).
-        :param filename (str): if given, a text file detailing the solution will be written out.  Default: None.
+        r"""Create list of projections to be added together to make the light cone.
+
+        Several sentences providing an extended description. Refer to
+        variables using back-ticks, e.g. `var`.
+
+        Parameters
+        ----------
+        seed : int
+            The seed for the random number generator.  Any light cone solution
+            can be reproduced by giving the same random seed.  Default: None
+            (each solution will be distinct).
+        filename : string
+            If given, a text file detailing the solution will be written out.
+            Default: None.
         """
 
         # Don't use box coherence with maximum projection depths.
@@ -209,13 +217,17 @@
             self._save_light_cone_solution(filename=filename)
 
     def get_halo_mask(self, mask_file=None, map_file=None, **kwargs):
+        r"""Gets a halo mask from a file or makes a new one.
+
+        Parameters
+        ----------
+        mask_file : string, optional
+            An HDF5 file to which to output the halo mask
+        map_file : string, optional
+            A text file to which to output the halo map (locations in the
+            images of the halos)
+
         """
-        Gets a halo mask from a file or makes a new one.
-        :param mask_file (str): specify an hdf5 file to output the halo mask.
-        :param map_file (str): specify a text file to output the halo map 
-               (locations in image of halos).
-        """
-
         # Get halo map if map_file given.
         if map_file is not None and not os.path.exists(map_file):
             light_cone_halo_map(self, map_file=map_file, **kwargs)
@@ -240,22 +252,34 @@
     def project_light_cone(self, field, weight_field=None, apply_halo_mask=False, node=None,
                            save_stack=True, save_slice_images=False, cmap_name='algae', 
                            flatten_stack=False, photon_field=False):
-        """
-        Create projections for light cone, then add them together.
-        :param weight_field (str): the weight field of the projection.  This has the same meaning as in standard 
-               projections.  Default: None.
-        :param apply_halo_mask (bool): if True, a boolean mask is apply to the light cone projection.  See below for a 
-               description of halo masks.  Default: False.
-        :param node (str): a prefix to be prepended to the node name under which the projection data is serialized.  
-               Default: None.
-        :param save_stack (bool): if True, the unflatted light cone data including each individual slice is written to 
-               an hdf5 file.  Default: True.
-        :param save_slice_images (bool): save images for each individual projection slice.  Default: False.
-        :param cmap_name (str): color map for images.  Default: 'algae'.
-        :param flatten_stack (bool): if True, the light cone stack is continually flattened each time a slice is added 
-               in order to save memory.  This is generally not necessary.  Default: False.
-        :param photon_field (bool): if True, the projection data for each slice is decremented by 4 Pi R^2`, where R 
-               is the luminosity distance between the observer and the slice redshift.  Default: False.
+        r"""Create projections for light cone, then add them together.
+
+        Parameters
+        ----------
+        weight_field : str
+            the weight field of the projection.  This has the same meaning as
+            in standard projections.  Default: None.
+        apply_halo_mask : bool
+            if True, a boolean mask is applied to the light cone projection.  See
+            below for a description of halo masks.  Default: False.
+        node : string
+            a prefix to be prepended to the node name under which the
+            projection data is serialized.  Default: None.
+        save_stack : bool
+            if True, the unflattened light cone data including each individual
+            slice is written to an hdf5 file.  Default: True.
+        save_slice_images : bool
+            save images for each individual projection slice.  Default: False.
+        cmap_name : string
+            color map for images.  Default: 'algae'.
+        flatten_stack : bool
+            if True, the light cone stack is continually flattened each time a
+            slice is added in order to save memory.  This is generally not
+            necessary.  Default: False.
+        photon_field : bool
+            if True, the projection data for each slice is decremented by 4 Pi
+            R^2, where R is the luminosity distance between the observer and
+            the slice redshift.  Default: False.
         """
 
         # Clear projection stack.


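The rewritten docstring above describes the projection parameters; as a
minimal usage sketch, assuming a LightCone instance has already been
constructed and a solution calculated (the parameter-file path, field name,
and constructor keywords here are placeholders, not taken from this
changeset):

    from yt.analysis_modules.light_cone.api import LightCone

    lc = LightCone("my_simulation.par", initial_redshift=0.4,
                   final_redshift=0.0, observer_redshift=0.0)
    lc.calculate_light_cone_solution(seed=123456789)
    # Project an unweighted Density light cone, keeping the full stack
    # on disk but skipping per-slice images.
    lc.project_light_cone("Density", weight_field=None,
                          save_stack=True, save_slice_images=False)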
--- a/yt/analysis_modules/light_cone/light_cone_projection.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/analysis_modules/light_cone/light_cone_projection.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Britton Smith <brittons at origins.colorado.edu>
 Affiliation: CASA/University of CO, Boulder
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2008-2011 Britton Smith.  All Rights Reserved.
 


--- a/yt/analysis_modules/light_cone/unique_solution.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/analysis_modules/light_cone/unique_solution.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Britton Smith <brittons at origins.colorado.edu>
 Affiliation: CASA/University of CO, Boulder
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2008-2011 Britton Smith.  All Rights Reserved.
 


--- a/yt/analysis_modules/light_ray/api.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/analysis_modules/light_ray/api.py	Fri Sep 09 10:33:52 2011 -0400
@@ -7,7 +7,7 @@
 Affiliation: KIPAC/SLAC/Stanford
 Author: Britton Smith <brittonsmith at gmail.com>
 Affiliation: MSU
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2010-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/analysis_modules/light_ray/light_ray.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/analysis_modules/light_ray/light_ray.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Britton Smith <brittons at origins.colorado.edu>
 Affiliation: CASA/University of CO, Boulder
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2008-2011 Britton Smith.  All Rights Reserved.
 


--- a/yt/analysis_modules/list_modules.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/analysis_modules/list_modules.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: KIPAC/SLAC/Stanford
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2007-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/analysis_modules/simulation_handler/api.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/analysis_modules/simulation_handler/api.py	Fri Sep 09 10:33:52 2011 -0400
@@ -7,7 +7,7 @@
 Affiliation: KIPAC/SLAC/Stanford
 Author: Britton Smith <brittonsmith at gmail.com>
 Affiliation: MSU
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2010-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/analysis_modules/simulation_handler/enzo_simulation.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/analysis_modules/simulation_handler/enzo_simulation.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Britton Smith <brittons at origins.colorado.edu>
 Affiliation: CASA/University of CO, Boulder
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2008-2011 Britton Smith.  All Rights Reserved.
 


--- a/yt/analysis_modules/spectral_integrator/api.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/analysis_modules/spectral_integrator/api.py	Fri Sep 09 10:33:52 2011 -0400
@@ -7,7 +7,7 @@
 Affiliation: KIPAC/SLAC/Stanford
 Author: Britton Smith <brittonsmith at gmail.com>
 Affiliation: MSU
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2010-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/analysis_modules/spectral_integrator/spectral_frequency_integrator.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/analysis_modules/spectral_integrator/spectral_frequency_integrator.py	Fri Sep 09 10:33:52 2011 -0400
@@ -4,7 +4,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: KIPAC/SLAC/Stanford
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2007-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/analysis_modules/star_analysis/api.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/analysis_modules/star_analysis/api.py	Fri Sep 09 10:33:52 2011 -0400
@@ -7,7 +7,7 @@
 Affiliation: KIPAC/SLAC/Stanford
 Author: Britton Smith <brittonsmith at gmail.com>
 Affiliation: MSU
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2010-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/analysis_modules/star_analysis/sfr_spectrum.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/analysis_modules/star_analysis/sfr_spectrum.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Stephen Skory <s at skory.us>
 Affiliation: UC San Diego / CASS
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2008-2011 Stephen Skory (and others).  All Rights Reserved.
 


--- a/yt/analysis_modules/sunrise_export/api.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/analysis_modules/sunrise_export/api.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: UCSD
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2010-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/analysis_modules/sunrise_export/sunrise_exporter.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/analysis_modules/sunrise_export/sunrise_exporter.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: UCSD
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2010-2011 Matthew Turk.  All Rights Reserved.
 
@@ -109,7 +109,7 @@
                             for ax in 'xyz']).transpose()
         # Velocity is cm/s, we want it to be kpc/yr
         vel *= (pf["kpc"]/pf["cm"]) / (365*24*3600.)
-        age = pf["years"] * (pf["InitialTime"] - reg["creation_time"][pi])
+        age = pf["years"] * (pf.current_time - reg["creation_time"][pi])
         creation_time = reg["creation_time"][pi] * pf["years"]
 
         initial_mass = reg["InitialMassCenOstriker"][pi]

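A toy check of the corrected age computation above, assuming pf["years"]
converts code time to years; the numbers are made up:

    years_per_code_time = 1.0e6   # stand-in for pf["years"]
    current_time = 2.5            # stand-in for pf.current_time (code units)
    creation_time = 1.5           # particle creation time (code units)
    age = years_per_code_time * (current_time - creation_time)
    print age                     # 1000000.0 years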

--- a/yt/analysis_modules/two_point_functions/api.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/analysis_modules/two_point_functions/api.py	Fri Sep 09 10:33:52 2011 -0400
@@ -7,7 +7,7 @@
 Affiliation: KIPAC/SLAC/Stanford
 Author: Britton Smith <brittonsmith at gmail.com>
 Affiliation: MSU
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2010-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/analysis_modules/two_point_functions/two_point_functions.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/analysis_modules/two_point_functions/two_point_functions.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Stephen Skory <s at skory.us>
 Affiliation: UCSD Physics/CASS
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2010-2011 Stephen Skory.  All Rights Reserved.
 


--- a/yt/api.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/api.py	Fri Sep 09 10:33:52 2011 -0400
@@ -7,7 +7,7 @@
 Affiliation: KIPAC/SLAC/Stanford
 Author: Britton Smith <brittonsmith at gmail.com>
 Affiliation: MSU
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2010-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/arraytypes.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/arraytypes.py	Fri Sep 09 10:33:52 2011 -0400
@@ -7,7 +7,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: KIPAC/SLAC/Stanford
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2007-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/astro_objects/api.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/astro_objects/api.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: Columbia University
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/astro_objects/astrophysical_object.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/astro_objects/astrophysical_object.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: Columbia University
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/astro_objects/clumped_region.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/astro_objects/clumped_region.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: Columbia University
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/astro_objects/simulation_volume.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/astro_objects/simulation_volume.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: Columbia University
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/config.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/config.py	Fri Sep 09 10:33:52 2011 -0400
@@ -5,7 +5,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: KIPAC/SLAC/Stanford
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2007-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/convenience.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/convenience.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: KIPAC/SLAC/Stanford
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2007-2011 Matthew Turk.  All Rights Reserved.
 
@@ -35,12 +35,14 @@
     output_type_registry, \
     EnzoRunDatabase
 
-def all_pfs(basedir='.',max_depth=1, name_spec="*.hierarchy", **kwargs):
+def all_pfs(basedir='.', skip=None, max_depth=1, name_spec="*.hierarchy", **kwargs):
     """
-    This function searchs a directory and its sub-directories, up to a depth of
-    *max_depth*, for parameter files.  It looks for the *name_spec* and then
-    instantiates an EnzoStaticOutput from each.  All subsequent *kwargs* are
-    passed on to the EnzoStaticOutput constructor.
+    This function searches a directory and its sub-directories, up to a
+    depth of *max_depth*, for parameter files.  It looks for the
+    *name_spec* and then instantiates an EnzoStaticOutput from each.
+    If *skip* is not None, only every *skip*-th parameter file is
+    loaded; otherwise all files are returned.  All subsequent *kwargs*
+    are passed on to the EnzoStaticOutput constructor.
     """
     list_of_names = []
     basedir = os.path.expanduser(basedir)
@@ -48,7 +50,7 @@
         bb = list('*' * i) + [name_spec]
         list_of_names += glob.glob(os.path.join(basedir,*bb))
     list_of_names.sort(key=lambda b: os.path.basename(b))
-    for fn in list_of_names:
+    for fn in list_of_names[::skip]:
         yield load(fn[:-10], **kwargs)
 
 def max_spheres(width, unit, **kwargs):

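A small sketch of the slicing semantics behind the new *skip* argument: a
slice step of None is equivalent to 1, so skip=None yields every file while
skip=N yields every N-th file.  The file names are hypothetical:

    names = ["DD0000.hierarchy", "DD0001.hierarchy",
             "DD0002.hierarchy", "DD0003.hierarchy"]
    print names[::None]   # all four names
    print names[::2]      # ['DD0000.hierarchy', 'DD0002.hierarchy']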

--- a/yt/data_objects/analyzer_objects.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/data_objects/analyzer_objects.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: UCSD
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2010-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/data_objects/api.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/data_objects/api.py	Fri Sep 09 10:33:52 2011 -0400
@@ -7,7 +7,7 @@
 Affiliation: KIPAC/SLAC/Stanford
 Author: Britton Smith <brittonsmith at gmail.com>
 Affiliation: MSU
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2010-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/data_objects/data_containers.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/data_objects/data_containers.py	Fri Sep 09 10:33:52 2011 -0400
@@ -5,7 +5,7 @@
 Affiliation: KIPAC/SLAC/Stanford
 Author: Britton Smith <Britton.Smith at colorado.edu>
 Affiliation: University of Colorado at Boulder
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2007-2011 Matthew Turk.  All Rights Reserved.
 
@@ -40,7 +40,7 @@
 from yt.data_objects.particle_io import particle_handler_registry
 from yt.utilities.amr_utils import find_grids_in_inclined_box, \
     grid_points_in_volume, planar_points_in_volume, VoxelTraversal, \
-    QuadTree
+    QuadTree, get_box_grids_below_level, ghost_zone_interpolate
 from yt.utilities.data_point_utilities import CombineGrids, \
     DataCubeRefine, DataCubeReplace, FillRegion, FillBuffer
 from yt.utilities.definitions import axis_names, x_dict, y_dict
@@ -1614,6 +1614,9 @@
             dxs.append(na.ones(nvals.shape[0], dtype='float64') * ds)
         coord_data = na.concatenate(coord_data, axis=0).transpose()
         field_data = na.concatenate(field_data, axis=0).transpose()
+        if self._weight is None:
+            dls, convs = self._get_dls(self._grids[0], fields)
+            field_data *= convs
         weight_data = na.concatenate(weight_data, axis=0).transpose()
         dxs = na.concatenate(dxs, axis=0).transpose()
         # We now convert to half-widths and center-points
@@ -1685,17 +1688,7 @@
             pbar.update(pi)
             grid.clear_data()
         pbar.finish()
-        lt = tree.get_all_from_level(level, False)
         return
-        if self._weight is not None:
-            field_data = field_data / coord_data[3,:].reshape((1,coord_data.shape[1]))
-        else:
-            field_data *= convs[...,na.newaxis]
-        mylog.info("Level %s done: %s final", \
-                   level, coord_data.shape[1])
-        dx = grids_to_project[0].dds[self.axis] # this is our dl
-        return coord_data, dx, field_data
-
 
     def _get_points_in_region(self, grid):
         pointI = self.source._get_point_indices(grid, use_child_mask=False)
@@ -2165,7 +2158,6 @@
            na.any(self.right_edge > self.pf.domain_right_edge):
             grids,ind = self.pf.hierarchy.get_periodic_box_grids(
                             self.left_edge, self.right_edge)
-            ind = slice(None)
         else:
             grids,ind = self.pf.hierarchy.get_box_grids(
                             self.left_edge, self.right_edge)
@@ -2974,253 +2966,6 @@
         """
         return 4./3. * math.pi * (self.radius * self.pf[unit])**3.0
 
-class AMRFloatCoveringGridBase(AMR3DData):
-    """
-    Covering grids represent fixed-resolution data over a given region.
-    In order to achieve this goal -- for instance in order to obtain ghost
-    zones -- grids up to and including the indicated level are included.
-    No interpolation is done (as that would affect the 'power' on small
-    scales) on the input data.
-    """
-    _spatial = True
-    _type_name = "float_covering_grid"
-    _con_args = ('level', 'left_edge', 'right_edge', 'ActiveDimensions')
-    def __init__(self, level, left_edge, right_edge, dims, fields = None,
-                 pf = None, num_ghost_zones = 0, use_pbar = True, **kwargs):
-        """
-        The data object returned will consider grids up to *level* in
-        generating fixed resolution data between *left_edge* and *right_edge*
-        that is *dims* (3-values) on a side.
-        """
-        AMR3DData.__init__(self, center=None, fields=fields, pf=pf, **kwargs)
-        self.left_edge = na.array(left_edge)
-        self.right_edge = na.array(right_edge)
-        self.level = level
-        self.ActiveDimensions = na.array(dims)
-        dds = (self.right_edge-self.left_edge) \
-              / self.ActiveDimensions
-        self.dds = dds
-        self.data["dx"] = dds[0]
-        self.data["dy"] = dds[1]
-        self.data["dz"] = dds[2]
-        self._num_ghost_zones = num_ghost_zones
-        self._use_pbar = use_pbar
-        self._refresh_data()
-
-    def _get_list_of_grids(self):
-        if self._grids is not None: return
-        if na.any(self.left_edge < self.pf.domain_left_edge) or \
-           na.any(self.right_edge > self.pf.domain_right_edge):
-            grids,ind = self.pf.hierarchy.get_periodic_box_grids(
-                            self.left_edge, self.right_edge)
-            ind = slice(None)
-        else:
-            grids,ind = self.pf.hierarchy.get_box_grids(
-                            self.left_edge, self.right_edge)
-        level_ind = na.where(self.pf.hierarchy.grid_levels.ravel()[ind] <= self.level)
-        sort_ind = na.argsort(self.pf.h.grid_levels.ravel()[ind][level_ind])
-        self._grids = self.pf.hierarchy.grids[ind][level_ind][(sort_ind,)][::-1]
-
-    def extract_region(self, indices):
-        mylog.error("Sorry, dude, do it yourself, it's already in 3-D.")
-
-    def _refresh_data(self):
-        AMR3DData._refresh_data(self)
-        self['dx'] = self.dds[0] * na.ones(self.ActiveDimensions, dtype='float64')
-        self['dy'] = self.dds[1] * na.ones(self.ActiveDimensions, dtype='float64')
-        self['dz'] = self.dds[2] * na.ones(self.ActiveDimensions, dtype='float64')
-
-    def get_data(self, fields=None):
-        if self._grids is None:
-            self._get_list_of_grids()
-        if fields is None:
-            fields = self.fields[:]
-        else:
-            fields = ensure_list(fields)
-        obtain_fields = []
-        for field in fields:
-            if self.data.has_key(field): continue
-            if field not in self.hierarchy.field_list:
-                try:
-                    #print "Generating", field
-                    self._generate_field(field)
-                    continue
-                except NeedsOriginalGrid, ngt_exception:
-                    pass
-            obtain_fields.append(field)
-            self[field] = na.zeros(self.ActiveDimensions, dtype='float64') -999
-        if len(obtain_fields) == 0: return
-        mylog.debug("Getting fields %s from %s possible grids",
-                   obtain_fields, len(self._grids))
-        if self._use_pbar: pbar = \
-                get_pbar('Searching grids for values ', len(self._grids))
-        for i, grid in enumerate(self._grids):
-            if self._use_pbar: pbar.update(i)
-            self._get_data_from_grid(grid, obtain_fields)
-            if not na.any(self[obtain_fields[0]] == -999): break
-        if self._use_pbar: pbar.finish()
-        if na.any(self[obtain_fields[0]] == -999):
-            # and self.dx < self.hierarchy.grids[0].dx:
-            print "COVERING PROBLEM", na.where(self[obtain_fields[0]]==-999)[0].size
-            print na.where(self[obtain_fields[0]]==-999)
-            raise KeyError
-            
-    def _generate_field(self, field):
-        if self.pf.field_info.has_key(field):
-            # First we check the validator; this might even raise!
-            self.pf.field_info[field].check_available(self)
-            self[field] = self.pf.field_info[field](self)
-        else: # Can't find the field, try as it might
-            raise KeyError(field)
-
-    def flush_data(self, field=None):
-        """
-        Any modifications made to the data in this object are pushed back
-        to the originating grids, except the cells where those grids are both
-        below the current level `and` have child cells.
-        """
-        self._get_list_of_grids()
-        # We don't generate coordinates here.
-        if field == None:
-            fields_to_get = self.fields[:]
-        else:
-            fields_to_get = ensure_list(field)
-        for grid in self._grids:
-            self._flush_data_to_grid(grid, fields_to_get)
-
-    @restore_grid_state
-    def _get_data_from_grid(self, grid, fields):
-        ll = int(grid.Level == self.level)
-        g_dx = grid.dds.ravel()
-        c_dx = self.dds.ravel()
-        g_fields = [grid[field] for field in ensure_list(fields)]
-        c_fields = [self[field] for field in ensure_list(fields)]
-        DataCubeRefine(
-            grid.LeftEdge, g_dx, g_fields, grid.child_mask,
-            self.left_edge, self.right_edge, c_dx, c_fields,
-            ll, self.pf.domain_left_edge, self.pf.domain_right_edge)
-
-    def _flush_data_to_grid(self, grid, fields):
-        ll = int(grid.Level == self.level)
-        g_dx = grid.dds.ravel()
-        c_dx = self.dds.ravel()
-        g_fields = []
-        for field in ensure_list(fields):
-            if not grid.has_key(field): grid[field] = \
-               na.zeros(grid.ActiveDimensions, dtype=self[field].dtype)
-            g_fields.append(grid[field])
-        c_fields = [self[field] for field in ensure_list(fields)]
-        DataCubeReplace(
-            grid.LeftEdge, g_dx, g_fields, grid.child_mask,
-            self.left_edge, self.right_edge, c_dx, c_fields,
-            ll, self.pf.domain_left_edge, self.pf.domain_right_edge)
-
-    @property
-    def LeftEdge(self):
-        return self.left_edge
-
-    @property
-    def RightEdge(self):
-        return self.right_edge
-
-class AMRSmoothedCoveringGridBase(AMRFloatCoveringGridBase):
-    _type_name = "smoothed_covering_grid"
-    def __init__(self, *args, **kwargs):
-        dlog2 = na.log10(kwargs['dims'])/na.log10(2)
-        if not na.all(na.floor(dlog2) == na.ceil(dlog2)):
-            pass # used to warn but I think it is not accurate anymore
-            #mylog.warning("Must be power of two dimensions")
-            #raise ValueError
-        #kwargs['num_ghost_zones'] = 0
-        AMRFloatCoveringGridBase.__init__(self, *args, **kwargs)
-
-    def _get_list_of_grids(self):
-        if na.any(self.left_edge - self.dds < self.pf.domain_left_edge) or \
-           na.any(self.right_edge + self.dds > self.pf.domain_right_edge):
-            grids,ind = self.pf.hierarchy.get_periodic_box_grids(
-                            self.left_edge - self.dds,
-                            self.right_edge + self.dds)
-            ind = slice(None)
-        else:
-            grids,ind = self.pf.hierarchy.get_box_grids(
-                            self.left_edge - self.dds,
-                            self.right_edge + self.dds)
-        level_ind = na.where(self.pf.hierarchy.grid_levels.ravel()[ind] <= self.level)
-        sort_ind = na.argsort(self.pf.h.grid_levels.ravel()[ind][level_ind])
-        self._grids = self.pf.hierarchy.grids[ind][level_ind][(sort_ind,)]
-
-    def _get_level_array(self, level, fields):
-        fields = ensure_list(fields)
-        # We assume refinement by a factor of two
-        rf = self.pf.refine_by**(self.level - level)
-        dims = na.maximum(1,self.ActiveDimensions/rf) + 2
-        dx = (self.right_edge-self.left_edge)/(dims-2)
-        x,y,z = (na.mgrid[0:dims[0],0:dims[1],0:dims[2]].astype('float64')-0.5)\
-              * dx[0]
-        x += self.left_edge[0] - dx[0]
-        y += self.left_edge[1] - dx[1]
-        z += self.left_edge[2] - dx[2]
-        offsets = [self['cd%s' % ax]*0.5 for ax in 'xyz']
-        bounds = [self.left_edge[0]-offsets[0], self.right_edge[0]+offsets[0],
-                  self.left_edge[1]-offsets[1], self.right_edge[1]+offsets[1],
-                  self.left_edge[2]-offsets[2], self.right_edge[2]+offsets[2]]
-        fake_grid = {'x':x,'y':y,'z':z,'dx':dx[0],'dy':dx[1],'dz':dx[2]}
-        for ax in 'xyz': self['cd%s'%ax] = fake_grid['d%s'%ax]
-        for field in fields:
-            # Generate the new grid field
-            interpolator = TrilinearFieldInterpolator(
-                            self[field], bounds, ['x','y','z'],
-                            truncate = True)
-            self[field] = interpolator(fake_grid)
-        return fake_grid
-
-    def get_data(self, field=None):
-        self._get_list_of_grids()
-        # We don't generate coordinates here.
-        if field == None:
-            fields_to_get = self.fields[:]
-        else:
-            fields_to_get = ensure_list(field)
-        for field in fields_to_get:
-            grid_count = 0
-            if self.data.has_key(field):
-                continue
-            mylog.debug("Getting field %s from %s possible grids",
-                       field, len(self._grids))
-            if self._use_pbar: pbar = \
-                    get_pbar('Searching grids for values ', len(self._grids))
-            # How do we find out the root grid base dx?
-            idims = na.array([3,3,3])
-            dx = na.minimum((self.right_edge-self.left_edge)/(idims-2),
-                            self.pf.h.grids[0].dds[0])
-            idims = na.floor((self.right_edge-self.left_edge)/dx) + 2
-            for ax in 'xyz': self['cd%s'%ax] = dx[0]
-            self[field] = na.zeros(idims,dtype='float64')-999
-            for level in range(self.level+1):
-                for grid in self.select_grids(level):
-                    if self._use_pbar: pbar.update(grid_count)
-                    self._get_data_from_grid(grid, field)
-                    grid_count += 1
-                if level < self.level: self._get_level_array(level+1, field)
-            self[field] = self[field][1:-1,1:-1,1:-1]
-            if self._use_pbar: pbar.finish()
-        
-    @restore_grid_state
-    def _get_data_from_grid(self, grid, fields):
-        fields = ensure_list(fields)
-        g_dx = grid.dds
-        c_dx = na.array([self['cdx'],self['cdy'],self['cdz']])
-        g_fields = [grid[field] for field in fields]
-        c_fields = [self[field] for field in fields]
-        total = DataCubeRefine(
-            grid.LeftEdge, g_dx, g_fields, grid.child_mask,
-            self.left_edge-c_dx, self.right_edge+c_dx,
-            c_dx, c_fields,
-            1, self.pf.domain_left_edge, self.pf.domain_right_edge)
-
-    def flush_data(self, *args, **kwargs):
-        raise KeyError("Can't do this")
-
 class AMRCoveringGridBase(AMR3DData):
     _spatial = True
     _type_name = "covering_grid"
@@ -3244,17 +2989,15 @@
         if self._grids is not None: return
         if na.any(self.left_edge - buffer < self.pf.domain_left_edge) or \
            na.any(self.right_edge + buffer > self.pf.domain_right_edge):
-            grids,ind = self.pf.hierarchy.get_periodic_box_grids(
+            grids,ind = self.pf.hierarchy.get_periodic_box_grids_below_level(
                             self.left_edge - buffer,
-                            self.right_edge + buffer)
-            ind = slice(None)
+                            self.right_edge + buffer, self.level)
         else:
-            grids,ind = self.pf.hierarchy.get_box_grids(
-                            self.left_edge - buffer,
-                            self.right_edge + buffer)
-        level_ind = (self.pf.hierarchy.grid_levels.ravel()[ind] <= self.level)
-        sort_ind = na.argsort(self.pf.h.grid_levels.ravel()[ind][level_ind])
-        self._grids = self.pf.hierarchy.grids[ind][level_ind][(sort_ind,)][::-1]
+            grids,ind = self.pf.hierarchy.get_box_grids_below_level(
+                self.left_edge - buffer,
+                self.right_edge + buffer, self.level)
+        sort_ind = na.argsort(self.pf.h.grid_levels.ravel()[ind])
+        self._grids = self.pf.hierarchy.grids[ind][(sort_ind,)][::-1]
 
     def _refresh_data(self):
         AMR3DData._refresh_data(self)
@@ -3357,20 +3100,25 @@
     def RightEdge(self):
         return self.right_edge
 
-class AMRIntSmoothedCoveringGridBase(AMRCoveringGridBase):
-    _type_name = "si_covering_grid"
+class AMRSmoothedCoveringGridBase(AMRCoveringGridBase):
+    _type_name = "smoothed_covering_grid"
     @wraps(AMRCoveringGridBase.__init__)
     def __init__(self, *args, **kwargs):
+        self._base_dx = (
+              (self.pf.domain_right_edge - self.pf.domain_left_edge) /
+               self.pf.domain_dimensions.astype("float64"))
         AMRCoveringGridBase.__init__(self, *args, **kwargs)
         self._final_start_index = self.global_startindex
 
     def _get_list_of_grids(self):
-        buffer = self.pf.h.select_grids(0)[0].dds
+        if self._grids is not None: return
+        buffer = ((self.pf.domain_right_edge - self.pf.domain_left_edge)
+                 / self.pf.domain_dimensions).max()
         AMRCoveringGridBase._get_list_of_grids(self, buffer)
+        # We reverse the order to ensure that coarse grids are first
         self._grids = self._grids[::-1]
 
     def get_data(self, field=None):
-        dx = [self.pf.h.select_grids(l)[0].dds for l in range(self.level+1)]
         self._get_list_of_grids()
         # We don't generate coordinates here.
         if field == None:
@@ -3390,14 +3138,16 @@
             # L/R edges.
             # We jump-start our task here
             self._update_level_state(0, field)
-            for level in range(self.level+1):
-                for grid in self.select_grids(level):
-                    if self._use_pbar: pbar.update(grid_count)
-                    self._get_data_from_grid(grid, field, level)
-                    grid_count += 1
-                if level < self.level:
-                    self._update_level_state(level + 1)
+            
+            # The grids are assumed to be pre-sorted
+            last_level = 0
+            for gi, grid in enumerate(self._grids):
+                if self._use_pbar: pbar.update(gi)
+                if grid.Level > last_level and grid.Level <= self.level:
+                    self._update_level_state(last_level + 1)
                     self._refine(1, field)
+                    last_level = grid.Level
+                self._get_data_from_grid(grid, field, grid.Level)
             if self.level > 0:
                 self[field] = self[field][1:-1,1:-1,1:-1]
             if na.any(self[field] == -999):
@@ -3408,8 +3158,10 @@
             if self._use_pbar: pbar.finish()
 
     def _update_level_state(self, level, field = None):
-        dx = self.pf.h.select_grids(level)[0].dds
-        for ax, v in zip('xyz', dx): self['cd%s'%ax] = v
+        dx = self._base_dx / self.pf.refine_by**level
+        self.data['cdx'] = dx[0]
+        self.data['cdy'] = dx[1]
+        self.data['cdz'] = dx[2]
         LL = self.left_edge - self.pf.domain_left_edge
         self._old_global_startindex = self.global_startindex
         self.global_startindex = na.rint(LL / dx).astype('int64') - 1
@@ -3418,48 +3170,38 @@
         if level == 0 and self.level > 0:
             # We use one grid cell at LEAST, plus one buffer on all sides
             idims = na.rint((self.right_edge-self.left_edge)/dx).astype('int64') + 2
-            self[field] = na.zeros(idims,dtype='float64')-999
+            self.data[field] = na.zeros(idims,dtype='float64')-999
+            self._cur_dims = idims.astype("int32")
         elif level == 0 and self.level == 0:
             DLE = self.pf.domain_left_edge
             self.global_startindex = na.array(na.floor(LL/ dx), dtype='int64')
             idims = na.rint((self.right_edge-self.left_edge)/dx).astype('int64')
-            self[field] = na.zeros(idims,dtype='float64')-999
+            self.data[field] = na.zeros(idims,dtype='float64')-999
+            self._cur_dims = idims.astype("int32")
 
     def _refine(self, dlevel, field):
         rf = float(self.pf.refine_by**dlevel)
 
-        old_dims = na.array(self[field].shape) - 1
-        old_left = (self._old_global_startindex + 0.5) * rf 
-        old_right = rf*old_dims + old_left
-        old_bounds = [old_left[0], old_right[0],
-                      old_left[1], old_right[1],
-                      old_left[2], old_right[2]]
+        input_left = (self._old_global_startindex + 0.5) * rf 
+        dx = na.fromiter((self['cd%s' % ax] for ax in 'xyz'), count=3, dtype='float64')
+        output_dims = na.rint((self.right_edge-self.left_edge)/dx).astype('int32') + 2
 
-        dx = na.array([self['cd%s' % ax] for ax in 'xyz'], dtype='float64')
-        new_dims = na.rint((self.right_edge-self.left_edge)/dx).astype('int64') + 2
+        self._cur_dims = output_dims
 
-        # x, y, z are the new bounds
-        x,y,z = (na.mgrid[0:new_dims[0], 0:new_dims[1], 0:new_dims[2]]
-                    ).astype('float64') + 0.5
-        x += self.global_startindex[0]
-        y += self.global_startindex[1]
-        z += self.global_startindex[2]
-        fake_grid = {'x':x,'y':y,'z':z}
-
-        interpolator = TrilinearFieldInterpolator(
-                        self[field], old_bounds, ['x','y','z'],
-                        truncate = True)
-        self[field] = interpolator(fake_grid)
+        output_field = na.zeros(output_dims, dtype="float64")
+        output_left = self.global_startindex + 0.5
+        ghost_zone_interpolate(rf, self[field], input_left,
+                               output_field, output_left)
+        self[field] = output_field
 
     def _get_data_from_grid(self, grid, fields, level):
         fields = ensure_list(fields)
         g_fields = [grid[field] for field in fields]
         c_fields = [self[field] for field in fields]
-        dims = na.array(self[field].shape, dtype='int32')
         count = FillRegion(1,
             grid.get_global_startindex(), self.global_startindex,
             c_fields, g_fields, 
-            dims, grid.ActiveDimensions,
+            self._cur_dims, grid.ActiveDimensions,
             grid.child_mask, self.domain_width, 1, 0)
         return count
 

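A minimal sketch of the level-state arithmetic in _update_level_state above,
assuming a unit domain, a 32^3 root grid, and a refinement factor of 2; the
numbers are illustrative only:

    import numpy as na

    domain_left_edge = na.zeros(3)
    domain_right_edge = na.ones(3)
    domain_dimensions = na.array([32, 32, 32], dtype='float64')
    base_dx = (domain_right_edge - domain_left_edge) / domain_dimensions

    level, refine_by = 2, 2
    dx = base_dx / refine_by**level            # cell width on this level
    left_edge = na.array([0.25, 0.25, 0.25])
    LL = left_edge - domain_left_edge
    # One buffer cell on each side, as in the code above:
    global_startindex = na.rint(LL / dx).astype('int64') - 1
    print global_startindex                    # [31 31 31]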

--- a/yt/data_objects/derived_quantities.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/data_objects/derived_quantities.py	Fri Sep 09 10:33:52 2011 -0400
@@ -5,7 +5,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: KIPAC/SLAC/Stanford
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2007-2011 Matthew Turk.  All Rights Reserved.
 
@@ -173,9 +173,13 @@
     This function returns the location of the center
     of mass.  By default, it is computed from the *non-particle* data in the object.
 
-    :param use_cells: if True, will include the cell mass (default: True)
-    :param use_particles: if True, will include the particles in the 
-    object (default: False)
+    Parameters
+    ----------
+
+    use_cells : bool
+        If True, will include the cell mass (default: True)
+    use_particles : bool
+        If True, will include the particles in the object (default: False)
     """
     x = y = z = den = 0
     if use_cells: 


--- a/yt/data_objects/field_info_container.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/data_objects/field_info_container.py	Fri Sep 09 10:33:52 2011 -0400
@@ -5,7 +5,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: KIPAC/SLAC/Stanford
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2008-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/data_objects/grid_patch.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/data_objects/grid_patch.py	Fri Sep 09 10:33:52 2011 -0400
@@ -48,6 +48,7 @@
     _type_name = 'grid'
     _skip_add = True
     _con_args = ('id', 'filename')
+    OverlappingSiblings = None
 
     __slots__ = ['data', 'field_parameters', 'id', 'hierarchy', 'pf',
                  'ActiveDimensions', 'LeftEdge', 'RightEdge', 'Level',
@@ -372,6 +373,10 @@
         self._child_mask = na.ones(self.ActiveDimensions, 'int32')
         for child in self.Children:
             self.__fill_child_mask(child, self._child_mask, 0)
+        if self.OverlappingSiblings is not None:
+            for sibling in self.OverlappingSiblings:
+                self.__fill_child_mask(sibling, self._child_mask, 0)
+        
         self._child_indices = (self._child_mask==0) # bool, possibly redundant
 
     def __generate_child_index_mask(self):
@@ -384,6 +389,10 @@
         for child in self.Children:
             self.__fill_child_mask(child, self._child_index_mask,
                                    child.id)
+        if self.OverlappingSiblings is not None:
+            for sibling in self.OverlappingSiblings:
+                self.__fill_child_mask(sibling, self._child_index_mask,
+                                       sibling.id)
 
     def _get_coords(self):
         if self.__coords == None: self._generate_coords()
@@ -430,8 +439,8 @@
                   'use_pbar': False, 'fields': fields}
 
         if smoothed:
-            cube = self.hierarchy.si_covering_grid(level, new_left_edge,
-                                                   **kwargs)
+            cube = self.hierarchy.smoothed_covering_grid(
+                level, new_left_edge, **kwargs)
         else:
             cube = self.hierarchy.covering_grid(level, new_left_edge, **kwargs)
 

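A toy illustration of the masking change above: cells covered by a child
grid, and now also by an overlapping sibling, are zeroed in child_mask so
they are not counted twice.  The shapes and extents are invented:

    import numpy as na

    child_mask = na.ones((8, 8, 8), dtype='int32')
    # Suppose a child (or overlapping sibling) covers cells [2:6, 2:6, 2:6]:
    child_mask[2:6, 2:6, 2:6] = 0
    print child_mask.sum()   # 512 - 64 = 448 cells remain unmasked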

--- a/yt/data_objects/hierarchy.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/data_objects/hierarchy.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: KIPAC/SLAC/Stanford
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2007-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/data_objects/object_finding_mixin.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/data_objects/object_finding_mixin.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: KIPAC/SLAC/Stanford
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2007-2011 Matthew Turk.  All Rights Reserved.
 
@@ -26,6 +26,9 @@
 import numpy as na
 
 from yt.funcs import *
+from yt.utilities.amr_utils import \
+    get_box_grids_level, \
+    get_box_grids_below_level
 
 class ObjectFindingMixin(object):
 
@@ -115,7 +118,7 @@
         as the input *fields*.
         
         Parameters
-        ---------
+        ----------
         fields : string or list of strings
             The field(s) that will be returned.
         
@@ -201,3 +204,33 @@
                     mask[gi] = True
         return self.grids[mask], na.where(mask)
 
+    def get_box_grids_below_level(self, left_edge, right_edge, level):
+        # We discard grids if they are ABOVE the level
+        mask = na.empty(self.grids.size, dtype='int32')
+        get_box_grids_below_level(left_edge, right_edge,
+                            level,
+                            self.grid_left_edge, self.grid_right_edge,
+                            self.grid_levels, mask)
+        mask = mask.astype("bool")
+        return self.grids[mask], na.where(mask)
+
+    def get_periodic_box_grids_below_level(self, left_edge, right_edge, level):
+        left_edge = na.array(left_edge)
+        right_edge = na.array(right_edge)
+        mask = na.zeros(self.grids.shape, dtype='bool')
+        dl = self.parameter_file.domain_left_edge
+        dr = self.parameter_file.domain_right_edge
+        db = right_edge - left_edge
+        for off_x in [-1, 0, 1]:
+            nle = left_edge.copy()
+            nre = left_edge.copy()
+            nle[0] = dl[0] + (dr[0]-dl[0])*off_x + left_edge[0]
+            for off_y in [-1, 0, 1]:
+                nle[1] = dl[1] + (dr[1]-dl[1])*off_y + left_edge[1]
+                for off_z in [-1, 0, 1]:
+                    nle[2] = dl[2] + (dr[2]-dl[2])*off_z + left_edge[2]
+                    nre = nle + db
+                    g, gi = self.get_box_grids_below_level(nle, nre, level)
+                    mask[gi] = True
+        return self.grids[mask], na.where(mask)
+

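A condensed sketch of the periodic-shift enumeration in
get_periodic_box_grids_below_level, written for a domain with its left edge
at the origin (so the dl term drops out); each of the 27 offset combinations
shifts the query box by one domain width per axis:

    import numpy as na

    dl, dr = na.zeros(3), na.ones(3)     # domain edges
    left_edge = na.array([0.9, 0.9, 0.9])
    right_edge = na.array([1.1, 1.1, 1.1])
    db = right_edge - left_edge
    boxes = []
    for off_x in (-1, 0, 1):
        for off_y in (-1, 0, 1):
            for off_z in (-1, 0, 1):
                shift = (dr - dl) * na.array([off_x, off_y, off_z])
                nle = left_edge + shift
                boxes.append((nle, nle + db))
    print len(boxes)                     # 27 shifted copies of the box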

--- a/yt/data_objects/particle_io.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/data_objects/particle_io.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: KIPAC/SLAC/Stanford
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2009 Matthew Turk.  All Rights Reserved.
 


--- a/yt/data_objects/profiles.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/data_objects/profiles.py	Fri Sep 09 10:33:52 2011 -0400
@@ -5,7 +5,7 @@
 Affiliation: KIPAC/SLAC/Stanford
 Author: Samuel Skillman <samskillman at gmail.com>
 Affiliation: CASA, University of Colorado at Boulder
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2007-2011 Matthew Turk.  All Rights Reserved.
 
@@ -30,8 +30,8 @@
 
 from yt.funcs import *
 
-from yt.utilities.data_point_utilities import Bin2DProfile, \
-    Bin3DProfile
+from yt.utilities.data_point_utilities import \
+    Bin1DProfile, Bin2DProfile, Bin3DProfile
 from yt.utilities.parallel_tools.parallel_analysis_interface import \
     ParallelAnalysisInterface
 
@@ -239,22 +239,19 @@
         mi, inv_bin_indices = args # Args has the indices to use as input
         # check_cut is set if source != self._data_source
         # (i.e., lazy_reader)
-        source_data = self._get_field(source, field, check_cut)[mi]
-        if weight: weight_data = self._get_field(source, weight, check_cut)[mi]
+        source_data = self._get_field(source, field, check_cut)
+        if weight: weight_data = self._get_field(source, weight, check_cut)
+        else: weight_data = na.ones(source_data.shape, dtype='float64')
+        self.total_stuff = source_data.sum()
         binned_field = self._get_empty_field()
         weight_field = self._get_empty_field()
         used_field = self._get_empty_field()
-        # Now we perform the actual binning
-        for bin in inv_bin_indices.keys():
-            # temp_field is *all* the points from source that go into this bin
-            temp_field = source_data[inv_bin_indices[bin]]
-            if weight:
-                # now w_i * v_i and store sum(w_i)
-                weight_field[bin] = weight_data[inv_bin_indices[bin]].sum()
-                temp_field *= weight_data[inv_bin_indices[bin]]
-            binned_field[bin] = temp_field.sum()
-            # inv_bin_indices is a tuple of indices
-            if inv_bin_indices[bin][0].size > 0: used_field[bin] = 1
+        mi = args[0]
+        bin_indices_x = args[1].ravel().astype('int64')
+        source_data = source_data[mi]
+        weight_data = weight_data[mi]
+        Bin1DProfile(bin_indices_x, weight_data, source_data,
+                     weight_field, binned_field, used_field)
         # Fix for laziness, because at the *end* we will be
         # summing up all of the histograms and dividing by the
         # weights.  Accumulation likely doesn't work with weighted
@@ -270,26 +267,21 @@
             raise EmptyProfileData()
         # Truncate at boundaries.
         if self.end_collect:
-            mi = na.arange(source_data.size)
+            sd = source_data[:]
         else:
-            mi = na.where( (source_data > self._bins.min())
-                         & (source_data < self._bins.max()))
-        sd = source_data[mi]
+            mi = ((source_data > self._bins.min())
+               &  (source_data < self._bins.max()))
+            sd = source_data[mi]
         if sd.size == 0:
             raise EmptyProfileData()
         # Stick the bins into our fixed bins, set at initialization
         bin_indices = na.digitize(sd, self._bins)
         if self.end_collect: #limit the range of values to 0 and n_bins-1
-            bin_indices = na.minimum(na.maximum(1, bin_indices), self.n_bins) - 1
+            bin_indices = na.clip(bin_indices, 0, self.n_bins - 1)
         else: #throw away outside values
             bin_indices -= 1
           
-        # Now we set up our inverse bin indices
-        inv_bin_indices = {}
-        for bin in range(self[self.bin_field].size):
-            # Which fall into our bin?
-            inv_bin_indices[bin] = na.where(bin_indices == bin)
-        return (mi, inv_bin_indices)
+        return (mi, bin_indices)
 
     def choose_bins(self, bin_style):
         # Depending on the bin_style, choose from bin edges 0...N either:

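A pure-numpy sketch of the weighted 1-D binning that the new Bin1DProfile
call performs in C: for each bin, accumulate sum(w_i * v_i) and sum(w_i),
then divide at the end.  The data values are made up:

    import numpy as na

    bins = na.linspace(0.0, 1.0, 5)              # 4 bins
    source = na.array([0.1, 0.3, 0.35, 0.9])
    weight = na.array([1.0, 2.0, 1.0, 1.0])
    mi = (source > bins.min()) & (source < bins.max())
    bin_indices = na.digitize(source[mi], bins) - 1
    binned = na.zeros(bins.size - 1)
    wfield = na.zeros(bins.size - 1)
    for b, v, w in zip(bin_indices, source[mi], weight[mi]):
        binned[b] += w * v
        wfield[b] += w
    print binned / na.maximum(wfield, 1e-30)     # weighted mean per bin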

--- a/yt/data_objects/static_output.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/data_objects/static_output.py	Fri Sep 09 10:33:52 2011 -0400
@@ -4,7 +4,7 @@
 Presumably at some point EnzoRun will be absorbed into here.
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: KIPAC/SLAC/Stanford
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2007-2011 Matthew Turk, J. S. Oishi.  All Rights Reserved.
 


--- a/yt/data_objects/time_series.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/data_objects/time_series.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: KIPAC/SLAC/Stanford
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2010-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/data_objects/universal_fields.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/data_objects/universal_fields.py	Fri Sep 09 10:33:52 2011 -0400
@@ -5,7 +5,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: KIPAC/SLAC/Stanford
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2008-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/frontends/api.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/frontends/api.py	Fri Sep 09 10:33:52 2011 -0400
@@ -7,7 +7,7 @@
 Affiliation: KIPAC/SLAC/Stanford
 Author: Britton Smith <brittonsmith at gmail.com>
 Affiliation: MSU
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2010-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/frontends/art/__init__.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/frontends/art/__init__.py	Fri Sep 09 10:33:52 2011 -0400
@@ -7,7 +7,7 @@
 Affiliation: KIPAC/SLAC/Stanford
 Author: Britton Smith <brittonsmith at gmail.com>
 Affiliation: MSU
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2010-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/frontends/art/api.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/frontends/art/api.py	Fri Sep 09 10:33:52 2011 -0400
@@ -7,7 +7,7 @@
 Affiliation: KIPAC/SLAC/Stanford
 Author: Britton Smith <brittonsmith at gmail.com>
 Affiliation: MSU
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2010-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/frontends/art/data_structures.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/frontends/art/data_structures.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: UCSD
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2010-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/frontends/art/fields.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/frontends/art/fields.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: UCSD
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2010-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/frontends/art/io.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/frontends/art/io.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: KIPAC/SLAC/Stanford
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2007-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/frontends/castro/__init__.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/frontends/castro/__init__.py	Fri Sep 09 10:33:52 2011 -0400
@@ -7,7 +7,7 @@
 Affiliation: KIPAC/SLAC/Stanford
 Author: Britton Smith <brittonsmith at gmail.com>
 Affiliation: MSU
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2010 Matthew Turk.  All Rights Reserved.
 


--- a/yt/frontends/castro/api.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/frontends/castro/api.py	Fri Sep 09 10:33:52 2011 -0400
@@ -7,7 +7,7 @@
 Affiliation: KIPAC/SLAC/Stanford
 Author: Britton Smith <brittonsmith at gmail.com>
 Affiliation: MSU
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2010 Matthew Turk.  All Rights Reserved.
 


--- a/yt/frontends/castro/data_structures.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/frontends/castro/data_structures.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: J. S. Oishi <jsoishi at gmail.com>
 Affiliation: KIPAC/SLAC/Stanford
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2008-2010 J. S. Oishi.  All Rights Reserved.
 


--- a/yt/frontends/castro/definitions.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/frontends/castro/definitions.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: J. S. Oishi <jsoishi at gmail.com>
 Affiliation: KIPAC/SLAC/Stanford
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2008-2010 J.S. Oishi.  All Rights Reserved.
 


--- a/yt/frontends/castro/fields.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/frontends/castro/fields.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: J. S. Oishi <jsoishi at gmail.com>
 Affiliation: UC Berkeley
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2008-2010 J. S. Oishi, Matthew Turk.  All Rights Reserved.
 


--- a/yt/frontends/castro/io.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/frontends/castro/io.py	Fri Sep 09 10:33:52 2011 -0400
@@ -4,7 +4,7 @@
 Author: Matthew Turk <matthewturk at gmail.com>
 Author: J. S. Oishi <jsoishi at gmail.com>
 Affiliation: KIPAC/SLAC/Stanford
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2007-2010 Matthew Turk.  All Rights Reserved.
 


--- a/yt/frontends/chombo/data_structures.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/frontends/chombo/data_structures.py	Fri Sep 09 10:33:52 2011 -0400
@@ -4,7 +4,7 @@
 Author: Matthew Turk <matthewturk at gmail.com>
 Author: J. S. Oishi <jsoishi at gmail.com>
 Affiliation: KIPAC/SLAC/Stanford
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2008-2011 Matthew Turk, J. S. Oishi.  All Rights Reserved.
 


--- a/yt/frontends/chombo/definitions.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/frontends/chombo/definitions.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: J. S. Oishi <jsoishi at gmail.com>
 Affiliation: KIPAC/SLAC/Stanford
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2008-2011 J.S. Oishi.  All Rights Reserved.
 


--- a/yt/frontends/chombo/fields.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/frontends/chombo/fields.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: J. S. Oishi <jsoishi at gmail.com>
 Affiliation: KIPAC/SLAC/Stanford
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2009-2011 J. S. Oishi, Matthew Turk.  All Rights Reserved.
 


--- a/yt/frontends/chombo/io.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/frontends/chombo/io.py	Fri Sep 09 10:33:52 2011 -0400
@@ -4,7 +4,7 @@
 Author: Matthew Turk <matthewturk at gmail.com>
 Author: J. S. Oishi <jsoishi at gmail.com>
 Affiliation: KIPAC/SLAC/Stanford
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2007-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/frontends/enzo/api.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/frontends/enzo/api.py	Fri Sep 09 10:33:52 2011 -0400
@@ -7,7 +7,7 @@
 Affiliation: KIPAC/SLAC/Stanford
 Author: Britton Smith <brittonsmith at gmail.com>
 Affiliation: MSU
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2010-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/frontends/enzo/data_structures.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/frontends/enzo/data_structures.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: KIPAC/SLAC/Stanford
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2007-2011 Matthew Turk.  All Rights Reserved.
 
@@ -668,9 +668,9 @@
         self._hierarchy_class = EnzoHierarchy1D
         self._fieldinfo_fallback = Enzo1DFieldInfo
         self.domain_left_edge = \
-            na.concatenate([self["DomainLeftEdge"], [0.0, 0.0]])
+            na.concatenate([[self.domain_left_edge], [0.0, 0.0]])
         self.domain_right_edge = \
-            na.concatenate([self["DomainRightEdge"], [1.0, 1.0]])
+            na.concatenate([[self.domain_right_edge], [1.0, 1.0]])
 
     def _setup_2d(self):
         self._hierarchy_class = EnzoHierarchy2D
@@ -727,55 +727,70 @@
         self.unique_identifier = \
             int(os.stat(self.parameter_filename)[stat.ST_CTIME])
         lines = open(self.parameter_filename).readlines()
-        for lineI, line in enumerate(lines):
-            if line.find("#") >= 1: # Keep the commented lines
-                line=line[:line.find("#")]
-            line=line.strip().rstrip()
-            if len(line) < 2:
-                continue
-            try:
-                param, vals = map(string.strip,map(string.rstrip,
-                                                   line.split("=")))
-            except ValueError:
-                mylog.error("ValueError: '%s'", line)
-            if parameterDict.has_key(param):
-                t = map(parameterDict[param], vals.split())
-                if len(t) == 1:
-                    self.parameters[param] = t[0]
+        data_labels = {}
+        data_label_factors = {}
+        for line in (l.strip() for l in lines):
+            if len(line) < 2: continue
+            param, vals = (i.strip() for i in line.split("="))
+            # First we try to decipher what type of value it is.
+            vals = vals.split()
+            # Special case approaching.
+            if "(do" in vals: vals = vals[:1]
+            if len(vals) == 0:
+                pcast = str # Assume NULL output
+            else:
+                v = vals[0]
+                # Figure out if it's castable to floating point:
+                try:
+                    float(v)
+                except ValueError:
+                    pcast = str
                 else:
-                    self.parameters[param] = t
-                if param.endswith("Units") and not param.startswith("Temperature"):
-                    dataType = param[:-5]
-                    self.conversion_factors[dataType] = self.parameters[param]
-            elif param.startswith("#DataCGS"):
+                    if any("." in v or "e+" in v or "e-" in v for v in vals):
+                        pcast = float
+                    else:
+                        pcast = int
+            # Now we figure out what to do with it.
+            if param.endswith("Units") and not param.startswith("Temperature"):
+                dataType = param[:-5]
+                # This one better be a float.
+                self.conversion_factors[dataType] = float(vals[0])
+            if param.startswith("#DataCGS") or \
+                 param.startswith("#CGSConversionFactor"):
                 # Assume of the form: #DataCGSConversionFactor[7] = 2.38599e-26 g/cm^3
-                if lines[lineI-1].find("Label") >= 0:
-                    kk = lineI-1
-                elif lines[lineI-2].find("Label") >= 0:
-                    kk = lineI-2
-                dataType = lines[kk].split("=")[-1].rstrip().strip()
-                convFactor = float(line.split("=")[-1].split()[0])
-                self.conversion_factors[dataType] = convFactor
-            elif param.startswith("#CGSConversionFactor"):
-                dataType = param[20:].rstrip()
-                convFactor = float(line.split("=")[-1])
-                self.conversion_factors[dataType] = convFactor
-            elif param.startswith("DomainLeftEdge"):
-                self.domain_left_edge = \
-                self.parameters["DomainLeftEdge"] = \
-                    na.array([float(i) for i in vals.split()])
-            elif param.startswith("DomainRightEdge"):
-                self.domain_right_edge = \
-                self.parameters["DomainRightEdge"] = \
-                    na.array([float(i) for i in vals.split()])
+                # Which one does it belong to?
+                data_id = param[param.find("[")+1:param.find("]")]
+                data_label_factors[data_id] = float(vals[0])
+            if param.startswith("DataLabel"):
+                data_id = param[param.find("[")+1:param.find("]")]
+                data_labels[data_id] = vals[0]
+            if len(vals) == 0:
+                vals = ""
+            elif len(vals) == 1:
+                vals = pcast(vals[0])
+            else:
+                vals = na.array([pcast(i) for i in vals if i != "-99999"])
+            self.parameters[param] = vals
         for p, v in self._parameter_override.items():
             self.parameters[p] = v
         for p, v in self._conversion_override.items():
             self.conversion_factors[p] = v
+        for k, v in data_label_factors.items():
+            self.conversion_factors[data_labels[k]] = v
         self.refine_by = self.parameters["RefineBy"]
         self.dimensionality = self.parameters["TopGridRank"]
         self.domain_dimensions = self.parameters["TopGridDimensions"]
+        if self.dimensionality > 1:
+            self.domain_left_edge = na.array(self.parameters["DomainLeftEdge"]).copy()
+            self.domain_right_edge = na.array(self.parameters["DomainRightEdge"]).copy()
+        else:
+            self.domain_left_edge = na.array(self.parameters["DomainLeftEdge"])
+            self.domain_right_edge = na.array(self.parameters["DomainRightEdge"])
+
         self.current_time = self.parameters["InitialTime"]
+        # To be enabled when we can break old pickles:
+        #if "MetaDataSimulationUUID" in self.parameters:
+        #    self.unique_identifier = self.parameters["MetaDataSimulationUUID"]
         if "CurrentTimeIdentifier" in self.parameters:
             self.unique_identifier = self.parameters["CurrentTimeIdentifier"]
         if self.parameters["ComovingCoordinates"]:
@@ -809,7 +824,7 @@
             self._setup_nounits_units()
         self.time_units['1'] = 1
         self.units['1'] = 1
-        self.units['unitary'] = 1.0 / (self["DomainRightEdge"] - self["DomainLeftEdge"]).max()
+        self.units['unitary'] = 1.0 / (self.domain_right_edge - self.domain_left_edge).max()
         seconds = self["Time"]
         self.time_units['years'] = seconds / (365*3600*24.0)
         self.time_units['days']  = seconds / (3600*24.0)
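
The casting logic in the hunk above is dense, so here is a minimal
standalone sketch that replays it for a single "Name = values" parameter
line.  The helper name cast_parameter_value is hypothetical and exists
only for illustration; the behavior mirrors the diff.

    import numpy as na  # the codebase aliases numpy as "na"

    def cast_parameter_value(line):
        param, vals = (i.strip() for i in line.split("="))
        vals = vals.split()
        if len(vals) == 0:
            pcast = str  # NULL output; stored as an empty string below
        else:
            try:
                float(vals[0])
            except ValueError:
                pcast = str
            else:
                # Floats carry a decimal point or an exponent; otherwise int.
                if any("." in v or "e+" in v or "e-" in v for v in vals):
                    pcast = float
                else:
                    pcast = int
        if len(vals) == 0:
            return param, ""
        if len(vals) == 1:
            return param, pcast(vals[0])
        return param, na.array([pcast(i) for i in vals if i != "-99999"])

    # cast_parameter_value("TopGridRank = 3") -> ("TopGridRank", 3)
    # cast_parameter_value("DomainLeftEdge = 0.0 0.0 0.0")
    #     -> ("DomainLeftEdge", array([ 0.,  0.,  0.]))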


--- a/yt/frontends/enzo/definitions.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/frontends/enzo/definitions.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: KIPAC/SLAC/Stanford
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 
 License:
   Copyright (C) 2007-2011 Matthew Turk.  All Rights Reserved.


--- a/yt/frontends/enzo/fields.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/frontends/enzo/fields.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: KIPAC/SLAC/Stanford
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2008-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/frontends/enzo/io.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/frontends/enzo/io.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: KIPAC/SLAC/Stanford
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2007-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/frontends/enzo/misc.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/frontends/enzo/misc.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: KIPAC/SLAC/Stanford
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2008-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/frontends/flash/__init__.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/frontends/flash/__init__.py	Fri Sep 09 10:33:52 2011 -0400
@@ -7,7 +7,7 @@
 Affiliation: KIPAC/SLAC/Stanford
 Author: Britton Smith <brittonsmith at gmail.com>
 Affiliation: MSU
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2010-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/frontends/flash/api.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/frontends/flash/api.py	Fri Sep 09 10:33:52 2011 -0400
@@ -7,7 +7,7 @@
 Affiliation: KIPAC/SLAC/Stanford
 Author: Britton Smith <brittonsmith at gmail.com>
 Affiliation: MSU
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2010-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/frontends/flash/data_structures.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/frontends/flash/data_structures.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: UCSD
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2010-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/frontends/flash/fields.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/frontends/flash/fields.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: UCSD
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2010-2011 Matthew Turk, John ZuHone.  All Rights Reserved.
 


--- a/yt/frontends/flash/io.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/frontends/flash/io.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: UCSD
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2010-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/frontends/gadget/__init__.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/frontends/gadget/__init__.py	Fri Sep 09 10:33:52 2011 -0400
@@ -7,7 +7,7 @@
 Affiliation: KIPAC/SLAC/Stanford
 Author: Britton Smith <brittonsmith at gmail.com>
 Affiliation: MSU
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2010-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/frontends/gadget/api.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/frontends/gadget/api.py	Fri Sep 09 10:33:52 2011 -0400
@@ -7,7 +7,7 @@
 Affiliation: KIPAC/SLAC/Stanford
 Author: Britton Smith <brittonsmith at gmail.com>
 Affiliation: MSU
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2010-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/frontends/gadget/data_structures.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/frontends/gadget/data_structures.py	Fri Sep 09 10:33:52 2011 -0400
@@ -5,7 +5,7 @@
 Affiliation: UCSD
 Author: Chris Moody <cemoody at ucsc.edu>
 Affiliation: UCSC
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2007-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/frontends/gadget/fields.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/frontends/gadget/fields.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Christopher E Moody <juxtaposicion at gmail.com>
 Affiliation: UC Santa Cruz
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2010-2011 Christopher E Moody, Matthew Turk.  All Rights Reserved.
 


--- a/yt/frontends/gadget/io.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/frontends/gadget/io.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Christopher E Moody <juxtaposicion at gmail.com>
 Affiliation: UC Santa Cruz
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2010-2011 Christopher E Moody, Matthew Turk.  All Rights Reserved.
 


--- a/yt/frontends/gdf/data_structures.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/frontends/gdf/data_structures.py	Fri Sep 09 10:33:52 2011 -0400
@@ -4,7 +4,7 @@
 Author: Matthew Turk <matthewturk at gmail.com>
 Author: J. S. Oishi <jsoishi at gmail.com>
 Affiliation: KIPAC/SLAC/Stanford
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2008-2011 Matthew Turk, J. S. Oishi.  All Rights Reserved.
 


--- a/yt/frontends/gdf/definitions.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/frontends/gdf/definitions.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: J. S. Oishi <jsoishi at gmail.com>
 Affiliation: KIPAC/SLAC/Stanford
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2008-2011 J.S. Oishi.  All Rights Reserved.
 


--- a/yt/frontends/gdf/fields.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/frontends/gdf/fields.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: J. S. Oishi <jsoishi at gmail.com>
 Affiliation: KIPAC/SLAC/Stanford
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2009-2011 J. S. Oishi, Matthew Turk.  All Rights Reserved.
 


--- a/yt/frontends/gdf/io.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/frontends/gdf/io.py	Fri Sep 09 10:33:52 2011 -0400
@@ -4,7 +4,7 @@
 Author: Matthew Turk <matthewturk at gmail.com>
 Author: J. S. Oishi <jsoishi at gmail.com>
 Affiliation: KIPAC/SLAC/Stanford
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2007-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/frontends/maestro/__init__.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/frontends/maestro/__init__.py	Fri Sep 09 10:33:52 2011 -0400
@@ -9,7 +9,7 @@
 Affiliation: MSU
 Author: Chris Malone <chris.m.malone at gmail.com>
 Affiliation: SUNY Stony Brook
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2010-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/frontends/maestro/api.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/frontends/maestro/api.py	Fri Sep 09 10:33:52 2011 -0400
@@ -9,7 +9,7 @@
 Affiliation: MSU
 Author: Chris Malone <chris.m.malone at gmail.com>
 Affiliation: SUNY Stony Brook
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2010-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/frontends/maestro/data_structures.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/frontends/maestro/data_structures.py	Fri Sep 09 10:33:52 2011 -0400
@@ -5,7 +5,7 @@
 Affiliation: KIPAC/SLAC/Stanford
 Author: Chris Malone <chris.m.malone at gmail.com>
 Affiliation: SUNY Stony Brook
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2008-2011 J. S. Oishi.  All Rights Reserved.
 


--- a/yt/frontends/maestro/definitions.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/frontends/maestro/definitions.py	Fri Sep 09 10:33:52 2011 -0400
@@ -6,7 +6,7 @@
 Affiliation: KIPAC/SLAC/Stanford
 Author: Chris Malone <chris.m.malone at gmail.com>
 Affiliation: SUNY Stony Brook
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2008-2011 J.S. Oishi.  All Rights Reserved.
 


--- a/yt/frontends/maestro/fields.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/frontends/maestro/fields.py	Fri Sep 09 10:33:52 2011 -0400
@@ -5,7 +5,7 @@
 Affiliation: UC Berkeley
 Author: Chris Malone <chris.m.malone at gmail.com>
 Affiliation: SUNY Stony Brook
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2008-2011 J. S. Oishi, Matthew Turk.  All Rights Reserved.
 


--- a/yt/frontends/maestro/io.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/frontends/maestro/io.py	Fri Sep 09 10:33:52 2011 -0400
@@ -7,7 +7,7 @@
 Affiliation: KIPAC/SLAC/Stanford
 Author: Chris Malone <chris.m.malone at gmail.com>
 Affiliation: SUNY Stony Brook
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2007-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/frontends/nyx/api.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/frontends/nyx/api.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Casey W. Stark <caseywstark at gmail.com>
 Affiliation: UC Berkeley
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2011 Casey W. Stark, Matthew Turk.  All Rights Reserved.
 


--- a/yt/frontends/nyx/data_structures.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/frontends/nyx/data_structures.py	Fri Sep 09 10:33:52 2011 -0400
@@ -5,7 +5,7 @@
 Affiliation: UC Berkeley
 Author: J. S. Oishi <jsoishi at gmail.com>
 Affiliation: KIPAC/SLAC/Stanford
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2011 Casey W. Stark, J. S. Oishi, Matthew Turk.  All Rights
   Reserved.
@@ -547,11 +547,17 @@
         """
         self.storage_filename = storage_filename
         self.parameter_filename = param_filename
-        self.parameter_file_path = os.path.abspath(self.parameter_filename)
         self.fparameter_filename = fparam_filename
-        self.fparameter_file_path = os.path.abspath(self.fparameter_filename)
+
         self.path = os.path.abspath(plotname)  # data folder
 
+        # silly inputs and probin file thing (this is on the Nyx todo list)
+        self.parameter_file_path = os.path.join(os.path.dirname(self.path),
+                                                self.parameter_filename)
+
+        self.fparameter_file_path = os.path.join(os.path.dirname(self.path),
+                                                 self.fparameter_filename)
+
         self.fparameters = {}
 
         # @todo: quick fix...
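
A small illustration of the path change above: the inputs/probin files are
now resolved next to the plot directory rather than relative to whatever
directory yt happens to be run from.  The paths here are invented for the
example.

    import os

    path = os.path.abspath("/data/run/plt00010")  # the plot directory
    param_filename = "inputs"

    # Old behavior: os.path.abspath() resolves against the cwd.
    old_path = os.path.abspath(param_filename)    # e.g. $PWD/inputs

    # New behavior: resolve beside the plot directory itself.
    new_path = os.path.join(os.path.dirname(path), param_filename)
    # -> "/data/run/inputs"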


--- a/yt/frontends/nyx/definitions.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/frontends/nyx/definitions.py	Fri Sep 09 10:33:52 2011 -0400
@@ -5,7 +5,7 @@
 Affiliation: UC Berkeley
 Author: J. S. Oishi <jsoishi at gmail.com>
 Affiliation: KIPAC/SLAC/Stanford
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2011 Casey W. Stark, J. S. Oishi, Matthew Turk.  All Rights
   Reserved.


--- a/yt/frontends/nyx/fields.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/frontends/nyx/fields.py	Fri Sep 09 10:33:52 2011 -0400
@@ -5,7 +5,7 @@
 Affiliation: UC Berkeley
 Author: J. S. Oishi <jsoishi at gmail.com>
 Affiliation: KIPAC/SLAC/Stanford
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2011 Casey W. Stark, J. S. Oishi, Matthew Turk.  All Rights
   Reserved.


--- a/yt/frontends/nyx/io.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/frontends/nyx/io.py	Fri Sep 09 10:33:52 2011 -0400
@@ -6,7 +6,7 @@
 Author: Matthew Turk <matthewturk at gmail.com>
 Author: J. S. Oishi <jsoishi at gmail.com>
 Affiliation: KIPAC/SLAC/Stanford
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2011 Casey W. Stark, Matthew Turk.  All Rights Reserved.
 


--- a/yt/frontends/nyx/utils.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/frontends/nyx/utils.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Casey W. Stark <caseywstark at gmail.com>
 Affiliation: UC Berkeley
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2011 Casey W. Stark, Matthew Turk.  All Rights Reserved.
 


--- a/yt/frontends/orion/__init__.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/frontends/orion/__init__.py	Fri Sep 09 10:33:52 2011 -0400
@@ -7,7 +7,7 @@
 Affiliation: KIPAC/SLAC/Stanford
 Author: Britton Smith <brittonsmith at gmail.com>
 Affiliation: MSU
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2010-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/frontends/orion/api.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/frontends/orion/api.py	Fri Sep 09 10:33:52 2011 -0400
@@ -7,7 +7,7 @@
 Affiliation: KIPAC/SLAC/Stanford
 Author: Britton Smith <brittonsmith at gmail.com>
 Affiliation: MSU
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2010-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/frontends/orion/data_structures.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/frontends/orion/data_structures.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: J. S. Oishi <jsoishi at gmail.com>
 Affiliation: KIPAC/SLAC/Stanford
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2008-2011 J. S. Oishi.  All Rights Reserved.
 
@@ -424,16 +424,12 @@
     def __init__(self, plotname, paramFilename=None, fparamFilename=None,
                  data_style='orion_native', paranoia=False,
                  storage_filename = None):
-        """need to override for Orion file structure.
-
-        the paramfile is usually called "inputs"
+        """
+        The paramfile is usually called "inputs"
         and there may be a fortran inputs file usually called "probin"
         plotname here will be a directory name
-        as per BoxLib, data_style will be one of
-         * Native
-         * IEEE (not implemented in yt)
-         * ASCII (not implemented in yt)
-
+        as per BoxLib, data_style will be Native (implemented here), IEEE (not
+        yet implemented), or ASCII (not yet implemented).
         """
         self.storage_filename = storage_filename
         self.paranoid_read = paranoia


--- a/yt/frontends/orion/definitions.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/frontends/orion/definitions.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: J. S. Oishi <jsoishi at gmail.com>
 Affiliation: KIPAC/SLAC/Stanford
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2008-2011 J.S. Oishi.  All Rights Reserved.
 


--- a/yt/frontends/orion/fields.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/frontends/orion/fields.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: J. S. Oishi <jsoishi at gmail.com>
 Affiliation: UC Berkeley
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2008-2011 J. S. Oishi, Matthew Turk.  All Rights Reserved.
 


--- a/yt/frontends/orion/io.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/frontends/orion/io.py	Fri Sep 09 10:33:52 2011 -0400
@@ -4,7 +4,7 @@
 Author: Matthew Turk <matthewturk at gmail.com>
 Author: J. S. Oishi <jsoishi at gmail.com>
 Affiliation: KIPAC/SLAC/Stanford
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2007-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/frontends/ramses/__init__.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/frontends/ramses/__init__.py	Fri Sep 09 10:33:52 2011 -0400
@@ -7,7 +7,7 @@
 Affiliation: KIPAC/SLAC/Stanford
 Author: Britton Smith <brittonsmith at gmail.com>
 Affiliation: MSU
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2010-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/frontends/ramses/_ramses_reader.pyx	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/frontends/ramses/_ramses_reader.pyx	Fri Sep 09 10:33:52 2011 -0400
@@ -5,7 +5,7 @@
 Affiliation: UCSD
 Author: Oliver Hahn <ohahn at stanford.edu>
 Affiliation: KIPAC / Stanford
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2010-2011 Matthew Turk.  All Rights Reserved.
 
@@ -401,6 +401,7 @@
         self.hydro_datas = <RAMSES_hydro_data ***>\
                        malloc(sizeof(RAMSES_hydro_data**) * self.rsnap.m_header.ncpu)
         self.ndomains = self.rsnap.m_header.ncpu
+        
         # Note we don't do ncpu + 1
         for idomain in range(self.rsnap.m_header.ncpu):
             # we don't delete local_tree
@@ -422,7 +423,7 @@
         cdef string *field_name
         self.field_names = []
         self.field_ind = {}
-        self.loaded = <int **> malloc(sizeof(int) * local_hydro_data.m_nvars)
+        self.loaded = <int **> malloc(sizeof(int*) * self.ndomains)
         for idomain in range(self.ndomains):
             self.loaded[idomain] = <int *> malloc(
                 sizeof(int) * local_hydro_data.m_nvars)
@@ -435,8 +436,14 @@
             self.field_ind[self.field_names[-1]] = ifield
         # This all needs to be cleaned up in the deallocator
 
+    def get_domain_boundaries(self):
+        bounds = []
+        for i in range(self.rsnap.m_header.ncpu):
+            bounds.append((self.rsnap.ind_min[i],
+                           self.rsnap.ind_max[i]))
+        return bounds
+
     def __dealloc__(self):
-        import traceback; traceback.print_stack()
         cdef int idomain, ifield
         # To ensure that 'delete' is used, not 'free',
         # we allocate temporary variables.
@@ -489,12 +496,12 @@
 
         return cell_count
 
-    def ensure_loaded(self, char *varname, int domain_index):
+    cdef ensure_loaded(self, char *varname, int domain_index, int varindex = -1):
         # this domain_index must be zero-indexed
-        cdef int varindex = self.field_ind[varname]
-        cdef string *field_name = new string(varname)
+        if varindex == -1: varindex = self.field_ind[varname]
         if self.loaded[domain_index][varindex] == 1:
             return
+        cdef string *field_name = new string(varname)
         print "READING FROM DISK", varname, domain_index, varindex
         self.hydro_datas[domain_index][varindex].read(deref(field_name))
         self.loaded[domain_index][varindex] = 1
@@ -505,12 +512,13 @@
         # We delete and re-create
         cdef int varindex = self.field_ind[varname]
         cdef string *field_name = new string(varname)
-        if self.loaded[domain_index][varindex] == 0: return
-        cdef RAMSES_hydro_data *temp_hdata = self.hydro_datas[domain_index][varindex]
-        del temp_hdata
-        self.hydro_datas[domain_index - 1][varindex] = \
-            new RAMSES_hydro_data(deref(self.trees[domain_index]))
-        self.loaded[domain_index][varindex] = 0
+        cdef RAMSES_hydro_data *temp_hdata
+        if self.loaded[domain_index][varindex] == 1:
+            temp_hdata = self.hydro_datas[domain_index][varindex]
+            del temp_hdata
+            self.hydro_datas[domain_index][varindex] = \
+                new RAMSES_hydro_data(deref(self.trees[domain_index]))
+            self.loaded[domain_index][varindex] = 0
         del field_name
 
     def get_file_info(self):
@@ -648,14 +656,16 @@
             data[i] = local_hydro_data.m_var_array[level][8*grid_id+i]
         return tr
 
-    def read_grid(self, char *field, 
+    @cython.cdivision(True)
+    @cython.boundscheck(False)
+    @cython.wraparound(False)
+    def read_grid(self, int varindex, char *field,
                   np.ndarray[np.int64_t, ndim=1] start_index,
                   np.ndarray[np.int32_t, ndim=1] grid_dims,
                   np.ndarray[np.float64_t, ndim=3] data,
                   np.ndarray[np.int32_t, ndim=3] filled,
                   int level, int ref_factor,
                   np.ndarray[np.int64_t, ndim=2] component_grid_info):
-        cdef int varindex = self.field_ind[field]
         cdef RAMSES_tree *local_tree = NULL
         cdef RAMSES_hydro_data *local_hydro_data = NULL
 
@@ -674,7 +684,8 @@
             end_index[i] = start_index[i] + grid_dims[i]
         for gi in range(component_grid_info.shape[0]):
             domain = component_grid_info[gi,0]
-            self.ensure_loaded(field, domain - 1)
+            if domain == 0: continue
+            self.ensure_loaded(field, domain - 1, varindex)
             local_tree = self.trees[domain - 1]
             local_hydro_data = self.hydro_datas[domain - 1][varindex]
             offset = component_grid_info[gi,1]
@@ -789,19 +800,20 @@
                         i = i2 - self.left_edge[2]
                         sig2[i] += 1
                         efficiency += 1
-                        used += 1
                         mask[gi] = 1
+            used += mask[gi]
         cdef np.ndarray[np.int64_t, ndim=2] gfl
         gfl = np.zeros((used, 6), 'int64')
         used = 0
         self.grid_file_locations = gfl
         for gi in range(ng):
             if mask[gi] == 1:
-                grid_file_locations[gi,3] = left_edges[gi, 0]
-                grid_file_locations[gi,4] = left_edges[gi, 1]
-                grid_file_locations[gi,5] = left_edges[gi, 2]
+                grid_file_locations[gi,3] = left_edges[gi,0]
+                grid_file_locations[gi,4] = left_edges[gi,1]
+                grid_file_locations[gi,5] = left_edges[gi,2]
                 for i in range(6):
                     gfl[used, i] = grid_file_locations[gi,i]
+                used += 1
          
         self.dd = np.ones(3, dtype='int64')
         for i in range(3):
@@ -1046,7 +1058,7 @@
     if L.efficiency > 1.0: raise RuntimeError
     if L.efficiency <= 0.0: rv_l = []
     elif L.efficiency < min_eff:
-        rv_l = recursive_patch_splitting(L, dims_r, li_r,
+        rv_l = recursive_patch_splitting(L, dims_l, li_l,
                 left_index, fl, num_deep + 1)
     else:
         rv_l = [L]
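
Two of the fixes above are easy to miss in the Cython.  First, the
"loaded" table is per-domain, per-field, so the outer allocation must be
sized by the domain count with sizeof(int*), not by the field count with
sizeof(int).  Second, ensure_loaded() now caches, so each (domain, field)
pair hits the disk at most once.  A rough Python analogue, with
illustrative names and a stub standing in for the real read:

    ndomains, nvars = 4, 8  # placeholder sizes

    # Equivalent of the corrected malloc: one row of flags per domain.
    loaded = [[0] * nvars for _ in range(ndomains)]

    def read_from_disk(domain_index, varindex):  # stand-in for the C++ read
        print "READING FROM DISK", domain_index, varindex

    def ensure_loaded(domain_index, varindex):
        if loaded[domain_index][varindex] == 1:
            return  # already resident; skip the disk
        read_from_disk(domain_index, varindex)
        loaded[domain_index][varindex] = 1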


--- a/yt/frontends/ramses/api.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/frontends/ramses/api.py	Fri Sep 09 10:33:52 2011 -0400
@@ -7,7 +7,7 @@
 Affiliation: KIPAC/SLAC/Stanford
 Author: Britton Smith <brittonsmith at gmail.com>
 Affiliation: MSU
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2010-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/frontends/ramses/data_structures.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/frontends/ramses/data_structures.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: UCSD
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2010-2011 Matthew Turk.  All Rights Reserved.
 
@@ -34,6 +34,7 @@
       AMRHierarchy
 from yt.data_objects.static_output import \
       StaticOutput
+
 try:
     import _ramses_reader
 except ImportError:
@@ -66,6 +67,7 @@
         self.Parent = []
         self.Children = []
         self.locations = locations
+        self.domain = locations[0,0]
         self.start_index = start_index.copy()
 
     def _setup_dx(self):
@@ -189,16 +191,20 @@
                         level, unique_indices.size, hilbert_indices.size)
             locs, lefts = _ramses_reader.get_array_indices_lists(
                         hilbert_indices, unique_indices, left_index, fl)
-            for dleft_index, dfl in zip(lefts, locs):
-                initial_left = na.min(dleft_index, axis=0)
-                idims = (na.max(dleft_index, axis=0) - initial_left).ravel()+2
-                psg = _ramses_reader.ProtoSubgrid(initial_left, idims,
-                                dleft_index, dfl)
-                if psg.efficiency <= 0: continue
-                self.num_deep = 0
-                psgs.extend(_ramses_reader.recursive_patch_splitting(
-                    psg, idims, initial_left, 
-                    dleft_index, dfl))
+            for ddleft_index, ddfl in zip(lefts, locs):
+                for idomain in na.unique(ddfl[:,0]):
+                    dom_ind = ddfl[:,0] == idomain
+                    dleft_index = ddleft_index[dom_ind,:]
+                    dfl = ddfl[dom_ind,:]
+                    initial_left = na.min(dleft_index, axis=0)
+                    idims = (na.max(dleft_index, axis=0) - initial_left).ravel()+2
+                    psg = _ramses_reader.ProtoSubgrid(initial_left, idims,
+                                    dleft_index, dfl)
+                    if psg.efficiency <= 0: continue
+                    self.num_deep = 0
+                    psgs.extend(_ramses_reader.recursive_patch_splitting(
+                        psg, idims, initial_left, 
+                        dleft_index, dfl))
             mylog.debug("Done with level % 2i", level)
             pbar.finish()
             self.proto_grids.append(psgs)
@@ -227,6 +233,7 @@
                 self.grid_levels[gi,:] = level
                 grids.append(self.grid(gi, self, level, fl, props[0,:]))
                 gi += 1
+        self.proto_grids = []
         self.grids = na.array(grids, dtype='object')
 
     def _populate_grid_objects(self):
@@ -240,8 +247,20 @@
                                 self.grid_levels, mask)
             parents = self.grids[mask.astype("bool")]
             if len(parents) > 0:
-                g.Parent.extend(parents.tolist())
+                g.Parent.extend((p for p in parents.tolist()
+                        if p.locations[0,0] == g.locations[0,0]))
                 for p in parents: p.Children.append(g)
+            # Now we do overlapping siblings; note that one has to "win" with
+            # siblings, so we assume the lower ID one will "win"
+            get_box_grids_level(self.grid_left_edge[gi,:],
+                                self.grid_right_edge[gi,:],
+                                g.Level,
+                                self.grid_left_edge, self.grid_right_edge,
+                                self.grid_levels, mask, gi)
+            mask[gi] = False
+            siblings = self.grids[mask.astype("bool")]
+            if len(siblings) > 0:
+                g.OverlappingSiblings = siblings.tolist()
             g._prepare_grid()
             g._setup_dx()
         self.max_level = self.grid_levels.max()
@@ -260,8 +279,8 @@
     
     def __init__(self, filename, data_style='ramses',
                  storage_filename = None):
-        if _ramses_reader is None:
-            import _ramses_reader
+        # Here we want to initiate a traceback if the reader is not built.
+        import _ramses_reader
         StaticOutput.__init__(self, filename, data_style)
         self.storage_filename = storage_filename
 
@@ -314,6 +333,14 @@
                                            * rheader['boxlen']
         self.domain_left_edge = na.zeros(3, dtype='float64')
         self.domain_dimensions = na.ones(3, dtype='int32') * 2
+        # This is likely not true, but I am not sure how to otherwise
+        # distinguish them.
+        mylog.warning("No current mechanism of distinguishing cosmological simulations in RAMSES!")
+        self.cosmological_simulation = 1
+        self.current_redshift = (1.0 / rheader["aexp"]) - 1.0
+        self.omega_lambda = rheader["omega_l"]
+        self.omega_matter = rheader["omega_m"]
+        self.hubble_constant = rheader["H0"]
 
     @classmethod
     def _is_valid(self, *args, **kwargs):
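
The hierarchy change above subdivides each Hilbert bucket by domain before
building proto-subgrids, so no subgrid straddles two domain files.  A
condensed sketch of that inner loop with made-up arrays; column 0 of the
file-location array holds the domain, as in the diff:

    import numpy as na

    ddfl = na.array([[1, 0], [1, 8], [2, 0], [2, 16]])  # (domain, offset)
    ddleft_index = na.array([[0,0,0], [2,0,0], [4,4,0], [6,4,0]])

    for idomain in na.unique(ddfl[:,0]):
        dom_ind = ddfl[:,0] == idomain
        dleft_index = ddleft_index[dom_ind,:]
        dfl = ddfl[dom_ind,:]
        initial_left = na.min(dleft_index, axis=0)
        idims = (na.max(dleft_index, axis=0) - initial_left).ravel() + 2
        # ... hand (initial_left, idims, dleft_index, dfl) to ProtoSubgrid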


--- a/yt/frontends/ramses/fields.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/frontends/ramses/fields.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: UCSD
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2010-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/frontends/ramses/io.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/frontends/ramses/io.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: KIPAC/SLAC/Stanford
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2007-2011 Matthew Turk.  All Rights Reserved.
 
@@ -23,10 +23,12 @@
   along with this program.  If not, see <http://www.gnu.org/licenses/>.
 """
 
+from collections import defaultdict
 import numpy as na
 
 from yt.utilities.io_handler import \
     BaseIOHandler
+from yt.utilities.logger import ytLogger as mylog
 
 class IOHandlerRAMSES(BaseIOHandler):
     _data_style = "ramses"
@@ -41,10 +43,11 @@
         to_fill = grid.ActiveDimensions.prod()
         grids = [grid]
         l_delta = 0
+        varindex = self.ramses_tree.field_ind[field]
         while to_fill > 0 and len(grids) > 0:
             next_grids = []
             for g in grids:
-                to_fill -= self.ramses_tree.read_grid(field,
+                to_fill -= self.ramses_tree.read_grid(varindex, field,
                         grid.get_global_startindex(), grid.ActiveDimensions,
                         tr, filled, g.Level, 2**l_delta, g.locations)
                 next_grids += g.Parent
@@ -57,3 +60,21 @@
         sl[axis] = slice(coord, coord + 1)
         return self._read_data_set(grid, field)[sl]
 
+    def preload(self, grids, sets):
+        if len(grids) == 0: return
+        domain_keys = defaultdict(list)
+        pf_field_list = grids[0].pf.h.field_list
+        sets = [dset for dset in list(sets) if dset in pf_field_list]
+        exc = self._read_exception
+        for g in grids:
+            domain_keys[g.domain].append(g)
+        for domain, grids in domain_keys.items():
+            mylog.debug("Starting read of domain %s (%s)", domain, sets)
+            for field in sets:
+                for g in grids:
+                    self.queue[g.id][field] = self._read_data_set(g, field)
+                print "Clearing", field, domain
+                self.ramses_tree.clear_tree(field, domain - 1)
+        mylog.debug("Finished read of %s", sets)
+
+    def modify(self, data): return data
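
The new preload() above buckets grids by domain so each RAMSES domain file
is read once per field and then cleared before moving on.  The sketch
below strips it to that core; read_one and clear are hypothetical
stand-ins for _read_data_set and ramses_tree.clear_tree:

    from collections import defaultdict

    def preload_sketch(grids, fields, read_one, clear):
        domain_keys = defaultdict(list)
        for g in grids:
            domain_keys[g.domain].append(g)
        queue = {}
        for domain, dgrids in domain_keys.items():
            for field in fields:
                for g in dgrids:
                    queue[(g.id, field)] = read_one(g, field)
                clear(field, domain - 1)  # domains are 1-indexed on disk
        return queue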


--- a/yt/frontends/stream/api.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/frontends/stream/api.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: Columbia University
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/frontends/stream/data_structures.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/frontends/stream/data_structures.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: Columbia University
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/frontends/stream/definitions.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/frontends/stream/definitions.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: Columbia University
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 
 License:
   Copyright (C) 2011 Matthew Turk.  All Rights Reserved.


--- a/yt/frontends/stream/fields.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/frontends/stream/fields.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: KIPAC/SLAC/Stanford
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2008-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/frontends/stream/io.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/frontends/stream/io.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: KIPAC/SLAC/Stanford
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2007-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/frontends/stream/misc.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/frontends/stream/misc.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: KIPAC/SLAC/Stanford
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2008-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/frontends/tiger/__init__.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/frontends/tiger/__init__.py	Fri Sep 09 10:33:52 2011 -0400
@@ -7,7 +7,7 @@
 Affiliation: KIPAC/SLAC/Stanford
 Author: Britton Smith <brittonsmith at gmail.com>
 Affiliation: MSU
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2010-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/frontends/tiger/api.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/frontends/tiger/api.py	Fri Sep 09 10:33:52 2011 -0400
@@ -7,7 +7,7 @@
 Affiliation: KIPAC/SLAC/Stanford
 Author: Britton Smith <brittonsmith at gmail.com>
 Affiliation: MSU
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2010-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/frontends/tiger/data_structures.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/frontends/tiger/data_structures.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: UCSD
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2010-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/frontends/tiger/fields.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/frontends/tiger/fields.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: UCSD
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2010-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/frontends/tiger/io.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/frontends/tiger/io.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: UCSD
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2010-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/funcs.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/funcs.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: KIPAC/SLAC/Stanford
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2007-2011 Matthew Turk.  All Rights Reserved.
 
@@ -137,17 +137,9 @@
     return resident * pagesize / (1024 * 1024) # return in megs
 
 def time_execution(func):
-    """
+    r"""
     Decorator for seeing how long a given function takes, depending on whether
     or not the global 'yt.timefunctions' config parameter is set.
-
-    This can be used like so:
-
-    .. code-block:: python
-
-       @time_execution
-    def some_longrunning_function(...):
-
     """
     @wraps(func)
     def wrapper(*arg, **kw):
@@ -387,14 +379,14 @@
     sys.__excepthook__(exc_type, exc, tb)
     import xmlrpclib, cStringIO
     p = xmlrpclib.ServerProxy(
-            "http://paste.enzotools.org/xmlrpc/",
+            "http://paste.yt-project.org/xmlrpc/",
             allow_none=True)
     s = cStringIO.StringIO()
     traceback.print_exception(exc_type, exc, tb, file=s)
     s = s.getvalue()
     ret = p.pastes.newPaste('pytb', s, None, '', '', True)
     print
-    print "Traceback pasted to http://paste.enzotools.org/show/%s" % (ret)
+    print "Traceback pasted to http://paste.yt-project.org/show/%s" % (ret)
     print
 
 def paste_traceback_detailed(exc_type, exc, tb):
@@ -409,11 +401,11 @@
     s = s.getvalue()
     print s
     p = xmlrpclib.ServerProxy(
-            "http://paste.enzotools.org/xmlrpc/",
+            "http://paste.yt-project.org/xmlrpc/",
             allow_none=True)
     ret = p.pastes.newPaste('text', s, None, '', '', True)
     print
-    print "Traceback pasted to http://paste.enzotools.org/show/%s" % (ret)
+    print "Traceback pasted to http://paste.yt-project.org/show/%s" % (ret)
     print
 
 def traceback_writer_hook(file_suffix = ""):


--- a/yt/gui/opengl_widgets/api.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/gui/opengl_widgets/api.py	Fri Sep 09 10:33:52 2011 -0400
@@ -7,7 +7,7 @@
 Affiliation: KIPAC/SLAC/Stanford
 Author: Britton Smith <brittonsmith at gmail.com>
 Affiliation: MSU
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2010-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/gui/reason/basic_repl.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/gui/reason/basic_repl.py	Fri Sep 09 10:33:52 2011 -0400
@@ -5,7 +5,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: Columbia University
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/gui/reason/bottle_mods.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/gui/reason/bottle_mods.py	Fri Sep 09 10:33:52 2011 -0400
@@ -4,7 +4,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: Columbia University
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2011 Matthew Turk.  All Rights Reserved.
 
@@ -99,7 +99,7 @@
 class YTRocketServer(ServerAdapter):
     server_info = {} # Hack to get back at instance vars
     def run(self, handler):
-        from rocket import Rocket
+        from yt.utilities.rocket import Rocket
         server = Rocket((self.host, self.port), 'wsgi', { 'wsgi_app' : handler })
         self.server_info[id(self)] = server
         server.start()


--- a/yt/gui/reason/extdirect_repl.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/gui/reason/extdirect_repl.py	Fri Sep 09 10:33:52 2011 -0400
@@ -4,7 +4,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: Columbia University
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2011 Matthew Turk.  All Rights Reserved.
 
@@ -384,23 +384,23 @@
     def paste_session(self):
         import xmlrpclib, cStringIO
         p = xmlrpclib.ServerProxy(
-            "http://paste.enzotools.org/xmlrpc/",
+            "http://paste.yt-project.org/xmlrpc/",
             allow_none=True)
         cs = cStringIO.StringIO()
         cs.write("\n######\n".join(self.executed_cell_texts))
         cs = cs.getvalue()
         ret = p.pastes.newPaste('python', cs, None, '', '', True)
-        site = "http://paste.enzotools.org/show/%s" % ret
+        site = "http://paste.yt-project.org/show/%s" % ret
         return {'status': 'SUCCESS', 'site': site}
 
     @lockit
     def paste_text(self, to_paste):
         import xmlrpclib, cStringIO
         p = xmlrpclib.ServerProxy(
-            "http://paste.enzotools.org/xmlrpc/",
+            "http://paste.yt-project.org/xmlrpc/",
             allow_none=True)
         ret = p.pastes.newPaste('python', to_paste, None, '', '', True)
-        site = "http://paste.enzotools.org/show/%s" % ret
+        site = "http://paste.yt-project.org/show/%s" % ret
         return {'status': 'SUCCESS', 'site': site}
 
     _api_key = 'f62d550859558f28c4c214136bc797c7'


--- a/yt/gui/reason/html/help.html	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/gui/reason/html/help.html	Fri Sep 09 10:33:52 2011 -0400
@@ -87,12 +87,12 @@
 <h1>yt:</h1><p>because your data isn't going to analyze itself!</p><ul class="quick_list">
-<li><a href="http://yt.enzotools.org/doc/">docs</a></li>
-<li><a href="http://yt.enzotools.org/wiki/Gallery">gallery</a> ( <a href="video_gallery.html">video</a> )</li>
-<li><a href="http://yt.enzotools.org/wiki">wiki</a></li>
-<li><a href="http://yt.enzotools.org/doc/orientation.html">quick start</a></li>
-<li><a href="http://yt.enzotools.org/newticket">report a bug</a></li>
-<li><a href="http://yt.enzotools.org/browser">source</a></li>
+<li><a href="http://yt-project.org/doc/">docs</a></li>
+<li><a href="http://yt-project.org/wiki/Gallery">gallery</a> ( <a href="video_gallery.html">video</a> )</li>
+<li><a href="http://yt-project.org/wiki">wiki</a></li>
+<li><a href="http://yt-project.org/doc/orientation.html">quick start</a></li>
+<li><a href="http://yt-project.org/newticket">report a bug</a></li>
+<li><a href="http://yt-project.org/browser">source</a></li><li><a href="principles.html">principles</a></li><li><a href="http://blog.enzotools.org/">development blog</a></li></ul>
@@ -130,7 +130,7 @@
 download a copy of your session on your local machine.  By clicking "Save" you
 can save a copy on the server on which Reason is running.  By clicking
 "Pastebin" you can send a copy of it to the <a
-href="http://paste.enzotools.org/">yt pastebin</a>.</p>
+href="http://paste.yt-project.org/">yt pastebin</a>.</p><p>If you use the command 'load_script' and supply it a file the server can
 find locally, it will read that file in and populate the contents of your next


--- a/yt/gui/reason/html/js/functions.js	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/gui/reason/html/js/functions.js	Fri Sep 09 10:33:52 2011 -0400
@@ -9,7 +9,7 @@
 Affiliation: MSU
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: Columbia University
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/gui/reason/html/js/menu_items.js	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/gui/reason/html/js/menu_items.js	Fri Sep 09 10:33:52 2011 -0400
@@ -9,7 +9,7 @@
 Affiliation: MSU
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: Columbia University
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2011 Matthew Turk.  All Rights Reserved.
 
@@ -91,7 +91,7 @@
             }},
            {xtype:'menuitem', text: 'yt Chat',
                 handler: function (b,e) { 
-                        window.open("http://yt.enzotools.org/irc.html", "_new");
+                        window.open("http://yt-project.org/irc.html", "_new");
             }},
            {xtype: 'menuseparator'},
            {xtype:'menuitem', text: 'Quit',


--- a/yt/gui/reason/html/js/reason.js	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/gui/reason/html/js/reason.js	Fri Sep 09 10:33:52 2011 -0400
@@ -9,7 +9,7 @@
 Affiliation: MSU
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: Columbia University
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/gui/reason/html/js/widget_griddata.js	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/gui/reason/html/js/widget_griddata.js	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: Columbia University
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/gui/reason/html/js/widget_gridviewer.js	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/gui/reason/html/js/widget_gridviewer.js	Fri Sep 09 10:33:52 2011 -0400
@@ -5,7 +5,7 @@
 Affiliation: University of Colorado at Boulder
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: Columbia University
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/gui/reason/html/js/widget_phaseplot.js	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/gui/reason/html/js/widget_phaseplot.js	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: Columbia University
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/gui/reason/html/js/widget_plotwindow.js	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/gui/reason/html/js/widget_plotwindow.js	Fri Sep 09 10:33:52 2011 -0400
@@ -9,7 +9,7 @@
 Affiliation: MSU
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: Columbia University
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/gui/reason/html/js/widget_progressbar.js	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/gui/reason/html/js/widget_progressbar.js	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: Columbia University
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/gui/reason/html/js/widget_streamlineviewer.js	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/gui/reason/html/js/widget_streamlineviewer.js	Fri Sep 09 10:33:52 2011 -0400
@@ -5,7 +5,7 @@
 Affiliation: University of Colorado at Boulder
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: Columbia University
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/gui/reason/html/leaflet/leaflet.css	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/gui/reason/html/leaflet/leaflet.css	Fri Sep 09 10:33:52 2011 -0400
@@ -25,6 +25,7 @@
 .leaflet-marker-shadow {
 	-moz-user-select: none;
 	-webkit-user-select: none;
+	user-select: none;
 	}
 .leaflet-marker-icon, 
 .leaflet-marker-shadow {
@@ -33,6 +34,9 @@
 .leaflet-clickable {
 	cursor: pointer;
 	}
+.leaflet-container img {
+	max-width: auto;
+	}
 
 .leaflet-tile-pane { z-index: 2; }
 .leaflet-overlay-pane { z-index: 3; }
@@ -52,6 +56,10 @@
 	visibility: inherit;
 	}
 
+a.leaflet-active {
+	outline: 2px solid orange;
+	}
+
 
 /* Leaflet controls */
 
@@ -78,7 +86,6 @@
 .leaflet-control {
 	float: left;
 	clear: both;
-	display: inline;
 	}
 .leaflet-right .leaflet-control {
 	float: right;
@@ -100,9 +107,9 @@
 	padding: 5px;
 	background: rgba(0, 0, 0, 0.25);
 	
-	border-radius: 7px;
 	-moz-border-radius: 7px;
 	-webkit-border-radius: 7px;
+	border-radius: 7px;
 	}
 .leaflet-control-zoom a {
 	display: block;
@@ -112,9 +119,9 @@
 	background-repeat: no-repeat;
 	background-color: rgba(255, 255, 255, 0.75);
 	
-	border-radius: 4px;
 	-moz-border-radius: 4px;
 	-webkit-border-radius: 4px;
+	border-radius: 4px;
 	}
 .leaflet-control-zoom a:hover {
 	background-color: #fff;
@@ -135,11 +142,13 @@
 	margin: 0;
 	padding: 0 5px;
 	
-	font: 11px/1.5 Arial, sans-serif;
+	font: 11px/1.5 "Helvetica Neue", Arial, Helvetica, sans-serif;
 	color: #333;
 	
 	background-color: rgba(255, 255, 255, 0.7);
             
+	-moz-box-shadow: 0 0 7px #ccc;
+	-webkit-box-shadow: 0 0 7px #ccc;
 	box-shadow: 0 0 7px #ccc;
 	}
 
@@ -153,18 +162,22 @@
 	-moz-transition: opacity 0.2s linear;
 	-o-transition: opacity 0.2s linear;
 	transition: opacity 0.2s linear;
-}
+	}
 .leaflet-fade-anim .leaflet-tile-loaded {
 	opacity: 1;
-}
+	}
 
 .leaflet-fade-anim .leaflet-popup {
+	opacity: 0;
+
 	-webkit-transition: opacity 0.2s linear;
-	opacity: 0;
-}
+	-moz-transition: opacity 0.2s linear;
+	-o-transition: opacity 0.2s linear;
+	transition: opacity 0.2s linear;
+	}
 .leaflet-fade-anim .leaflet-map-pane .leaflet-popup {
 	opacity: 1;
-}
+	}
 
 .leaflet-zoom-anim .leaflet-tile {
 	-webkit-transition: none;
@@ -209,15 +222,16 @@
 	-moz-transform: rotate(45deg);
 	-webkit-transform: rotate(45deg);
 	-ms-transform: rotate(45deg);
+	-o-transform: rotate(45deg);
+	transform: rotate(45deg);
 	}
 .leaflet-popup-close-button {
 	position: absolute;
 	top: 9px;
 	right: 9px;
 	
-	width: 8px;
-	height: 8px;
-	padding: 1px;
+	width: 10px;
+	height: 10px;
 	
 	overflow: hidden;
 	}
@@ -241,8 +255,9 @@
 	}
 .leaflet-popup-content-wrapper, .leaflet-popup-tip {
 	background: white;
+	
 	box-shadow: 0 1px 10px #888;
-	 -moz-box-shadow: 0 1px 10px #888;
+	-moz-box-shadow: 0 1px 10px #888;
 	 -webkit-box-shadow: 0 1px 14px #999;
 	}
 .leaflet-popup-content-wrapper {
@@ -251,7 +266,7 @@
 	border-radius: 20px;
 	}
 .leaflet-popup-content {
-	font: 12px/1.4 Arial, Helvetica, sans-serif;
+	font: 12px/1.4 "Helvetica Neue", Arial, Helvetica, sans-serif;
 	}
 .leaflet-popup-close-button {
 	background: white url(images/popup-close.png);


--- a/yt/gui/reason/html/leaflet/leaflet.ie.css	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/gui/reason/html/leaflet/leaflet.ie.css	Fri Sep 09 10:33:52 2011 -0400
@@ -1,3 +1,21 @@
+.leaflet-tile {
+	filter: inherit;
+	}
+
+.leaflet-vml-shape {
+	width: 1px;
+	height: 1px;
+	}
+.lvml {
+	behavior: url(#default#VML); 
+	display: inline-block; 
+	position: absolute;
+	}
+	
+.leaflet-control {
+	display: inline;
+	}
+
 .leaflet-popup-tip {
 	width: 21px;
 	_width: 27px;
@@ -7,17 +25,12 @@
 	filter: progid:DXImageTransform.Microsoft.Matrix(M11=0.70710678, M12=0.70710678, M21=-0.70710678, M22=0.70710678);
 	-ms-filter: "progid:DXImageTransform.Microsoft.Matrix(M11=0.70710678, M12=0.70710678, M21=-0.70710678, M22=0.70710678)";
 	}
-
 .leaflet-popup-tip-container {
 	margin-top: -1px;
 	}
 .leaflet-popup-content-wrapper, .leaflet-popup-tip {
 	border: 1px solid #bbb;
 	}
-.leaflet-vml-shape {
-	width: 1px;
-	height: 1px;
-	}
 
 .leaflet-control-zoom {
 	filter: progid:DXImageTransform.Microsoft.gradient(startColorStr='#3F000000',EndColorStr='#3F000000');


--- a/yt/gui/reason/html/leaflet/leaflet.js	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/gui/reason/html/leaflet/leaflet.js	Fri Sep 09 10:33:52 2011 -0400
@@ -3,93 +3,112 @@
  Leaflet is a BSD-licensed JavaScript library for map display and interaction.
  See http://cloudmade.github.com/Leaflet/ for more information.
 */
-var L={VERSION:"0.1",ROOT_URL:function(){for(var a=document.getElementsByTagName("script"),b=0,c=a.length;b<c;b++){var d=a[b].src;if((d=d&&d.match(/^(.*\/)leaflet-*\w*\.js.*$/))&&d[1])return d[1]}return"../../dist/"}(),noConflict:function(){L=this._originalL;return this},_originalL:L};L.Util={extend:function(a){for(var b=Array.prototype.slice.call(arguments,1),c=0,d=b.length,e;c<d;c++)for(var f in e=b[c]||{},e)e.hasOwnProperty(f)&&(a[f]=e[f]);return a},bind:function(a,b){return function(){return a.apply(b,arguments)}},stamp:function(){var a=0;return function(b){b._leaflet_id=b._leaflet_id||++a;return b._leaflet_id}}(),limitExecByInterval:function(a,b,c){function d(){e=!1;f&&(g.callee.apply(c,g),f=!1)}var e,f,g;return function(){g=arguments;e?f=!0:(e=!0,setTimeout(d,b),a.apply(c,
-g))}},deferExecByInterval:function(a,b,c){function d(){f=!1;a.apply(c,e)}var e,f;return function(){e=arguments;f||(f=!0,setTimeout(d,b))}},falseFn:function(){return!1},formatNum:function(a,b){var c=Math.pow(10,b||5);return Math.round(a*c)/c},setOptions:function(a,b){a.options=L.Util.extend({},a.options,b)}};L.Class=function(){};
+(function(a){var b={VERSION:"0.2",ROOT_URL:function(){for(var a=document.getElementsByTagName("script"),b=/^(.*\/)leaflet-?([\w-]*)\.js.*$/,e=0,f=a.length;e<f;e++){var g=a[e].src;if(g=g&&g.match(b)){if(g[2]=="include")break;return g[1]}}return"../../dist/"}(),noConflict:function(){a.L=this._originalL;return this},_originalL:a.L};window.L=b})(this);L.Util={extend:function(a){for(var b=Array.prototype.slice.call(arguments,1),c=0,d=b.length,e;c<d;c++){e=b[c]||{};for(var f in e)e.hasOwnProperty(f)&&(a[f]=e[f])}return a},bind:function(a,b){return function(){return a.apply(b,arguments)}},stamp:function(){var a=0;return function(b){b._leaflet_id=b._leaflet_id||++a;return b._leaflet_id}}(),requestAnimFrame:function(){function a(a){window.setTimeout(a,1E3/60)}var b=window.requestAnimationFrame||window.webkitRequestAnimationFrame||window.mozRequestAnimationFrame||
+window.oRequestAnimationFrame||window.msRequestAnimationFrame||a;return function(c,d,e){c=d?L.Util.bind(c,d):d;e&&b===a?c():b(c)}}(),limitExecByInterval:function(a,b,c){function d(){e=!1;f&&(g.callee.apply(c,g),f=!1)}var e,f,g;return function(){g=arguments;e?f=!0:(e=!0,setTimeout(d,b),a.apply(c,g))}},falseFn:function(){return!1},formatNum:function(a,b){var c=Math.pow(10,b||5);return Math.round(a*c)/c},setOptions:function(a,b){a.options=L.Util.extend({},a.options,b)},getParamString:function(a){var b=
+[],c;for(c in a)a.hasOwnProperty(c)&&b.push(c+"="+a[c]);return"?"+b.join("&")}};L.Class=function(){};
 L.Class.extend=function(a){var b=function(){!L.Class._prototyping&&this.initialize&&this.initialize.apply(this,arguments)};L.Class._prototyping=!0;var c=new this;L.Class._prototyping=!1;c.constructor=b;b.prototype=c;c.superclass=this.prototype;a.statics&&(L.Util.extend(b,a.statics),delete a.statics);a.includes&&(L.Util.extend.apply(null,[c].concat(a.includes)),delete a.includes);if(a.options&&c.options)a.options=L.Util.extend({},c.options,a.options);L.Util.extend(c,a);b.extend=arguments.callee;b.include=
 function(a){L.Util.extend(this.prototype,a)};for(var d in this)this.hasOwnProperty(d)&&d!="prototype"&&(b[d]=this[d]);return b};L.Mixin={};
 L.Mixin.Events={addEventListener:function(a,b,c){var d=this._leaflet_events=this._leaflet_events||{};d[a]=d[a]||[];d[a].push({action:b,context:c});return this},hasEventListeners:function(a){return"_leaflet_events"in this&&a in this._leaflet_events&&this._leaflet_events[a].length>0},removeEventListener:function(a,b,c){if(!this.hasEventListeners(a))return this;for(var d=0,e=this._leaflet_events,f=e[a].length;d<f;d++)if(e[a][d].action===b&&(!c||e[a][d].context===c)){e[a].splice(d,1);break}return this},fireEvent:function(a,
-b){if(this.hasEventListeners(a)){for(var c=L.Util.extend({type:a,target:this},b),d=this._leaflet_events[a].slice(),e=0,f=d.length;e<f;e++)d[e].action.call(d[e].context||this,c);return this}}};L.Mixin.Events.on=L.Mixin.Events.addEventListener;L.Mixin.Events.off=L.Mixin.Events.removeEventListener;L.Mixin.Events.fire=L.Mixin.Events.fireEvent;(function(){var a=navigator.userAgent.toLowerCase(),b=!!window.ActiveXObject,c=a.indexOf("webkit")!=-1,d=a.indexOf("mobile")!=-1;L.Browser={ie:b,ie6:b&&!window.XMLHttpRequest,webkit:c,webkit3d:c&&"WebKitCSSMatrix"in window&&"m11"in new WebKitCSSMatrix,mobileWebkit:c&&d,gecko:a.indexOf("gecko")!=-1,android:a.indexOf("android")!=-1}})();L.Point=function(a,b,c){this.x=c?Math.round(a):a;this.y=c?Math.round(b):b};
-L.Point.prototype={add:function(a){return this.clone()._add(a)},_add:function(a){this.x+=a.x;this.y+=a.y;return this},subtract:function(a){return this.clone()._subtract(a)},_subtract:function(a){this.x-=a.x;this.y-=a.y;return this},divideBy:function(a,b){return new L.Point(this.x/a,this.y/a,b)},multiplyBy:function(a){return new L.Point(this.x*a,this.y*a)},distanceTo:function(a){var b=a.x-this.x;a=a.y-this.y;return Math.sqrt(b*b+a*a)},round:function(){return this.clone()._round()},_round:function(){this.x=
-Math.round(this.x);this.y=Math.round(this.y);return this},clone:function(){return new L.Point(this.x,this.y)},toString:function(){return"Point("+L.Util.formatNum(this.x)+", "+L.Util.formatNum(this.y)+")"}};L.Bounds=L.Class.extend({initialize:function(a,b){for(var c=a instanceof Array?a:[a,b],d=0,e=c.length;d<e;d++)this.extend(c[d])},extend:function(a){!this.min&&!this.max?(this.min=new L.Point(a.x,a.y),this.max=new L.Point(a.x,a.y)):(this.min.x=Math.min(a.x,this.min.x),this.max.x=Math.max(a.x,this.max.x),this.min.y=Math.min(a.y,this.min.y),this.max.y=Math.max(a.y,this.max.y))},getCenter:function(a){return new L.Point((this.min.x+this.max.x)/2,(this.min.y+this.max.y)/2,a)},contains:function(a){return a.min.x>=
-this.min.x&&a.max.x<=this.max.x&&a.min.y>=this.min.y&&a.max.y<=this.max.y}});L.Transformation=L.Class.extend({initialize:function(a,b,c,d){this._a=a;this._b=b;this._c=c;this._d=d},transform:function(a,b){return this._transform(a.clone(),b)},_transform:function(a,b){b=b||1;a.x=b*(this._a*a.x+this._b);a.y=b*(this._c*a.y+this._d);return a},untransform:function(a,b){b=b||1;return new L.Point((a.x/b-this._b)/this._a,(a.y/b-this._d)/this._c)}});L.LineUtil={simplify:function(a,b){if(!b)return a.slice();a=this.reducePoints(a,b);return a=this.simplifyDP(a,b)},pointToSegmentDistance:function(a,b,c){return Math.sqrt(this._sqPointToSegmentDist(a,b,c))},simplifyDP:function(a,b){for(var c=0,d=0,e=b*b,f=1,g=a.length,h;f<g-1;f++)h=this._sqPointToSegmentDist(a[f],a[0],a[g-1]),h>c&&(d=f,c=h);return c>=e?(c=a.slice(0,d),d=a.slice(d),g=this.simplifyDP(c,b).slice(0,g-2),d=this.simplifyDP(d,b),g.concat(d)):[a[0],a[g-1]]},reducePoints:function(a,b){for(var c=
-[a[0]],d=b*b,e=1,f=0,g=a.length;e<g;e++)this._sqDist(a[e],a[f])<d||(c.push(a[e]),f=e);f<g-1&&c.push(a[g-1]);return c},clipSegment:function(a,b,c,d){d=d?this._lastCode:this._getBitCode(a,c);var e=this._getBitCode(b,c);for(this._lastCode=e;;)if(d|e)if(d&e)return!1;else{var f=d||e,g=this._getEdgeIntersection(a,b,f,c),h=this._getBitCode(g,c);f==d?(a=g,d=h):(b=g,e=h)}else return[a,b]},_getEdgeIntersection:function(a,b,c,d){var e=b.x-a.x;b=b.y-a.y;var f=d.min;d=d.max;if(c&8)return new L.Point(a.x+e*(d.y-
-a.y)/b,d.y);else if(c&4)return new L.Point(a.x+e*(f.y-a.y)/b,f.y);else if(c&2)return new L.Point(d.x,a.y+b*(d.x-a.x)/e);else if(c&1)return new L.Point(f.x,a.y+b*(f.x-a.x)/e)},_getBitCode:function(a,b){var c=0;a.x<b.min.x?c|=1:a.x>b.max.x&&(c|=2);a.y<b.min.y?c|=4:a.y>b.max.y&&(c|=8);return c},_sqDist:function(a,b){var c=b.x-a.x,d=b.y-a.y;return c*c+d*d},_sqPointToSegmentDist:function(a,b,c){var d=c.x-b.x,e=c.y-b.y,f=((a.x-b.x)*d+(a.y-b.y)*e)/this._sqDist(b,c);if(f<0)return this._sqDist(a,b);if(f>1)return this._sqDist(a,
-c);b=new L.Point(b.x+d*f,b.y+e*f);return this._sqDist(a,b)}};L.PolyUtil={};L.PolyUtil.clipPolygon=function(a,b){var c,d=[1,4,2,8],e,f,g,h,i,j,k=L.LineUtil;e=0;for(i=a.length;e<i;e++)a[e]._code=k._getBitCode(a[e],b);for(g=0;g<4;g++){j=d[g];c=[];e=0;i=a.length;for(f=i-1;e<i;f=e++)if(h=a[e],f=a[f],h._code&j){if(!(f._code&j))f=k._getEdgeIntersection(f,h,j,b),f._code=k._getBitCode(f,b),c.push(f)}else{if(f._code&j)f=k._getEdgeIntersection(f,h,j,b),f._code=k._getBitCode(f,b),c.push(f);c.push(h)}a=c}return a};L.DomEvent={addListener:function(a,b,c,d){function e(b){return c.call(d||a,b||L.DomEvent._getEvent())}var f=L.Util.stamp(c);a["_leaflet_"+b+f]=e;L.Browser.mobileWebkit&&b=="dblclick"&&this.addDoubleTapListener?this.addDoubleTapListener(a,e,f):"addEventListener"in a?(b=="mousewheel"&&a.addEventListener("DOMMouseScroll",e,!1),a.addEventListener(b,e,!1)):"attachEvent"in a&&a.attachEvent("on"+b,e)},removeListener:function(a,b,c){c=L.Util.stamp(c);var d="_leaflet_"+b+c;handler=a[d];L.Browser.mobileWebkit&&
-b=="dblclick"&&this.removeDoubleTapListener?this.removeDoubleTapListener(a,c):"removeEventListener"in a?(b=="mousewheel"&&a.removeEventListener("DOMMouseScroll",handler,!1),a.removeEventListener(b,handler,!1)):"detachEvent"in a&&a.detachEvent("on"+b,handler);a[d]=null},_getEvent:function(){var a=window.event;if(!a)for(var b=arguments.callee.caller;b;){if((a=b.arguments[0])&&Event==a.constructor)break;b=b.caller}return a},stopPropagation:function(a){a.stopPropagation?a.stopPropagation():a.cancelBubble=
-!0},disableClickPropagation:function(a){L.DomEvent.addListener(a,"mousedown",L.DomEvent.stopPropagation);L.DomEvent.addListener(a,"click",L.DomEvent.stopPropagation);L.DomEvent.addListener(a,"dblclick",L.DomEvent.stopPropagation)},preventDefault:function(a){a.preventDefault?a.preventDefault():a.returnValue=!1},getMousePosition:function(a,b){var c=new L.Point(a.pageX?a.pageX:a.clientX+document.body.scrollLeft+document.documentElement.scrollLeft,a.pageY?a.pageY:a.clientY+document.body.scrollTop+document.documentElement.scrollTop);
-return b?c.subtract(L.DomUtil.getCumulativeOffset(b)):c},getWheelDelta:function(a){var b=0;a.wheelDelta&&(b=a.wheelDelta/120);a.detail&&(b=-a.detail/3);return b}};L.Util.extend(L.DomEvent,{addDoubleTapListener:function(a,b,c){function d(a){if(a.touches.length==1){var b=Date.now(),c=b-(f||b);i=a.touches[0];g=c>0&&c<=h;f=b}}function e(){if(g)i.type="dblclick",b(i),f=null}var f,g=!1,h=250,i;a["_leaflet_touchstart"+c]=d;a["_leaflet_touchend"+c]=e;a.addEventListener("touchstart",d,!1);a.addEventListener("touchend",e,!1)},removeDoubleTapListener:function(a,b){a.removeEventListener(a,a["_leaflet_touchstart"+b],!1);a.removeEventListener(a,a["_leaflet_touchend"+b],
-!1)}});L.DomUtil={get:function(a){return typeof a=="string"?document.getElementById(a):a},getStyle:function(a,b){var c=a.style[b];typeof c=="undefined"&&a.currentStyle&&(c=a.currentStyle[b]);typeof c=="undefined"&&(c=(c=document.defaultView.getComputedStyle(a,null))?c[b]:null);return c=="auto"?null:c},getCumulativeOffset:function(a){var b=0,c=0;do b+=a.offsetTop||0,c+=a.offsetLeft||0,a=a.offsetParent;while(a);return new L.Point(c,b)},create:function(a,b,c){a=document.createElement(a);a.className=b;c&&c.appendChild(a);
-return a},disableTextSelection:function(){document.selection&&document.selection.empty&&document.selection.empty();if(!this._onselectstart)this._onselectstart=document.onselectstart,document.onselectstart=L.Util.falseFn},enableTextSelection:function(){document.onselectstart=this._onselectstart;this._onselectstart=null},CLASS_RE:/(\\s|^)'+cls+'(\\s|$)/,hasClass:function(a,b){return a.className.length>0&&RegExp("(^|\\s)"+b+"(\\s|$)").test(a.className)},addClass:function(a,b){L.DomUtil.hasClass(a,b)||
-(a.className+=(a.className?" ":"")+b)},testProp:function(a){for(var b=document.documentElement.style,c=0;c<a.length;c++)if(a[c]in b)return a[c];return!1},getTranslateString:function(a){return L.DomUtil.TRANSLATE_OPEN+a.x+"px,"+a.y+"px"+L.DomUtil.TRANSLATE_CLOSE},getScaleString:function(a,b){return L.DomUtil.getTranslateString(b)+" scale("+a+") "+L.DomUtil.getTranslateString(b.multiplyBy(-1))},setPosition:function(a,b){a._leaflet_pos=b;L.Browser.webkit?a.style[L.DomUtil.TRANSFORM]=L.DomUtil.getTranslateString(b):
-(a.style.left=b.x+"px",a.style.top=b.y+"px")},getPosition:function(a){return a._leaflet_pos}};L.Util.extend(L.DomUtil,{TRANSITION:L.DomUtil.testProp(["transition","webkitTransition","OTransition","MozTransition","msTransition"]),TRANSFORM:L.DomUtil.testProp(["transformProperty","WebkitTransform","OTransform","MozTransform","msTransform"]),TRANSLATE_OPEN:"translate"+(L.Browser.webkit3d?"3d(":"("),TRANSLATE_CLOSE:L.Browser.webkit3d?",0)":")"});L.Draggable=L.Class.extend({includes:L.Mixin.Events,statics:{START:L.Browser.mobileWebkit?"touchstart":"mousedown",END:L.Browser.mobileWebkit?"touchend":"mouseup",MOVE:L.Browser.mobileWebkit?"touchmove":"mousemove"},initialize:function(a,b){this._element=a;this._dragStartTarget=b||a},enable:function(){if(!this._enabled)L.DomEvent.addListener(this._dragStartTarget,L.Draggable.START,this._onDown,this),this._enabled=!0},disable:function(){if(this._enabled)L.DomEvent.removeListener(this._dragStartTarget,
-L.Draggable.START,this._onDown),this._enabled=!1},_onDown:function(a){if(!(a.shiftKey||a.which!=1&&a.button!=1&&!a.touches))if(a.touches||L.DomEvent.preventDefault(a),!(a.touches&&a.touches.length>1))a.touches&&a.touches.length==1&&(a=a.touches[0]),this._dragStartPos=L.DomUtil.getPosition(this._element),this._startX=a.clientX,this._startY=a.clientY,this._moved=!1,L.DomUtil.disableTextSelection(),this._setMovingCursor(),L.DomEvent.addListener(document,L.Draggable.MOVE,this._onMove,this),L.DomEvent.addListener(document,
-L.Draggable.END,this._onUp,this)},_onMove:function(a){L.DomEvent.preventDefault(a);if(!(a.touches&&a.touches.length>1)){a.touches&&a.touches.length==1&&(a=a.touches[0]);this._offset=new L.Point(a.clientX-this._startX,a.clientY-this._startY);this._newPos=this._dragStartPos.add(this._offset);this._updatePosition();if(!this._moved)this.fire("dragstart"),this._moved=!0;this.fire("drag")}},_updatePosition:function(){L.DomUtil.setPosition(this._element,this._newPos)},_onUp:function(){L.DomUtil.enableTextSelection();
-this._restoreCursor();L.DomEvent.removeListener(document,L.Draggable.MOVE,this._onMove);L.DomEvent.removeListener(document,L.Draggable.END,this._onUp);this._moved&&this.fire("dragend")},_setMovingCursor:function(){this._bodyCursor=document.body.style.cursor;document.body.style.cursor="move"},_restoreCursor:function(){document.body.style.cursor=this._bodyCursor}});L.Transition=L.Class.extend({includes:L.Mixin.Events,statics:{CUSTOM_PROPS_SETTERS:{position:L.DomUtil.setPosition},implemented:function(){return L.Transition.NATIVE||L.Transition.TIMER}},options:{easing:"ease",duration:0.5},_setProperty:function(a,b){var c=L.Transition.CUSTOM_PROPS_SETTERS;if(a in c)c[a](this._el,b);else this._el.style[a]=b}});L.Transition=L.Transition.extend({statics:function(){var a=L.DomUtil.TRANSITION;return{NATIVE:!!a,TRANSITION:a,PROPERTY:a+"Property",DURATION:a+"Duration",EASING:a+"TimingFunction",END:a=="webkitTransition"||a=="OTransition"?a+"End":"transitionend",CUSTOM_PROPS_PROPERTIES:{position:L.Browser.webkit?L.DomUtil.TRANSFORM:"top, left"}}}(),options:{fakeStepInterval:100},initialize:function(a,b){this._el=a;L.Util.setOptions(this,b);L.DomEvent.addListener(a,L.Transition.END,this._onTransitionEnd,this);this._onFakeStep=
+b){if(this.hasEventListeners(a)){for(var c=L.Util.extend({type:a,target:this},b),d=this._leaflet_events[a].slice(),e=0,f=d.length;e<f;e++)d[e].action.call(d[e].context||this,c);return this}}};L.Mixin.Events.on=L.Mixin.Events.addEventListener;L.Mixin.Events.off=L.Mixin.Events.removeEventListener;L.Mixin.Events.fire=L.Mixin.Events.fireEvent;(function(){var a=navigator.userAgent.toLowerCase(),b=!!window.ActiveXObject,c=a.indexOf("webkit")!=-1,d=a.indexOf("mobi")!=-1,e=a.indexOf("android")!=-1,f=window.opera;L.Browser={ie:b,ie6:b&&!window.XMLHttpRequest,webkit:c,webkit3d:c&&"WebKitCSSMatrix"in window&&"m11"in new WebKitCSSMatrix,mobileWebkit:c&&(d||e),mobileOpera:d&&f,gecko:a.indexOf("gecko")!=-1,android:e};L.Browser.touch=L.Browser.mobileWebkit||L.Browser.mobileOpera})();L.Point=function(a,b,c){this.x=c?Math.round(a):a;this.y=c?Math.round(b):b};
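+// L.Point is a simple 2D pixel point: add()/subtract() clone first and delegate to the
+// underscored in-place variants, and a truthy third constructor argument rounds x and y.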
+L.Point.prototype={add:function(a){return this.clone()._add(a)},_add:function(a){this.x+=a.x;this.y+=a.y;return this},subtract:function(a){return this.clone()._subtract(a)},_subtract:function(a){this.x-=a.x;this.y-=a.y;return this},divideBy:function(a,b){return new L.Point(this.x/a,this.y/a,b)},multiplyBy:function(a){return new L.Point(this.x*a,this.y*a)},distanceTo:function(a){var b=a.x-this.x,a=a.y-this.y;return Math.sqrt(b*b+a*a)},round:function(){return this.clone()._round()},_round:function(){this.x=
+Math.round(this.x);this.y=Math.round(this.y);return this},clone:function(){return new L.Point(this.x,this.y)},toString:function(){return"Point("+L.Util.formatNum(this.x)+", "+L.Util.formatNum(this.y)+")"}};L.Bounds=L.Class.extend({initialize:function(a,b){if(a)for(var c=a instanceof Array?a:[a,b],d=0,e=c.length;d<e;d++)this.extend(c[d])},extend:function(a){!this.min&&!this.max?(this.min=new L.Point(a.x,a.y),this.max=new L.Point(a.x,a.y)):(this.min.x=Math.min(a.x,this.min.x),this.max.x=Math.max(a.x,this.max.x),this.min.y=Math.min(a.y,this.min.y),this.max.y=Math.max(a.y,this.max.y))},getCenter:function(a){return new L.Point((this.min.x+this.max.x)/2,(this.min.y+this.max.y)/2,a)},contains:function(a){var b;
+if(a instanceof L.Bounds)b=a.min,a=a.max;return b.x>=this.min.x&&a.x<=this.max.x&&b.y>=this.min.y&&a.y<=this.max.y}});L.Transformation=L.Class.extend({initialize:function(a,b,c,d){this._a=a;this._b=b;this._c=c;this._d=d},transform:function(a,b){return this._transform(a.clone(),b)},_transform:function(a,b){b=b||1;a.x=b*(this._a*a.x+this._b);a.y=b*(this._c*a.y+this._d);return a},untransform:function(a,b){b=b||1;return new L.Point((a.x/b-this._b)/this._a,(a.y/b-this._d)/this._c)}});L.LineUtil={simplify:function(a,b){if(!b)return a.slice();a=this.reducePoints(a,b);return a=this.simplifyDP(a,b)},pointToSegmentDistance:function(a,b,c){return Math.sqrt(this._sqPointToSegmentDist(a,b,c))},simplifyDP:function(a,b){for(var c=0,d=0,e=b*b,f=1,g=a.length,h;f<g-1;f++)h=this._sqPointToSegmentDist(a[f],a[0],a[g-1]),h>c&&(d=f,c=h);return c>=e?(c=a.slice(0,d),d=a.slice(d),g=this.simplifyDP(c,b).slice(0,g-2),d=this.simplifyDP(d,b),g.concat(d)):[a[0],a[g-1]]},reducePoints:function(a,b){for(var c=
+[a[0]],d=b*b,e=1,f=0,g=a.length;e<g;e++)this._sqDist(a[e],a[f])<d||(c.push(a[e]),f=e);f<g-1&&c.push(a[g-1]);return c},clipSegment:function(a,b,c,d){var d=d?this._lastCode:this._getBitCode(a,c),e=this._getBitCode(b,c);for(this._lastCode=e;;)if(d|e)if(d&e)return!1;else{var f=d||e,g=this._getEdgeIntersection(a,b,f,c),h=this._getBitCode(g,c);f==d?(a=g,d=h):(b=g,e=h)}else return[a,b]},_getEdgeIntersection:function(a,b,c,d){var e=b.x-a.x,b=b.y-a.y,f=d.min,d=d.max;if(c&8)return new L.Point(a.x+e*(d.y-a.y)/
+b,d.y);else if(c&4)return new L.Point(a.x+e*(f.y-a.y)/b,f.y);else if(c&2)return new L.Point(d.x,a.y+b*(d.x-a.x)/e);else if(c&1)return new L.Point(f.x,a.y+b*(f.x-a.x)/e)},_getBitCode:function(a,b){var c=0;a.x<b.min.x?c|=1:a.x>b.max.x&&(c|=2);a.y<b.min.y?c|=4:a.y>b.max.y&&(c|=8);return c},_sqDist:function(a,b){var c=b.x-a.x,d=b.y-a.y;return c*c+d*d},_sqPointToSegmentDist:function(a,b,c){var d=c.x-b.x,e=c.y-b.y;if(!d&&!e)return this._sqDist(a,b);var f=((a.x-b.x)*d+(a.y-b.y)*e)/this._sqDist(b,c);if(f<
+0)return this._sqDist(a,b);if(f>1)return this._sqDist(a,c);b=new L.Point(b.x+d*f,b.y+e*f);return this._sqDist(a,b)}};L.PolyUtil={};L.PolyUtil.clipPolygon=function(a,b){var c,d=[1,4,2,8],e,f,g,h,j,k,l=L.LineUtil;e=0;for(j=a.length;e<j;e++)a[e]._code=l._getBitCode(a[e],b);for(g=0;g<4;g++){k=d[g];c=[];e=0;j=a.length;for(f=j-1;e<j;f=e++)if(h=a[e],f=a[f],h._code&k){if(!(f._code&k))f=l._getEdgeIntersection(f,h,k,b),f._code=l._getBitCode(f,b),c.push(f)}else{if(f._code&k)f=l._getEdgeIntersection(f,h,k,b),f._code=l._getBitCode(f,b),c.push(f);c.push(h)}a=c}return a};L.DomEvent={addListener:function(a,b,c,d){function e(b){return c.call(d||a,b||L.DomEvent._getEvent())}var f=L.Util.stamp(c);if(L.Browser.touch&&b=="dblclick"&&this.addDoubleTapListener)this.addDoubleTapListener(a,e,f);else if("addEventListener"in a)if(b=="mousewheel")a.addEventListener("DOMMouseScroll",e,!1),a.addEventListener(b,e,!1);else if(b=="mouseenter"||b=="mouseleave"){var g=e,e=function(b){if(L.DomEvent._checkMouse(a,b))return g(b)};a.addEventListener(b=="mouseenter"?"mouseover":"mouseout",
+e,!1)}else a.addEventListener(b,e,!1);else"attachEvent"in a&&a.attachEvent("on"+b,e);a["_leaflet_"+b+f]=e},removeListener:function(a,b,c){var c=L.Util.stamp(c),d="_leaflet_"+b+c;handler=a[d];L.Browser.mobileWebkit&&b=="dblclick"&&this.removeDoubleTapListener?this.removeDoubleTapListener(a,c):"removeEventListener"in a?b=="mousewheel"?(a.removeEventListener("DOMMouseScroll",handler,!1),a.removeEventListener(b,handler,!1)):b=="mouseenter"||b=="mouseleave"?a.removeEventListener(b=="mouseenter"?"mouseover":
+"mouseout",handler,!1):a.removeEventListener(b,handler,!1):"detachEvent"in a&&a.detachEvent("on"+b,handler);a[d]=null},_checkMouse:function(a,b){var c=b.relatedTarget;if(!c)return!0;try{for(;c&&c!=a;)c=c.parentNode}catch(d){return!1}return c!=a},_getEvent:function(){var a=window.event;if(!a)for(var b=arguments.callee.caller;b;){if((a=b.arguments[0])&&Event==a.constructor)break;b=b.caller}return a},stopPropagation:function(a){a.stopPropagation?a.stopPropagation():a.cancelBubble=!0},disableClickPropagation:function(a){L.DomEvent.addListener(a,
+"mousedown",L.DomEvent.stopPropagation);L.DomEvent.addListener(a,"click",L.DomEvent.stopPropagation);L.DomEvent.addListener(a,"dblclick",L.DomEvent.stopPropagation)},preventDefault:function(a){a.preventDefault?a.preventDefault():a.returnValue=!1},stop:function(a){L.DomEvent.preventDefault(a);L.DomEvent.stopPropagation(a)},getMousePosition:function(a,b){var c=new L.Point(a.pageX?a.pageX:a.clientX+document.body.scrollLeft+document.documentElement.scrollLeft,a.pageY?a.pageY:a.clientY+document.body.scrollTop+
+document.documentElement.scrollTop);return b?c.subtract(L.DomUtil.getCumulativeOffset(b)):c},getWheelDelta:function(a){var b=0;a.wheelDelta&&(b=a.wheelDelta/120);a.detail&&(b=-a.detail/3);return b}};L.Util.extend(L.DomEvent,{addDoubleTapListener:function(a,b,c){function d(a){if(a.touches.length==1){var b=Date.now(),c=b-(f||b);j=a.touches[0];g=c>0&&c<=h;f=b}}function e(){if(g)j.type="dblclick",b(j),f=null}var f,g=!1,h=250,j;a["_leaflet_touchstart"+c]=d;a["_leaflet_touchend"+c]=e;a.addEventListener("touchstart",d,!1);a.addEventListener("touchend",e,!1)},removeDoubleTapListener:function(a,b){a.removeEventListener(a,a["_leaflet_touchstart"+b],!1);a.removeEventListener(a,a["_leaflet_touchend"+b],
+!1)}});L.DomUtil={get:function(a){return typeof a=="string"?document.getElementById(a):a},getStyle:function(a,b){var c=a.style[b];!c&&a.currentStyle&&(c=a.currentStyle[b]);if(!c||c=="auto")c=(c=document.defaultView.getComputedStyle(a,null))?c[b]:null;return c=="auto"?null:c},getCumulativeOffset:function(a){var b=0,c=0;do b+=a.offsetTop||0,c+=a.offsetLeft||0,a=a.offsetParent;while(a);return new L.Point(c,b)},create:function(a,b,c){a=document.createElement(a);a.className=b;c&&c.appendChild(a);return a},disableTextSelection:function(){document.selection&&
+document.selection.empty&&document.selection.empty();if(!this._onselectstart)this._onselectstart=document.onselectstart,document.onselectstart=L.Util.falseFn},enableTextSelection:function(){document.onselectstart=this._onselectstart;this._onselectstart=null},CLASS_RE:/(\\s|^)'+cls+'(\\s|$)/,hasClass:function(a,b){return a.className.length>0&&RegExp("(^|\\s)"+b+"(\\s|$)").test(a.className)},addClass:function(a,b){L.DomUtil.hasClass(a,b)||(a.className+=(a.className?" ":"")+b)},setOpacity:function(a,
+b){L.Browser.ie?a.style.filter="alpha(opacity="+Math.round(b*100)+")":a.style.opacity=b},testProp:function(a){for(var b=document.documentElement.style,c=0;c<a.length;c++)if(a[c]in b)return a[c];return!1},getTranslateString:function(a){return L.DomUtil.TRANSLATE_OPEN+a.x+"px,"+a.y+"px"+L.DomUtil.TRANSLATE_CLOSE},getScaleString:function(a,b){return L.DomUtil.getTranslateString(b)+" scale("+a+") "+L.DomUtil.getTranslateString(b.multiplyBy(-1))},setPosition:function(a,b){a._leaflet_pos=b;L.Browser.webkit?
+a.style[L.DomUtil.TRANSFORM]=L.DomUtil.getTranslateString(b):(a.style.left=b.x+"px",a.style.top=b.y+"px")},getPosition:function(a){return a._leaflet_pos}};
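+// The extend() call below feature-detects the vendor-prefixed CSS transition/transform
+// property names once via testProp; L.Draggable then binds touch or mouse events based on
+// L.Browser.touch, fires dragstart/drag/dragend through L.Mixin.Events, and on touch end
+// synthesizes a click when the element moved less than TAP_TOLERANCE pixels.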
+L.Util.extend(L.DomUtil,{TRANSITION:L.DomUtil.testProp(["transition","webkitTransition","OTransition","MozTransition","msTransition"]),TRANSFORM:L.DomUtil.testProp(["transformProperty","WebkitTransform","OTransform","MozTransform","msTransform"]),TRANSLATE_OPEN:"translate"+(L.Browser.webkit3d?"3d(":"("),TRANSLATE_CLOSE:L.Browser.webkit3d?",0)":")"});L.Draggable=L.Class.extend({includes:L.Mixin.Events,statics:{START:L.Browser.touch?"touchstart":"mousedown",END:L.Browser.touch?"touchend":"mouseup",MOVE:L.Browser.touch?"touchmove":"mousemove",TAP_TOLERANCE:15},initialize:function(a,b){this._element=a;this._dragStartTarget=b||a},enable:function(){if(!this._enabled)L.DomEvent.addListener(this._dragStartTarget,L.Draggable.START,this._onDown,this),this._enabled=!0},disable:function(){if(this._enabled)L.DomEvent.removeListener(this._dragStartTarget,
+L.Draggable.START,this._onDown),this._enabled=!1},_onDown:function(a){if(!(a.shiftKey||a.which!=1&&a.button!=1&&!a.touches)&&!(a.touches&&a.touches.length>1)){var b=a.touches&&a.touches.length==1?a.touches[0]:a;L.DomEvent.preventDefault(a);L.Browser.mobileWebkit&&(b.target.className+=" leaflet-active");this._moved=!1;L.DomUtil.disableTextSelection();this._setMovingCursor();this._startPos=this._newPos=L.DomUtil.getPosition(this._element);this._startPoint=new L.Point(b.clientX,b.clientY);L.DomEvent.addListener(document,
+L.Draggable.MOVE,this._onMove,this);L.DomEvent.addListener(document,L.Draggable.END,this._onUp,this)}},_onMove:function(a){if(!(a.touches&&a.touches.length>1)){L.DomEvent.preventDefault(a);a=a.touches&&a.touches.length==1?a.touches[0]:a;if(!this._moved)this.fire("dragstart"),this._moved=!0;this._newPos=this._startPos.add(new L.Point(a.clientX,a.clientY)).subtract(this._startPoint);L.Util.requestAnimFrame(this._updatePosition,this,!0);this.fire("drag")}},_updatePosition:function(){L.DomUtil.setPosition(this._element,
+this._newPos)},_onUp:function(a){if(a.changedTouches){var a=a.changedTouches[0],b=a.target,c=this._newPos&&this._newPos.distanceTo(this._startPos)||0;b.className=b.className.replace(" leaflet-active","");c<L.Draggable.TAP_TOLERANCE&&this._simulateEvent("click",a)}L.DomUtil.enableTextSelection();this._restoreCursor();L.DomEvent.removeListener(document,L.Draggable.MOVE,this._onMove);L.DomEvent.removeListener(document,L.Draggable.END,this._onUp);this._moved&&this.fire("dragend")},_removeActiveClass:function(){},
+_setMovingCursor:function(){this._bodyCursor=document.body.style.cursor;document.body.style.cursor="move"},_restoreCursor:function(){document.body.style.cursor=this._bodyCursor},_simulateEvent:function(a,b){var c=document.createEvent("MouseEvent");c.initMouseEvent(a,!0,!0,window,1,b.screenX,b.screenY,b.clientX,b.clientY,!1,!1,!1,!1,0,null);b.target.dispatchEvent(c)}});L.Transition=L.Class.extend({includes:L.Mixin.Events,statics:{CUSTOM_PROPS_SETTERS:{position:L.DomUtil.setPosition},implemented:function(){return L.Transition.NATIVE||L.Transition.TIMER}},options:{easing:"ease",duration:0.5},_setProperty:function(a,b){var c=L.Transition.CUSTOM_PROPS_SETTERS;if(a in c)c[a](this._el,b);else this._el.style[a]=b}});L.Transition=L.Transition.extend({statics:function(){var a=L.DomUtil.TRANSITION;return{NATIVE:!!a,TRANSITION:a,PROPERTY:a+"Property",DURATION:a+"Duration",EASING:a+"TimingFunction",END:a=="webkitTransition"||a=="OTransition"?a+"End":"transitionend",CUSTOM_PROPS_PROPERTIES:{position:L.Browser.webkit?L.DomUtil.TRANSFORM:"top, left"}}}(),options:{fakeStepInterval:100},initialize:function(a,b){this._el=a;L.Util.setOptions(this,b);L.DomEvent.addListener(a,L.Transition.END,this._onTransitionEnd,this);this._onFakeStep=
 L.Util.bind(this._onFakeStep,this)},run:function(a){var b,c=[],d=L.Transition.CUSTOM_PROPS_PROPERTIES;for(b in a)a.hasOwnProperty(b)&&(b=d[b]?d[b]:b,b=b.replace(/([A-Z])/g,function(a){return"-"+a.toLowerCase()}),c.push(b));this._el.style[L.Transition.DURATION]=this.options.duration+"s";this._el.style[L.Transition.EASING]=this.options.easing;this._el.style[L.Transition.PROPERTY]=c.join(", ");for(b in a)a.hasOwnProperty(b)&&this._setProperty(b,a[b]);this._inProgress=!0;this.fire("start");L.Transition.NATIVE?
 this._timer=setInterval(this._onFakeStep,this.options.fakeStepInterval):this._onTransitionEnd()},_onFakeStep:function(){this.fire("step")},_onTransitionEnd:function(){if(this._inProgress)this._inProgress=!1,clearInterval(this._timer),this._el.style[L.Transition.PROPERTY]="none",this.fire("step"),this.fire("end")}});L.Transition=L.Transition.NATIVE?L.Transition:L.Transition.extend({statics:{getTime:Date.now||function(){return+new Date},TIMER:!0,EASINGS:{ease:[0.25,0.1,0.25,1],linear:[0,0,1,1],"ease-in":[0.42,0,1,1],"ease-out":[0,0,0.58,1],"ease-in-out":[0.42,0,0.58,1]},CUSTOM_PROPS_GETTERS:{position:L.DomUtil.getPosition},UNIT_RE:/^[\d\.]+(\D*)$/},options:{fps:50},initialize:function(a,b){this._el=a;L.Util.extend(this.options,b);var c=L.Transition.EASINGS[this.options.easing]||L.Transition.EASINGS.ease;this._p1=
 new L.Point(0,0);this._p2=new L.Point(c[0],c[1]);this._p3=new L.Point(c[2],c[3]);this._p4=new L.Point(1,1);this._step=L.Util.bind(this._step,this);this._interval=Math.round(1E3/this.options.fps)},run:function(a){this._props={};var b=L.Transition.CUSTOM_PROPS_GETTERS,c=L.Transition.UNIT_RE;this.fire("start");for(var d in a)if(a.hasOwnProperty(d)){var e={};if(d in b)e.from=b[d](this._el);else{var f=this._el.style[d].match(c);e.from=parseFloat(f[0]);e.unit=f[1]}e.to=a[d];this._props[d]=e}clearInterval(this._timer);
 this._timer=setInterval(this._step,this._interval);this._startTime=L.Transition.getTime()},_step:function(){var a=L.Transition.getTime()-this._startTime,b=this.options.duration*1E3;a<b?this._runFrame(this._cubicBezier(a/b)):(this._runFrame(1),this._complete())},_runFrame:function(a){var b=L.Transition.CUSTOM_PROPS_SETTERS,c,d;for(c in this._props)this._props.hasOwnProperty(c)&&(d=this._props[c],c in b?(d=d.to.subtract(d.from).multiplyBy(a).add(d.from),b[c](this._el,d)):this._el.style[c]=(d.to-d.from)*
-a+d.from+d.unit);this.fire("step")},_complete:function(){clearInterval(this._timer);this.fire("end")},_cubicBezier:function(a){var b=3*Math.pow(1-a,2)*a,c=3*(1-a)*Math.pow(a,2),d=Math.pow(a,3);a=this._p1.multiplyBy(Math.pow(1-a,3));b=this._p2.multiplyBy(b);c=this._p3.multiplyBy(c);d=this._p4.multiplyBy(d);return a.add(b).add(c).add(d).y}});L.LatLng=function(a,b,c){c!==!0&&(a=Math.max(Math.min(a,90),-90),b=(b+180)%360+(b<-180?180:-180));this.lat=a;this.lng=b};L.Util.extend(L.LatLng,{DEG_TO_RAD:Math.PI/180,RAD_TO_DEG:180/Math.PI,MAX_MARGIN:1.0E-9});L.LatLng.prototype={equals:function(a){if(!(a instanceof L.LatLng))return!1;return Math.max(Math.abs(this.lat-a.lat),Math.abs(this.lng-a.lng))<=L.LatLng.MAX_MARGIN},toString:function(){return"LatLng("+L.Util.formatNum(this.lat)+", "+L.Util.formatNum(this.lng)+")"}};L.LatLngBounds=L.Class.extend({initialize:function(a,b){for(var c=a instanceof Array?a:[a,b],d=0,e=c.length;d<e;d++)this.extend(c[d])},extend:function(a){!this._southWest&&!this._northEast?(this._southWest=new L.LatLng(a.lat,a.lng),this._northEast=new L.LatLng(a.lat,a.lng)):(this._southWest.lat=Math.min(a.lat,this._southWest.lat),this._southWest.lng=Math.min(a.lng,this._southWest.lng),this._northEast.lat=Math.max(a.lat,this._northEast.lat),this._northEast.lng=Math.max(a.lng,this._northEast.lng))},
-getCenter:function(){return new L.LatLng((this._southWest.lat+this._northEast.lat)/2,(this._southWest.lng+this._northEast.lng)/2)},getSouthWest:function(){return this._southWest},getNorthEast:function(){return this._northEast},getNorthWest:function(){return new L.LatLng(this._northEast.lat,this._southWest.lng)},getSouthEast:function(){return new L.LatLng(this._southWest.lat,this._northEast.lng)},contains:function(a){var b=this._southWest,c=this._northEast,d=a.getSouthWest();a=a.getNorthEast();return d.lat>=
-b.lat&&a.lat<=c.lat&&d.lng>=b.lng&&a.lng<=c.lng}});L.Projection={};L.Projection.Mercator={MAX_LATITUDE:function(){var a=Math.exp(2*Math.PI);return Math.asin((a-1)/(a+1))*L.LatLng.RAD_TO_DEG}(),project:function(a){var b=L.LatLng.DEG_TO_RAD,c=L.Projection.Mercator.MAX_LATITUDE,d=a.lng*b;a=Math.max(Math.min(c,a.lat),-c)*b;a=Math.log(Math.tan(Math.PI/4+a/2));return new L.Point(d,a)},unproject:function(a,b){var c=L.LatLng.RAD_TO_DEG;return new L.LatLng((2*Math.atan(Math.exp(a.y))-Math.PI/2)*c,a.x*c,b)}};L.TileLayer=L.Class.extend({includes:L.Mixin.Events,options:{minZoom:0,maxZoom:18,tileSize:256,subdomains:"abc",errorTileUrl:"",attribution:"",unloadInvisibleTiles:L.Browser.mobileWebkit,updateWhenIdle:L.Browser.mobileWebkit},initialize:function(a,b){L.Util.setOptions(this,b);this._url=a;if(typeof this.options.subdomains=="string")this.options.subdomains=this.options.subdomains.split("")},onAdd:function(a){this._map=a;this._initContainer();this._tileImg=L.DomUtil.create("img","leaflet-tile");this._tileImg.galleryimg=
-"no";var b=this.options.tileSize;this._tileImg.style.width=b+"px";this._tileImg.style.height=b+"px";a.on("viewreset",this._reset,this);if(this.options.updateWhenIdle)a.on("moveend",this._update,this);else this._limitedUpdate=L.Util.limitExecByInterval(this._update,100,this),a.on("move",this._limitedUpdate,this);this._reset();this._update()},onRemove:function(){this._map.getPanes().tilePane.removeChild(this._container);this._map.off("viewreset",this._reset);this.options.updateWhenIdle?this._map.off("moveend",
-this._update):this._map.off("move",this._limitedUpdate)},getAttribution:function(){return this.options.attribution},_initContainer:function(){var a=this._map.getPanes().tilePane;if(!this._container||a.empty)this._container=L.DomUtil.create("div","leaflet-layer",a)},_reset:function(){this._tiles={};this._initContainer();this._container.innerHTML=""},_update:function(){var a=this._map.getPixelBounds(),b=this.options.tileSize,c=new L.Point(Math.floor(a.min.x/b),Math.floor(a.min.y/b));a=new L.Point(Math.floor(a.max.x/
-b),Math.floor(a.max.y/b));c=new L.Bounds(c,a);this._loadTilesFromCenterOut(c);this.options.unloadInvisibleTiles&&this._unloadOtherTiles(c)},getTileUrl:function(a,b){return this._url.replace("{s}",this.options.subdomains[(a.x+a.y)%this.options.subdomains.length]).replace("{z}",b).replace("{x}",a.x).replace("{y}",a.y)},_loadTilesFromCenterOut:function(a){for(var b=[],c=a.getCenter(),d=a.min.y;d<=a.max.y;d++)for(var e=a.min.x;e<=a.max.x;e++)e+":"+d in this._tiles||b.push(new L.Point(e,d));b.sort(function(a,
-b){return a.distanceTo(c)-b.distanceTo(c)});this._tilesToLoad=b.length;a=0;for(d=this._tilesToLoad;a<d;a++)this._loadTile(b[a])},_unloadOtherTiles:function(a){var b,c,d;for(d in this._tiles)if(this._tiles.hasOwnProperty(d)&&(b=d.split(":"),c=parseInt(b[0],10),b=parseInt(b[1],10),c<a.min.x||c>a.max.x||b<a.min.y||b>a.max.y))this._tiles[d].parentNode==this._container&&this._container.removeChild(this._tiles[d]),delete this._tiles[d]},_loadTile:function(a){var b=this._map.getPixelOrigin();b=a.multiplyBy(this.options.tileSize).subtract(b);
-var c=this._map.getZoom(),d=1<<c;a.x=(a.x%d+d)%d;if(!(a.y<0||a.y>=d))d=this._tileImg.cloneNode(!1),L.DomUtil.setPosition(d,b),this._tiles[a.x+":"+a.y]=d,d._leaflet_layer=this,d.onload=this._tileOnLoad,d.onerror=this._tileOnError,d.onselectstart=d.onmousemove=L.Util.falseFn,d.src=this.getTileUrl(a,c),this._container.appendChild(d)},_tileOnLoad:function(){this.className+=" leaflet-tile-loaded";var a=this._leaflet_layer;a.fire("tileload",{tile:this,url:this.src});a._tilesToLoad--;a._tilesToLoad||a.fire("load")},
-_tileOnError:function(){this._leaflet_layer.fire("tileerror",{tile:this,url:this.src});var a=this._leaflet_layer.options.errorTileUrl;if(a)this.src=a}});L.ImageOverlay=L.Class.extend({includes:L.Mixin.Events,initialize:function(a,b){this._url=a;this._bounds=b},onAdd:function(a){this._map=a;this._image=L.DomUtil.create("img","leaflet-image-layer");this._image.style.visibility="hidden";L.Util.extend(this._image,{galleryimg:"no",onselectstart:L.Util.falseFn,onmousemove:L.Util.falseFn,onload:this._onImageLoad,src:this._url});this._map.getPanes().overlayPane.appendChild(this._image);this._map.on("viewreset",this._reset,this);this._reset()},_reset:function(){var a=
-this._map.latLngToLayerPoint(this._bounds.getNorthWest()),b=this._map.latLngToLayerPoint(this._bounds.getSouthEast()).subtract(a);L.DomUtil.setPosition(this._image,a);this._image.style.width=b.x+"px";this._image.style.height=b.y+"px"},_onImageLoad:function(){this.style.visibility=""}});L.Popup=L.Class.extend({includes:L.Mixin.Events,options:{maxWidth:300,autoPan:!0,closeButton:!0,offset:new L.Point(0,2),autoPanPadding:new L.Point(5,5)},initialize:function(a){L.Util.setOptions(this,a)},onAdd:function(a){this._map=a;this._container||this._initLayout();this._updateContent();this._container.style.opacity="0";this._map._panes.popupPane.appendChild(this._container);this._map.on("viewreset",this._updatePosition,this);if(this._map.options.closePopupOnClick)this._map.on("preclick",this._close,
+a+d.from+d.unit);this.fire("step")},_complete:function(){clearInterval(this._timer);this.fire("end")},_cubicBezier:function(a){var b=3*Math.pow(1-a,2)*a,c=3*(1-a)*Math.pow(a,2),d=Math.pow(a,3),a=this._p1.multiplyBy(Math.pow(1-a,3)),b=this._p2.multiplyBy(b),c=this._p3.multiplyBy(c),d=this._p4.multiplyBy(d);return a.add(b).add(c).add(d).y}});L.LatLng=function(a,b,c){c!==!0&&(a=Math.max(Math.min(a,90),-90),b=(b+180)%360+(b<-180?180:-180));this.lat=a;this.lng=b};L.Util.extend(L.LatLng,{DEG_TO_RAD:Math.PI/180,RAD_TO_DEG:180/Math.PI,MAX_MARGIN:1.0E-9});L.LatLng.prototype={equals:function(a){if(!(a instanceof L.LatLng))return!1;return Math.max(Math.abs(this.lat-a.lat),Math.abs(this.lng-a.lng))<=L.LatLng.MAX_MARGIN},toString:function(){return"LatLng("+L.Util.formatNum(this.lat)+", "+L.Util.formatNum(this.lng)+")"}};L.LatLngBounds=L.Class.extend({initialize:function(a,b){if(a)for(var c=a instanceof Array?a:[a,b],d=0,e=c.length;d<e;d++)this.extend(c[d])},extend:function(a){!this._southWest&&!this._northEast?(this._southWest=new L.LatLng(a.lat,a.lng),this._northEast=new L.LatLng(a.lat,a.lng)):(this._southWest.lat=Math.min(a.lat,this._southWest.lat),this._southWest.lng=Math.min(a.lng,this._southWest.lng),this._northEast.lat=Math.max(a.lat,this._northEast.lat),this._northEast.lng=Math.max(a.lng,this._northEast.lng))},
+getCenter:function(){return new L.LatLng((this._southWest.lat+this._northEast.lat)/2,(this._southWest.lng+this._northEast.lng)/2)},getSouthWest:function(){return this._southWest},getNorthEast:function(){return this._northEast},getNorthWest:function(){return new L.LatLng(this._northEast.lat,this._southWest.lng)},getSouthEast:function(){return new L.LatLng(this._southWest.lat,this._northEast.lng)},contains:function(a){var b=this._southWest,c=this._northEast,d;a instanceof L.LatLngBounds?(d=a.getSouthWest(),
+a=a.getNorthEast()):d=a;return d.lat>=b.lat&&a.lat<=c.lat&&d.lng>=b.lng&&a.lng<=c.lng}});L.Projection={};L.Projection.SphericalMercator={MAX_LATITUDE:85.0511287798,project:function(a){var b=L.LatLng.DEG_TO_RAD,c=this.MAX_LATITUDE,d=a.lng*b,a=Math.max(Math.min(c,a.lat),-c)*b,a=Math.log(Math.tan(Math.PI/4+a/2));return new L.Point(d,a)},unproject:function(a,b){var c=L.LatLng.RAD_TO_DEG;return new L.LatLng((2*Math.atan(Math.exp(a.y))-Math.PI/2)*c,a.x*c,b)}};L.Projection.LonLat={project:function(a){return new L.Point(a.lng,a.lat)},unproject:function(a,b){return new L.LatLng(a.y,a.x,b)}};L.Projection.Mercator={MAX_LATITUDE:85.0840591556,R_MINOR:6356752.3142,R_MAJOR:6378137,project:function(a){var b=L.LatLng.DEG_TO_RAD,c=this.MAX_LATITUDE,d=this.R_MAJOR,e=a.lng*b*d,a=Math.max(Math.min(c,a.lat),-c)*b,b=this.R_MINOR/d,b=Math.sqrt(1-b*b),c=b*Math.sin(a),c=Math.pow((1-c)/(1+c),b*0.5),a=-d*Math.log(Math.tan(0.5*(Math.PI*0.5-a))/c);return new L.Point(e,a)},unproject:function(a,b){for(var c=L.LatLng.RAD_TO_DEG,d=this.R_MAJOR,e=a.x*c/d,f=this.R_MINOR/d,f=Math.sqrt(1-f*f),d=Math.exp(-a.y/d),
+g=Math.PI/2-2*Math.atan(d),h=15,j=0.1;Math.abs(j)>1.0E-7&&--h>0;)j=f*Math.sin(g),j=Math.PI/2-2*Math.atan(d*Math.pow((1-j)/(1+j),0.5*f))-g,g+=j;return new L.LatLng(g*c,e,b)}};L.CRS={latLngToPoint:function(a,b){return this.transformation._transform(this.projection.project(a),b)},pointToLatLng:function(a,b,c){return this.projection.unproject(this.transformation.untransform(a,b),c)},project:function(a){return this.projection.project(a)}};L.CRS.EPSG3857=L.Util.extend({},L.CRS,{code:"EPSG:3857",projection:L.Projection.SphericalMercator,transformation:new L.Transformation(0.5/Math.PI,0.5,-0.5/Math.PI,0.5),project:function(a){return this.projection.project(a).multiplyBy(6378137)}});L.CRS.EPSG900913=L.Util.extend({},L.CRS.EPSG3857,{code:"EPSG:900913"});L.CRS.EPSG4326=L.Util.extend({},L.CRS,{code:"EPSG:4326",projection:L.Projection.LonLat,transformation:new L.Transformation(1/360,0.5,-1/360,0.5)});L.CRS.EPSG3395=L.Util.extend({},L.CRS,{code:"EPSG:3395",projection:L.Projection.Mercator,transformation:function(){var a=L.Projection.Mercator;return new L.Transformation(0.5/(Math.PI*a.R_MAJOR),0.5,-0.5/(Math.PI*a.R_MINOR),0.5)}()});L.LayerGroup=L.Class.extend({initialize:function(a){this._layers={};if(a)for(var b=0,c=a.length;b<c;b++)this.addLayer(a[b])},addLayer:function(a){this._layers[L.Util.stamp(a)]=a;this._map&&this._map.addLayer(a);return this},removeLayer:function(a){delete this._layers[L.Util.stamp(a)];this._map&&this._map.removeLayer(a);return this},clearLayers:function(){this._iterateLayers(this.removeLayer,this);return this},onAdd:function(a){this._map=a;this._iterateLayers(a.addLayer,a)},onRemove:function(a){this._iterateLayers(a.removeLayer,
+a);delete this._map},_iterateLayers:function(a,b){for(var c in this._layers)this._layers.hasOwnProperty(c)&&a.call(b,this._layers[c])}});L.FeatureGroup=L.LayerGroup.extend({includes:L.Mixin.Events,addLayer:function(a){this._initEvents(a);L.LayerGroup.prototype.addLayer.call(this,a);this._popupContent&&a.bindPopup&&a.bindPopup(this._popupContent)},bindPopup:function(a){this._popupContent=a;for(var b in this._layers)this._layers.hasOwnProperty(b)&&this._layers[b].bindPopup&&this._layers[b].bindPopup(a)},_events:["click","dblclick","mouseover","mouseout"],_initEvents:function(a){for(var b=0,c=this._events.length;b<c;b++)a.on(this._events[b],
+this._propagateEvent,this)},_propagateEvent:function(a){a.layer=a.target;a.target=this;this.fire(a.type,a)}});L.TileLayer=L.Class.extend({includes:L.Mixin.Events,options:{minZoom:0,maxZoom:18,tileSize:256,subdomains:"abc",errorTileUrl:"",attribution:"",opacity:1,scheme:"xyz",noWrap:!1,unloadInvisibleTiles:L.Browser.mobileWebkit,updateWhenIdle:L.Browser.mobileWebkit},initialize:function(a,b){L.Util.setOptions(this,b);this._url=a;if(typeof this.options.subdomains=="string")this.options.subdomains=this.options.subdomains.split("")},onAdd:function(a){this._map=a;this._initContainer();this._createTileProto();
+a.on("viewreset",this._reset,this);if(this.options.updateWhenIdle)a.on("moveend",this._update,this);else this._limitedUpdate=L.Util.limitExecByInterval(this._update,100,this),a.on("move",this._limitedUpdate,this);this._reset();this._update()},onRemove:function(){this._map.getPanes().tilePane.removeChild(this._container);this._container=null;this._map.off("viewreset",this._reset,this);this.options.updateWhenIdle?this._map.off("moveend",this._update,this):this._map.off("move",this._limitedUpdate,this)},
+getAttribution:function(){return this.options.attribution},setOpacity:function(a){this.options.opacity=a;this._setOpacity(a);if(L.Browser.webkit)for(i in this._tiles)this._tiles[i].style.webkitTransform+=" translate(0,0)"},_setOpacity:function(a){a<1&&L.DomUtil.setOpacity(this._container,a)},_initContainer:function(){var a=this._map.getPanes().tilePane;if(!this._container||a.empty)this._container=L.DomUtil.create("div","leaflet-layer",a),this._setOpacity(this.options.opacity)},_reset:function(){this._tiles=
+{};this._initContainer();this._container.innerHTML=""},_update:function(){var a=this._map.getPixelBounds(),b=this.options.tileSize,c=new L.Point(Math.floor(a.min.x/b),Math.floor(a.min.y/b)),a=new L.Point(Math.floor(a.max.x/b),Math.floor(a.max.y/b)),c=new L.Bounds(c,a);this._addTilesFromCenterOut(c);this.options.unloadInvisibleTiles&&this._removeOtherTiles(c)},_addTilesFromCenterOut:function(a){for(var b=[],c=a.getCenter(),d=a.min.y;d<=a.max.y;d++)for(var e=a.min.x;e<=a.max.x;e++)e+":"+d in this._tiles||
+b.push(new L.Point(e,d));b.sort(function(a,b){return a.distanceTo(c)-b.distanceTo(c)});this._tilesToLoad=b.length;a=0;for(d=this._tilesToLoad;a<d;a++)this._addTile(b[a])},_removeOtherTiles:function(a){var b,c,d;for(d in this._tiles)if(this._tiles.hasOwnProperty(d)&&(b=d.split(":"),c=parseInt(b[0],10),b=parseInt(b[1],10),c<a.min.x||c>a.max.x||b<a.min.y||b>a.max.y))this._tiles[d].src="",this._tiles[d].parentNode==this._container&&this._container.removeChild(this._tiles[d]),delete this._tiles[d]},_addTile:function(a){var b=
+this._getTilePos(a),c=this._map.getZoom(),d=a.x+":"+a.y,e=1<<c;if(!this.options.noWrap)a.x=(a.x%e+e)%e;if(!(a.y<0||a.y>=e)){var f=this._createTile();L.DomUtil.setPosition(f,b);this._tiles[d]=f;if(this.options.scheme=="tms")a.y=e-a.y-1;this._loadTile(f,a,c);this._container.appendChild(f)}},_getTilePos:function(a){var b=this._map.getPixelOrigin();return a.multiplyBy(this.options.tileSize).subtract(b)},getTileUrl:function(a,b){return this._url.replace("{s}",this.options.subdomains[(a.x+a.y)%this.options.subdomains.length]).replace("{z}",
+b).replace("{x}",a.x).replace("{y}",a.y)},_createTileProto:function(){this._tileImg=L.DomUtil.create("img","leaflet-tile");this._tileImg.galleryimg="no";var a=this.options.tileSize;this._tileImg.style.width=a+"px";this._tileImg.style.height=a+"px"},_createTile:function(){var a=this._tileImg.cloneNode(!1);a.onselectstart=a.onmousemove=L.Util.falseFn;return a},_loadTile:function(a,b,c){a._layer=this;a.onload=this._tileOnLoad;a.onerror=this._tileOnError;a.src=this.getTileUrl(b,c)},_tileOnLoad:function(){var a=
+this._layer;this.className+=" leaflet-tile-loaded";a.fire("tileload",{tile:this,url:this.src});a._tilesToLoad--;a._tilesToLoad||a.fire("load")},_tileOnError:function(){var a=this._layer;a.fire("tileerror",{tile:this,url:this.src});if(a=a.options.errorTileUrl)this.src=a}});L.TileLayer.WMS=L.TileLayer.extend({defaultWmsParams:{service:"WMS",request:"GetMap",version:"1.1.1",layers:"",styles:"",format:"image/jpeg",transparent:!1},initialize:function(a,b){this._url=a;this.wmsParams=L.Util.extend({},this.defaultWmsParams);this.wmsParams.width=this.wmsParams.height=this.options.tileSize;for(var c in b)this.options.hasOwnProperty(c)||(this.wmsParams[c]=b[c]);L.Util.setOptions(this,b)},onAdd:function(a){this.wmsParams[parseFloat(this.wmsParams.version)>=1.3?"crs":"srs"]=a.options.crs.code;
+L.TileLayer.prototype.onAdd.call(this,a)},getTileUrl:function(a){var b=this.options.tileSize,a=a.multiplyBy(b),b=a.add(new L.Point(b,b)),a=this._map.unproject(a,this._zoom,!0),b=this._map.unproject(b,this._zoom,!0),a=this._map.options.crs.project(a),b=this._map.options.crs.project(b),b=[a.x,b.y,b.x,a.y].join(",");return this._url+L.Util.getParamString(this.wmsParams)+"&bbox="+b}});L.TileLayer.Canvas=L.TileLayer.extend({options:{async:!1},initialize:function(a){L.Util.setOptions(this,a)},_createTileProto:function(){this._canvasProto=L.DomUtil.create("canvas","leaflet-tile");var a=this.options.tileSize;this._canvasProto.width=a;this._canvasProto.height=a},_createTile:function(){var a=this._canvasProto.cloneNode(!1);a.onselectstart=a.onmousemove=L.Util.falseFn;return a},_loadTile:function(a,b,c){a._layer=this;this.drawTile(a,b,c);this.options.async||this.tileDrawn(a)},drawTile:function(){},
+tileDrawn:function(a){this._tileOnLoad.call(a)}});L.ImageOverlay=L.Class.extend({includes:L.Mixin.Events,initialize:function(a,b){this._url=a;this._bounds=b},onAdd:function(a){this._map=a;this._image||this._initImage();a.getPanes().overlayPane.appendChild(this._image);a.on("viewreset",this._reset,this);this._reset()},onRemove:function(a){a.getPanes().overlayPane.removeChild(this._image);a.off("viewreset",this._reset,this)},_initImage:function(){this._image=L.DomUtil.create("img","leaflet-image-layer");this._image.style.visibility="hidden";L.Util.extend(this._image,
+{galleryimg:"no",onselectstart:L.Util.falseFn,onmousemove:L.Util.falseFn,onload:this._onImageLoad,src:this._url})},_reset:function(){var a=this._map.latLngToLayerPoint(this._bounds.getNorthWest()),b=this._map.latLngToLayerPoint(this._bounds.getSouthEast()).subtract(a);L.DomUtil.setPosition(this._image,a);this._image.style.width=b.x+"px";this._image.style.height=b.y+"px"},_onImageLoad:function(){this.style.visibility=""}});L.Popup=L.Class.extend({includes:L.Mixin.Events,options:{maxWidth:300,autoPan:!0,closeButton:!0,offset:new L.Point(0,2),autoPanPadding:new L.Point(5,5)},initialize:function(a){L.Util.setOptions(this,a)},onAdd:function(a){this._map=a;this._container||this._initLayout();this._updateContent();this._container.style.opacity="0";this._map._panes.popupPane.appendChild(this._container);this._map.on("viewreset",this._updatePosition,this);if(this._map.options.closePopupOnClick)this._map.on("preclick",this._close,
 this);this._update();this._container.style.opacity="1";this._opened=!0},onRemove:function(a){a._panes.popupPane.removeChild(this._container);a.off("viewreset",this._updatePosition,this);a.off("click",this._close,this);this._container.style.opacity="0";this._opened=!1},setLatLng:function(a){this._latlng=a;this._opened&&this._update();return this},setContent:function(a){this._content=a;this._opened&&this._update();return this},_close:function(){this._opened&&this._map.removeLayer(this)},_initLayout:function(){this._container=
-L.DomUtil.create("div","leaflet-popup");L.DomEvent.disableClickPropagation(this._container);this._closeButton=L.DomUtil.create("a","leaflet-popup-close-button",this._container);this._closeButton.href="#close";this._closeButton.onclick=L.Util.bind(this._onCloseButtonClick,this);this._wrapper=L.DomUtil.create("div","leaflet-popup-content-wrapper",this._container);this._contentNode=L.DomUtil.create("div","leaflet-popup-content",this._wrapper);this._tipContainer=L.DomUtil.create("div","leaflet-popup-tip-container",
-this._container);this._tip=L.DomUtil.create("div","leaflet-popup-tip",this._tipContainer)},_update:function(){this._container.style.visibility="hidden";this._updateLayout();this._updatePosition();this._container.style.visibility="";this._adjustPan()},_updateContent:function(){if(this._content)this._contentNode.innerHTML=this._content},_updateLayout:function(){this._container.style.width="";this._container.style.whiteSpace="nowrap";var a=this._container.offsetWidth;this._container.style.width=(a>this.options.maxWidth?
-this.options.maxWidth:a)+"px";this._container.style.whiteSpace="";this._containerWidth=this._container.offsetWidth},_updatePosition:function(){var a=this._map.latLngToLayerPoint(this._latlng);this._containerBottom=-a.y-this.options.offset.y;this._containerLeft=a.x-Math.round(this._containerWidth/2)+this.options.offset.x;this._container.style.bottom=this._containerBottom+"px";this._container.style.left=this._containerLeft+"px"},_adjustPan:function(){if(this.options.autoPan){var a=this._container.offsetHeight,
-b=this._map.layerPointToContainerPoint(new L.Point(this._containerLeft,-a-this._containerBottom)),c=new L.Point(0,0),d=this.options.autoPanPadding,e=this._map.getSize();if(b.x<0)c.x=b.x-d.x;if(b.x+this._containerWidth>e.x)c.x=b.x+this._containerWidth-e.x+d.x;if(b.y<0)c.y=b.y-d.y;if(b.y+a>e.y)c.y=b.y+a-e.y+d.y;(c.x||c.y)&&this._map.panBy(c)}},_onCloseButtonClick:function(){this._close();return!1}});L.Icon=L.Class.extend({iconUrl:L.ROOT_URL+"images/marker.png",shadowUrl:L.ROOT_URL+"images/marker-shadow.png",iconSize:new L.Point(25,41),shadowSize:new L.Point(41,41),iconAnchor:new L.Point(13,41),popupAnchor:new L.Point(0,-33),initialize:function(a){if(a)this.iconUrl=a},createIcon:function(){return this._createIcon("icon")},createShadow:function(){return this._createIcon("shadow")},_createIcon:function(a){var b=this[a+"Size"],c=this._createImg(this[a+"Url"]);c.className="leaflet-marker-"+a;c.style.marginLeft=
--this.iconAnchor.x+"px";c.style.marginTop=-this.iconAnchor.y+"px";if(b)c.style.width=b.x+"px",c.style.height=b.y+"px";return c},_createImg:function(a){var b;L.Browser.ie6?(b=document.createElement("div"),b.style.filter='progid:DXImageTransform.Microsoft.AlphaImageLoader(src="'+a+'")'):(b=document.createElement("img"),b.src=a);return b}});L.Marker=L.Class.extend({includes:L.Mixin.Events,options:{icon:new L.Icon,clickable:!0,draggable:!1},initialize:function(a,b){L.Util.setOptions(this,b);this._latlng=a},onAdd:function(a){this._map=a;if(!this._icon)this._icon=this.options.icon.createIcon(),a._panes.markerPane.appendChild(this._icon),this._initInteraction();if(!this._shadow)this._shadow=this.options.icon.createShadow(),a._panes.shadowPane.appendChild(this._shadow);a.on("viewreset",this._reset,this);this._reset()},onRemove:function(a){this._icon&&
-a._panes.markerPane.removeChild(this._icon);this._shadow&&a._panes.shadowPane.removeChild(this._shadow);a.off("viewreset",this._reset,this)},getLatLng:function(){return this._latlng},_reset:function(){var a=this._map.latLngToLayerPoint(this._latlng).round();L.DomUtil.setPosition(this._icon,a);L.DomUtil.setPosition(this._shadow,a);this._icon.style.zIndex=a.y},_initInteraction:function(){this.options.clickable&&(this._icon.className+=" leaflet-clickable",L.DomEvent.addListener(this._icon,"mousedown",
-this._fireMouseEvent,this),L.DomEvent.addListener(this._icon,"click",this._onMouseClick,this),L.DomEvent.addListener(this._icon,"dblclick",this._fireMouseEvent,this));if(L.Handler.MarkerDrag)this.dragging=new L.Handler.MarkerDrag(this),this.options.draggable&&this.dragging.enable()},_onMouseClick:function(a){L.DomEvent.stopPropagation(a);(!this.dragging||!this.dragging.moved())&&this.fire(a.type)},_fireMouseEvent:function(a){this.fire(a.type);L.DomEvent.stopPropagation(a)}});L.Marker.include({openPopup:function(){this._popup.setLatLng(this._latlng);this._map.openPopup(this._popup);return this},closePopup:function(){this._popup&&this._popup._close()},bindPopup:function(a,b){b=L.Util.extend({offset:this.options.icon.popupAnchor},b);this._popup=new L.Popup(b);this._popup.setContent(a);this.on("click",this.openPopup,this);return this}});L.Path=L.Class.extend({includes:[L.Mixin.Events],statics:function(){return{SVG_NS:"http://www.w3.org/2000/svg",SVG:!(!document.createElementNS||!document.createElementNS("http://www.w3.org/2000/svg","svg").createSVGRect),CLIP_PADDING:0.5}}(),options:{stroke:!0,color:"#0033ff",weight:5,opacity:0.5,fill:!1,fillColor:null,fillOpacity:0.2,clickable:!0,updateOnMoveEnd:!1},initialize:function(a){L.Util.setOptions(this,a)},onAdd:function(a){this._map=a;this._initElements();this._initEvents();this.projectLatlngs();
-this._updatePath();a.on("viewreset",this.projectLatlngs,this);this._updateTrigger=this.options.updateOnMoveEnd?"moveend":"viewreset";a.on(this._updateTrigger,this._updatePath,this)},onRemove:function(a){a._pathRoot.removeChild(this._container);a.off("viewreset",this._projectLatlngs,this);a.off(this._updateTrigger,this._updatePath,this)},projectLatlngs:function(){},getPathString:function(){},_initElements:function(){this._initRoot();this._initPath();this._initStyle()},_initRoot:function(){if(!this._map._pathRoot)this._map._pathRoot=
-this._createElement("svg"),this._map._panes.overlayPane.appendChild(this._map._pathRoot),this._map.on("moveend",this._updateSvgViewport,this),this._updateSvgViewport()},_updateSvgViewport:function(){this._updateViewport();var a=this._map._pathViewport,b=a.min,c=a.max;a=c.x-b.x;c=c.y-b.y;var d=this._map._pathRoot,e=this._map._panes.overlayPane;L.Browser.mobileWebkit&&e.removeChild(d);L.DomUtil.setPosition(d,b);d.setAttribute("width",a);d.setAttribute("height",c);d.setAttribute("viewBox",[b.x,b.y,a,
-c].join(" "));L.Browser.mobileWebkit&&e.appendChild(d)},_updateViewport:function(){var a=L.Path.CLIP_PADDING,b=this._map.getSize(),c=L.DomUtil.getPosition(this._map._mapPane).multiplyBy(-1).subtract(b.multiplyBy(a));a=c.add(b.multiplyBy(1+a*2));this._map._pathViewport=new L.Bounds(c,a)},_initPath:function(){this._container=this._createElement("g");this._path=this._createElement("path");this._container.appendChild(this._path);this._map._pathRoot.appendChild(this._container)},_initStyle:function(){this.options.stroke&&
-(this._path.setAttribute("stroke-linejoin","round"),this._path.setAttribute("stroke-linecap","round"));this.options.fill?this._path.setAttribute("fill-rule","evenodd"):this._path.setAttribute("fill","none");this._updateStyle()},_updateStyle:function(){this.options.stroke&&(this._path.setAttribute("stroke",this.options.color),this._path.setAttribute("stroke-opacity",this.options.opacity),this._path.setAttribute("stroke-width",this.options.weight));this.options.fill&&(this._path.setAttribute("fill",
-this.options.fillColor||this.options.color),this._path.setAttribute("fill-opacity",this.options.fillOpacity))},_updatePath:function(){var a=this.getPathString();a||(a="M0 0");this._path.setAttribute("d",a)},_createElement:function(a){return document.createElementNS(L.Path.SVG_NS,a)},_initEvents:function(){this.options.clickable&&(this._path.setAttribute("class","leaflet-clickable"),L.DomEvent.addListener(this._container,"click",this._onMouseClick,this),L.DomEvent.addListener(this._container,"dblclick",
-this._fireMouseEvent,this),L.DomEvent.addListener(this._container,"mousedown",this._fireMouseEvent,this))},_onMouseClick:function(a){(!this._map.dragging||!this._map.dragging.moved())&&this._fireMouseEvent(a)},_fireMouseEvent:function(a){this.hasEventListeners(a.type)&&(this.fire(a.type,{latlng:this._map.mouseEventToLatLng(a),layerPoint:this._map.mouseEventToLayerPoint(a)}),L.DomEvent.stopPropagation(a))}});L.Path.VML=function(){var a=document.createElement("div");a.innerHTML='<v:shape adj="1"/>';a=a.firstChild;a.style.behavior="url(#default#VML)";return a&&typeof a.adj=="object"}();
-L.Path=!L.Path.VML?L.Path:L.Path.extend({statics:{CLIP_PADDING:0.02},_createElement:function(){document.createStyleSheet().addRule(".lvml","behavior:url(#default#VML); display: inline-block; position: absolute;");try{return document.namespaces.add("lvml","urn:schemas-microsoft-com:vml"),function(a){return document.createElement("<lvml:"+a+' class="lvml">')}}catch(a){return function(a){return document.createElement("<"+a+' xmlns="urn:schemas-microsoft.com:vml" class="lvml">')}}}(),_initRoot:function(){if(!this._map._pathRoot)this._map._pathRoot=
-document.createElement("div"),this._map._pathRoot.className="leaflet-vml-container",this._map._panes.overlayPane.appendChild(this._map._pathRoot),this._map.on("moveend",this._updateViewport,this),this._updateViewport()},_initPath:function(){this._container=this._createElement("shape");this._container.className+=" leaflet-vml-shape";this._container.coordsize="1 1";this._path=this._createElement("path");this._container.appendChild(this._path);this._map._pathRoot.appendChild(this._container)},_initStyle:function(){this.options.stroke?
+L.DomUtil.create("div","leaflet-popup");this._closeButton=L.DomUtil.create("a","leaflet-popup-close-button",this._container);this._closeButton.href="#close";this._closeButton.onclick=L.Util.bind(this._onCloseButtonClick,this);this._wrapper=L.DomUtil.create("div","leaflet-popup-content-wrapper",this._container);L.DomEvent.disableClickPropagation(this._wrapper);this._contentNode=L.DomUtil.create("div","leaflet-popup-content",this._wrapper);this._tipContainer=L.DomUtil.create("div","leaflet-popup-tip-container",
+this._container);this._tip=L.DomUtil.create("div","leaflet-popup-tip",this._tipContainer)},_update:function(){this._container.style.visibility="hidden";this._updateContent();this._updateLayout();this._updatePosition();this._container.style.visibility="";this._adjustPan()},_updateContent:function(){if(this._content)typeof this._content=="string"?this._contentNode.innerHTML=this._content:(this._contentNode.innerHTML="",this._contentNode.appendChild(this._content))},_updateLayout:function(){this._container.style.width=
+"";this._container.style.whiteSpace="nowrap";var a=this._container.offsetWidth;this._container.style.width=(a>this.options.maxWidth?this.options.maxWidth:a)+"px";this._container.style.whiteSpace="";this._containerWidth=this._container.offsetWidth},_updatePosition:function(){var a=this._map.latLngToLayerPoint(this._latlng);this._containerBottom=-a.y-this.options.offset.y;this._containerLeft=a.x-Math.round(this._containerWidth/2)+this.options.offset.x;this._container.style.bottom=this._containerBottom+
+"px";this._container.style.left=this._containerLeft+"px"},_adjustPan:function(){if(this.options.autoPan){var a=this._container.offsetHeight,b=this._map.layerPointToContainerPoint(new L.Point(this._containerLeft,-a-this._containerBottom)),c=new L.Point(0,0),d=this.options.autoPanPadding,e=this._map.getSize();if(b.x<0)c.x=b.x-d.x;if(b.x+this._containerWidth>e.x)c.x=b.x+this._containerWidth-e.x+d.x;if(b.y<0)c.y=b.y-d.y;if(b.y+a>e.y)c.y=b.y+a-e.y+d.y;(c.x||c.y)&&this._map.panBy(c)}},_onCloseButtonClick:function(a){this._close();
+L.DomEvent.stop(a)}});L.Icon=L.Class.extend({iconUrl:L.ROOT_URL+"images/marker.png",shadowUrl:L.ROOT_URL+"images/marker-shadow.png",iconSize:new L.Point(25,41),shadowSize:new L.Point(41,41),iconAnchor:new L.Point(13,41),popupAnchor:new L.Point(0,-33),initialize:function(a){if(a)this.iconUrl=a},createIcon:function(){return this._createIcon("icon")},createShadow:function(){return this._createIcon("shadow")},_createIcon:function(a){var b=this[a+"Size"],c=this[a+"Url"],d=this._createImg(c);if(!c)return null;d.className="leaflet-marker-"+
+a;d.style.marginLeft=-this.iconAnchor.x+"px";d.style.marginTop=-this.iconAnchor.y+"px";if(b)d.style.width=b.x+"px",d.style.height=b.y+"px";return d},_createImg:function(a){var b;L.Browser.ie6?(b=document.createElement("div"),b.style.filter='progid:DXImageTransform.Microsoft.AlphaImageLoader(src="'+a+'")'):(b=document.createElement("img"),b.src=a);return b}});L.Marker=L.Class.extend({includes:L.Mixin.Events,options:{icon:new L.Icon,title:"",clickable:!0,draggable:!1},initialize:function(a,b){L.Util.setOptions(this,b);this._latlng=a},onAdd:function(a){this._map=a;this._initIcon();a.on("viewreset",this._reset,this);this._reset()},onRemove:function(a){this._removeIcon();a.off("viewreset",this._reset,this)},getLatLng:function(){return this._latlng},setLatLng:function(a){this._latlng=a;this._reset()},setIcon:function(a){this._removeIcon();this._icon=this._shadow=
+null;this.options.icon=a;this._initIcon()},_initIcon:function(){if(!this._icon){this._icon=this.options.icon.createIcon();if(this.options.title)this._icon.title=this.options.title;this._initInteraction()}if(!this._shadow)this._shadow=this.options.icon.createShadow();this._map._panes.markerPane.appendChild(this._icon);this._shadow&&this._map._panes.shadowPane.appendChild(this._shadow)},_removeIcon:function(){this._map._panes.markerPane.removeChild(this._icon);this._shadow&&this._map._panes.shadowPane.removeChild(this._shadow)},
+_reset:function(){var a=this._map.latLngToLayerPoint(this._latlng).round();L.DomUtil.setPosition(this._icon,a);this._shadow&&L.DomUtil.setPosition(this._shadow,a);this._icon.style.zIndex=a.y},_initInteraction:function(){if(this.options.clickable){this._icon.className+=" leaflet-clickable";L.DomEvent.addListener(this._icon,"click",this._onMouseClick,this);for(var a=["dblclick","mousedown","mouseover","mouseout"],b=0;b<a.length;b++)L.DomEvent.addListener(this._icon,a[b],this._fireMouseEvent,this)}if(L.Handler.MarkerDrag)this.dragging=
+new L.Handler.MarkerDrag(this),this.options.draggable&&this.dragging.enable()},_onMouseClick:function(a){L.DomEvent.stopPropagation(a);(!this.dragging||!this.dragging.moved())&&this.fire(a.type)},_fireMouseEvent:function(a){this.fire(a.type);L.DomEvent.stopPropagation(a)}});L.Marker.include({openPopup:function(){this._popup.setLatLng(this._latlng);this._map.openPopup(this._popup);return this},closePopup:function(){this._popup&&this._popup._close()},bindPopup:function(a,b){b=L.Util.extend({offset:this.options.icon.popupAnchor},b);this._popup=new L.Popup(b);this._popup.setContent(a);this.on("click",this.openPopup,this);return this}});L.Path=L.Class.extend({includes:[L.Mixin.Events],statics:function(){return{SVG_NS:"http://www.w3.org/2000/svg",SVG:!(!document.createElementNS||!document.createElementNS("http://www.w3.org/2000/svg","svg").createSVGRect),CLIP_PADDING:0.5}}(),options:{stroke:!0,color:"#0033ff",weight:5,opacity:0.5,fill:!1,fillColor:null,fillOpacity:0.2,clickable:!0,updateOnMoveEnd:!1},initialize:function(a){L.Util.setOptions(this,a)},onAdd:function(a){this._map=a;this._initElements();this._initEvents();this.projectLatlngs();
+this._updatePath();a.on("viewreset",this.projectLatlngs,this);this._updateTrigger=this.options.updateOnMoveEnd?"moveend":"viewreset";a.on(this._updateTrigger,this._updatePath,this)},onRemove:function(a){a._pathRoot.removeChild(this._container);a.off("viewreset",this._projectLatlngs,this);a.off(this._updateTrigger,this._updatePath,this)},projectLatlngs:function(){},getPathString:function(){},setStyle:function(a){L.Util.setOptions(this,a);this._path&&this._updateStyle()},_initElements:function(){this._initRoot();
+this._initPath();this._initStyle()},_initRoot:function(){if(!this._map._pathRoot)this._map._pathRoot=this._createElement("svg"),this._map._panes.overlayPane.appendChild(this._map._pathRoot),this._map.on("moveend",this._updateSvgViewport,this),this._updateSvgViewport()},_updateSvgViewport:function(){this._updateViewport();var a=this._map._pathViewport,b=a.min,c=a.max,a=c.x-b.x,c=c.y-b.y,d=this._map._pathRoot,e=this._map._panes.overlayPane;L.Browser.mobileWebkit&&e.removeChild(d);L.DomUtil.setPosition(d,
+b);d.setAttribute("width",a);d.setAttribute("height",c);d.setAttribute("viewBox",[b.x,b.y,a,c].join(" "));L.Browser.mobileWebkit&&e.appendChild(d)},_updateViewport:function(){var a=L.Path.CLIP_PADDING,b=this._map.getSize(),c=L.DomUtil.getPosition(this._map._mapPane).multiplyBy(-1).subtract(b.multiplyBy(a)),a=c.add(b.multiplyBy(1+a*2));this._map._pathViewport=new L.Bounds(c,a)},_initPath:function(){this._container=this._createElement("g");this._path=this._createElement("path");this._container.appendChild(this._path);
+this._map._pathRoot.appendChild(this._container)},_initStyle:function(){this.options.stroke&&(this._path.setAttribute("stroke-linejoin","round"),this._path.setAttribute("stroke-linecap","round"));this.options.fill?this._path.setAttribute("fill-rule","evenodd"):this._path.setAttribute("fill","none");this._updateStyle()},_updateStyle:function(){this.options.stroke&&(this._path.setAttribute("stroke",this.options.color),this._path.setAttribute("stroke-opacity",this.options.opacity),this._path.setAttribute("stroke-width",
+this.options.weight));this.options.fill&&(this._path.setAttribute("fill",this.options.fillColor||this.options.color),this._path.setAttribute("fill-opacity",this.options.fillOpacity))},_updatePath:function(){var a=this.getPathString();a||(a="M0 0");this._path.setAttribute("d",a)},_createElement:function(a){return document.createElementNS(L.Path.SVG_NS,a)},_initEvents:function(){if(this.options.clickable){L.Path.VML||this._path.setAttribute("class","leaflet-clickable");L.DomEvent.addListener(this._container,
+"click",this._onMouseClick,this);for(var a=["dblclick","mousedown","mouseover","mouseout"],b=0;b<a.length;b++)L.DomEvent.addListener(this._container,a[b],this._fireMouseEvent,this)}},_onMouseClick:function(a){(!this._map.dragging||!this._map.dragging.moved())&&this._fireMouseEvent(a)},_fireMouseEvent:function(a){this.hasEventListeners(a.type)&&(this.fire(a.type,{latlng:this._map.mouseEventToLatLng(a),layerPoint:this._map.mouseEventToLayerPoint(a)}),L.DomEvent.stopPropagation(a))},_redraw:function(){this.projectLatlngs();
+this._updatePath()}});L.Path.VML=function(){var a=document.createElement("div");a.innerHTML='<v:shape adj="1"/>';a=a.firstChild;a.style.behavior="url(#default#VML)";return a&&typeof a.adj=="object"}();
+L.Path=L.Path.SVG||!L.Path.VML?L.Path:L.Path.extend({statics:{CLIP_PADDING:0.02},_createElement:function(){try{return document.namespaces.add("lvml","urn:schemas-microsoft-com:vml"),function(a){return document.createElement("<lvml:"+a+' class="lvml">')}}catch(a){return function(a){return document.createElement("<"+a+' xmlns="urn:schemas-microsoft.com:vml" class="lvml">')}}}(),_initRoot:function(){if(!this._map._pathRoot)this._map._pathRoot=document.createElement("div"),this._map._pathRoot.className=
+"leaflet-vml-container",this._map._panes.overlayPane.appendChild(this._map._pathRoot),this._map.on("moveend",this._updateViewport,this),this._updateViewport()},_initPath:function(){this._container=this._createElement("shape");this._container.className+=" leaflet-vml-shape"+(this.options.clickable?" leaflet-clickable":"");this._container.coordsize="1 1";this._path=this._createElement("path");this._container.appendChild(this._path);this._map._pathRoot.appendChild(this._container)},_initStyle:function(){this.options.stroke?
 (this._stroke=this._createElement("stroke"),this._stroke.endcap="round",this._container.appendChild(this._stroke)):this._container.stroked=!1;this.options.fill?(this._container.filled=!0,this._fill=this._createElement("fill"),this._container.appendChild(this._fill)):this._container.filled=!1;this._updateStyle()},_updateStyle:function(){if(this.options.stroke)this._stroke.weight=this.options.weight+"px",this._stroke.color=this.options.color,this._stroke.opacity=this.options.opacity;if(this.options.fill)this._fill.color=
-this.options.fillColor||this.options.color,this._fill.opacity=this.options.fillOpacity},_updatePath:function(){this._container.style.display="none";this._path.v=this.getPathString()+" ";this._container.style.display=""}});L.Path.include({bindPopup:function(a,b){this._popup=new L.Popup(b);this._popup.setContent(a);this.on("click",this._openPopup,this);return this},_openPopup:function(a){this._popup.setLatLng(a.latlng);this._map.openPopup(this._popup)}});L.Polyline=L.Path.extend({initialize:function(a,b){L.Path.prototype.initialize.call(this,b);this._latlngs=a},options:{smoothFactor:1,noClip:!1,updateOnMoveEnd:!0},projectLatlngs:function(){this._originalPoints=[];for(var a=0,b=this._latlngs.length;a<b;a++)this._originalPoints[a]=this._map.latLngToLayerPoint(this._latlngs[a])},getPathString:function(){for(var a=0,b=this._parts.length,c="";a<b;a++)c+=this._getPathPartStr(this._parts[a]);return c},_getPathPartStr:function(a){for(var b=L.Path.VML,c=0,
-d=a.length,e="",f;c<d;c++)f=a[c],b&&f._round(),e+=(c?"L":"M")+f.x+" "+f.y;return e},_clipPoints:function(){var a=this._originalPoints,b=a.length,c,d,e;if(this.options.noClip)this._parts=[a];else{var f=this._parts=[],g=this._map._pathViewport,h=L.LineUtil;for(d=c=0;c<b-1;c++)if(e=h.clipSegment(a[c],a[c+1],g,c))if(f[d]=f[d]||[],f[d].push(e[0]),e[1]!=a[c+1]||c==b-2)f[d].push(e[1]),d++}},_simplifyPoints:function(){for(var a=this._parts,b=L.LineUtil,c=0,d=a.length;c<d;c++)a[c]=b.simplify(a[c],this.options.smoothFactor)},
-_updatePath:function(){this._clipPoints();this._simplifyPoints();L.Path.prototype._updatePath.call(this)}});L.Polygon=L.Polyline.extend({options:{fill:!0},initialize:function(a,b){L.Polyline.prototype.initialize.call(this,a,b);if(a[0]instanceof Array)this._latlngs=a[0],this._holes=a.slice(1)},projectLatlngs:function(){L.Polyline.prototype.projectLatlngs.call(this);this._holePoints=[];if(this._holes)for(var a=0,b=this._holes.length;a<b;a++){this._holePoints[a]=[];for(var c=0,d=this._holes[a].length;c<d;c++)this._holePoints[a][c]=this._map.latLngToLayerPoint(this._holes[a][c])}},_clipPoints:function(){var a=
-[];this._parts=[this._originalPoints].concat(this._holePoints);if(!this.options.noClip){for(var b=0,c=this._parts.length;b<c;b++){var d=L.PolyUtil.clipPolygon(this._parts[b],this._map._pathViewport);d.length&&a.push(d)}this._parts=a}},_getPathPartStr:function(a){return L.Polyline.prototype._getPathPartStr.call(this,a)+(L.Path.SVG?"z":"x")}});L.Circle=L.Path.extend({initialize:function(a,b,c){L.Path.prototype.initialize.call(this,c);this._latlng=a;this._radius=b},options:{fill:!0},projectLatlngs:function(){this._point=this._map.latLngToLayerPoint(this._latlng)},getPathString:function(){var a=this._point,b=this._radius;return L.Path.SVG?"M"+a.x+","+(a.y-b)+"A"+b+","+b+",0,1,1,"+(a.x-0.1)+","+(a.y-b)+" z":(a._round(),b=Math.round(b),"AL "+a.x+","+a.y+" "+b+","+b+" 0,23592600")}});L.Handler=L.Class.extend({initialize:function(a){this._map=a},enabled:function(){return!!this._enabled}});L.Handler.MapDrag=L.Handler.extend({enable:function(){if(!this._enabled){if(!this._draggable)this._draggable=new L.Draggable(this._map._mapPane,this._map._container),this._draggable.on("dragstart",this._onDragStart,this),this._draggable.on("drag",this._onDrag,this),this._draggable.on("dragend",this._onDragEnd,this);this._draggable.enable();this._enabled=!0}},disable:function(){if(this._enabled)this._draggable.disable(),this._enabled=!1},moved:function(){return this._draggable._moved},_onDragStart:function(){this._map.fire("movestart");
+this.options.fillColor||this.options.color,this._fill.opacity=this.options.fillOpacity},_updatePath:function(){this._container.style.display="none";this._path.v=this.getPathString()+" ";this._container.style.display=""}});L.Path.include({bindPopup:function(a,b){if(!this._popup||this._popup.options!==b)this._popup=new L.Popup(b);this._popup.setContent(a);if(!this._openPopupAdded)this.on("click",this._openPopup,this),this._openPopupAdded=!0;return this},_openPopup:function(a){this._popup.setLatLng(a.latlng);this._map.openPopup(this._popup)}});L.Polyline=L.Path.extend({initialize:function(a,b){L.Path.prototype.initialize.call(this,b);this._latlngs=a},options:{smoothFactor:1,noClip:!1,updateOnMoveEnd:!0},projectLatlngs:function(){this._originalPoints=[];for(var a=0,b=this._latlngs.length;a<b;a++)this._originalPoints[a]=this._map.latLngToLayerPoint(this._latlngs[a])},getPathString:function(){for(var a=0,b=this._parts.length,c="";a<b;a++)c+=this._getPathPartStr(this._parts[a]);return c},getLatLngs:function(){return this._latlngs},setLatLngs:function(a){this._latlngs=
+a;this._redraw();return this},addLatLng:function(a){this._latlngs.push(a);this._redraw();return this},spliceLatLngs:function(){var a=[].splice.apply(this._latlngs,arguments);this._redraw();return a},_getPathPartStr:function(a){for(var b=L.Path.VML,c=0,d=a.length,e="",f;c<d;c++)f=a[c],b&&f._round(),e+=(c?"L":"M")+f.x+" "+f.y;return e},_clipPoints:function(){var a=this._originalPoints,b=a.length,c,d,e;if(this.options.noClip)this._parts=[a];else{var f=this._parts=[],g=this._map._pathViewport,h=L.LineUtil;
+for(d=c=0;c<b-1;c++)if(e=h.clipSegment(a[c],a[c+1],g,c))if(f[d]=f[d]||[],f[d].push(e[0]),e[1]!=a[c+1]||c==b-2)f[d].push(e[1]),d++}},_simplifyPoints:function(){for(var a=this._parts,b=L.LineUtil,c=0,d=a.length;c<d;c++)a[c]=b.simplify(a[c],this.options.smoothFactor)},_updatePath:function(){this._clipPoints();this._simplifyPoints();L.Path.prototype._updatePath.call(this)}});L.Polygon=L.Polyline.extend({options:{fill:!0},initialize:function(a,b){L.Polyline.prototype.initialize.call(this,a,b);if(a[0]instanceof Array)this._latlngs=a[0],this._holes=a.slice(1)},projectLatlngs:function(){L.Polyline.prototype.projectLatlngs.call(this);this._holePoints=[];if(this._holes)for(var a=0,b=this._holes.length;a<b;a++){this._holePoints[a]=[];for(var c=0,d=this._holes[a].length;c<d;c++)this._holePoints[a][c]=this._map.latLngToLayerPoint(this._holes[a][c])}},_clipPoints:function(){var a=
+[];this._parts=[this._originalPoints].concat(this._holePoints);if(!this.options.noClip){for(var b=0,c=this._parts.length;b<c;b++){var d=L.PolyUtil.clipPolygon(this._parts[b],this._map._pathViewport);d.length&&a.push(d)}this._parts=a}},_getPathPartStr:function(a){return L.Polyline.prototype._getPathPartStr.call(this,a)+(L.Path.SVG?"z":"x")}});(function(){function a(a){return L.FeatureGroup.extend({initialize:function(c,d){this._layers={};for(var e=0,f=c.length;e<f;e++)this.addLayer(new a(c[e],d))},setStyle:function(a){for(var b in this._layers)this._layers.hasOwnProperty(b)&&this._layers[b].setStyle&&this._layers[b].setStyle(a)}})}L.MultiPolyline=a(L.Polyline);L.MultiPolygon=a(L.Polygon)})();L.Circle=L.Path.extend({initialize:function(a,b,c){L.Path.prototype.initialize.call(this,c);this._latlng=a;this._mRadius=b},options:{fill:!0},setLatLng:function(a){this._latlng=a;this._redraw();return this},setRadius:function(a){this._mRadius=a;this._redraw();return this},projectLatlngs:function(){var a=this._map.options.scale(this._map._zoom);this._point=this._map.latLngToLayerPoint(this._latlng);this._radius=this._mRadius/40075017*a},getPathString:function(){var a=this._point,b=this._radius;return L.Path.SVG?
+"M"+a.x+","+(a.y-b)+"A"+b+","+b+",0,1,1,"+(a.x-0.1)+","+(a.y-b)+" z":(a._round(),b=Math.round(b),"AL "+a.x+","+a.y+" "+b+","+b+" 0,23592600")}});L.CircleMarker=L.Circle.extend({options:{radius:10,weight:2},initialize:function(a,b){L.Circle.prototype.initialize.call(this,a,null,b);this._radius=this.options.radius},projectLatlngs:function(){this._point=this._map.latLngToLayerPoint(this._latlng)},setRadius:function(a){this._radius=a;this._redraw();return this}});L.GeoJSON=L.LayerGroup.extend({includes:L.Mixin.Events,initialize:function(a,b){L.Util.setOptions(this,b);this._geojson=a;this._layers={};a&&this.addGeoJSON(a)},addGeoJSON:function(a){if(a.features)for(var b=0,c=a.features.length;b<c;b++)this.addGeoJSON(a.features[b]);else b=a.type=="Feature"?a.geometry:a,c=L.GeoJSON.geometryToLayer(b,this.options.pointToLayer),this.fire("featureparse",{layer:c,properties:a.properties,geometryType:b.type,bbox:a.bbox,id:a.id}),this.addLayer(c)}});
+L.Util.extend(L.GeoJSON,{geometryToLayer:function(a,b){var c=a.coordinates,d,e,f,g=[];switch(a.type){case "Point":return d=this.coordsToLatLng(c),b?b(d):new L.Marker(d);case "MultiPoint":e=0;for(f=c.length;e<f;e++)d=this.coordsToLatLng(c[e]),d=b?b(d):new L.Marker(d),g.push(d);return new L.FeatureGroup(g);case "LineString":return c=this.coordsToLatLngs(c),new L.Polyline(c);case "Polygon":return c=this.coordsToLatLngs(c,1),new L.Polygon(c);case "MultiLineString":return c=this.coordsToLatLngs(c,1),new L.MultiPolyline(c);
+case "MultiPolygon":return c=this.coordsToLatLngs(c,2),new L.MultiPolygon(c);case "GeometryCollection":e=0;for(f=a.geometries.length;e<f;e++)d=this.geometryToLayer(a.geometries[e]),g.push(d);return new L.FeatureGroup(g);default:throw Error("Invalid GeoJSON object.");}},coordsToLatLng:function(a,b){var c=parseFloat(a[b?0:1]),d=parseFloat(a[b?1:0]);return new L.LatLng(c,d)},coordsToLatLngs:function(a,b,c){var d,e=[],f,g=a.length;for(f=0;f<g;f++)d=b?this.coordsToLatLngs(a[f],b-1,c):this.coordsToLatLng(a[f],
+c),e.push(d);return e}});L.Handler=L.Class.extend({initialize:function(a){this._map=a},enabled:function(){return!!this._enabled}});L.Handler.MapDrag=L.Handler.extend({enable:function(){if(!this._enabled){if(!this._draggable)this._draggable=new L.Draggable(this._map._mapPane,this._map._container),this._draggable.on("dragstart",this._onDragStart,this),this._draggable.on("drag",this._onDrag,this),this._draggable.on("dragend",this._onDragEnd,this);this._draggable.enable();this._enabled=!0}},disable:function(){if(this._enabled)this._draggable.disable(),this._enabled=!1},moved:function(){return this._draggable._moved},_onDragStart:function(){this._map.fire("movestart");
 this._map.fire("dragstart")},_onDrag:function(){this._map.fire("move");this._map.fire("drag")},_onDragEnd:function(){this._map.fire("moveend");this._map.fire("dragend")}});L.Handler.TouchZoom=L.Handler.extend({enable:function(){if(L.Browser.mobileWebkit&&!this._enabled)L.DomEvent.addListener(this._map._container,"touchstart",this._onTouchStart,this),this._enabled=!0},disable:function(){if(this._enabled)L.DomEvent.removeListener(this._map._container,"touchstart",this._onTouchStart,this),this._enabled=!1},_onTouchStart:function(a){if(a.touches&&!(a.touches.length!=2||this._map._animatingZoom)){var b=this._map.mouseEventToLayerPoint(a.touches[0]),c=this._map.mouseEventToLayerPoint(a.touches[1]),
 d=this._map.containerPointToLayerPoint(this._map.getSize().divideBy(2));this._startCenter=b.add(c).divideBy(2,!0);this._startDist=b.distanceTo(c);this._moved=!1;this._zooming=!0;this._centerOffset=d.subtract(this._startCenter);L.DomEvent.addListener(document,"touchmove",this._onTouchMove,this);L.DomEvent.addListener(document,"touchend",this._onTouchEnd,this);L.DomEvent.preventDefault(a)}},_onTouchMove:function(a){if(a.touches&&a.touches.length==2){if(!this._moved)this._map._mapPane.className+=" leaflet-zoom-anim",
 this._map._prepareTileBg(),this._moved=!0;var b=this._map.mouseEventToLayerPoint(a.touches[0]),c=this._map.mouseEventToLayerPoint(a.touches[1]);this._scale=b.distanceTo(c)/this._startDist;this._delta=b.add(c).divideBy(2,!0).subtract(this._startCenter);this._map._tileBg.style.webkitTransform=[L.DomUtil.getTranslateString(this._delta),L.DomUtil.getScaleString(this._scale,this._startCenter)].join(" ");L.DomEvent.preventDefault(a)}},_onTouchEnd:function(){if(this._moved&&this._zooming){this._zooming=
-!1;var a=this._map.getZoom(),b=Math.log(this._scale)/Math.LN2;b=this._map._limitZoom(a+(b>0?Math.ceil(b):Math.floor(b)));a=b-a;var c=this._centerOffset.subtract(this._delta).divideBy(this._scale),d=this._map.unproject(this._map.getPixelOrigin().add(this._startCenter).add(c));L.DomEvent.removeListener(document,"touchmove",this._onTouchMove);L.DomEvent.removeListener(document,"touchend",this._onTouchEnd);this._map._runAnimation(d,b,Math.pow(2,a)/this._scale,this._startCenter.add(c))}}});L.Handler.ScrollWheelZoom=L.Handler.extend({enable:function(){if(!this._enabled)L.DomEvent.addListener(this._map._container,"mousewheel",this._onWheelScroll,this),this._delta=0,this._enabled=!0},disable:function(){if(this._enabled)L.DomEvent.removeListener(this._map._container,"mousewheel",this._onWheelScroll),this._enabled=!1},_onWheelScroll:function(a){this._delta+=L.DomEvent.getWheelDelta(a);this._lastMousePos=this._map.mouseEventToContainerPoint(a);clearTimeout(this._timer);this._timer=setTimeout(L.Util.bind(this._performZoom,
-this),50);L.DomEvent.preventDefault(a)},_performZoom:function(){var a=Math.round(this._delta);this._delta=0;if(a){var b=this._getCenterForScrollWheelZoom(this._lastMousePos,a);a=this._map.getZoom()+a;this._map._limitZoom(a)!=this._map._zoom&&this._map.setView(b,a)}},_getCenterForScrollWheelZoom:function(a,b){var c=this._map.getPixelBounds().getCenter(),d=this._map.getSize().divideBy(2);d=a.subtract(d).multiplyBy(1-Math.pow(2,-b));return this._map.unproject(c.add(d))}});L.Handler.DoubleClickZoom=L.Handler.extend({enable:function(){if(!this._enabled)this._map.on("dblclick",this._onDoubleClick,this._map),this._enabled=!0},disable:function(){if(this._enabled)this._map.off("dblclick",this._onDoubleClick,this._map),this._enabled=!1},_onDoubleClick:function(a){this.setView(a.latlng,this._zoom+1)}});L.Handler.ShiftDragZoom=L.Handler.extend({initialize:function(a){this._map=a;this._container=a._container;this._pane=a._panes.overlayPane},enable:function(){if(!this._enabled)L.DomEvent.addListener(this._container,"mousedown",this._onMouseDown,this),this._enabled=!0},disable:function(){if(this._enabled)L.DomEvent.removeListener(this._container,"mousedown",this._onMouseDown),this._enabled=!1},_onMouseDown:function(a){if(!a.shiftKey||a.which!=1&&a.button!=1)return!1;L.DomUtil.disableTextSelection();
-this._startLayerPoint=this._map.mouseEventToLayerPoint(a);this._box=L.DomUtil.create("div","leaflet-zoom-box",this._pane);L.DomUtil.setPosition(this._box,this._startLayerPoint);this._container.style.cursor="crosshair";L.DomEvent.addListener(document,"mousemove",this._onMouseMove,this);L.DomEvent.addListener(document,"mouseup",this._onMouseUp,this);L.DomEvent.preventDefault(a)},_onMouseMove:function(a){var b=this._map.mouseEventToLayerPoint(a);a=b.x-this._startLayerPoint.x;var c=b.y-this._startLayerPoint.y;
+!1;var a=this._map.getZoom(),b=Math.log(this._scale)/Math.LN2,b=this._map._limitZoom(a+(b>0?Math.ceil(b):Math.floor(b))),a=b-a,c=this._centerOffset.subtract(this._delta).divideBy(this._scale),d=this._map.unproject(this._map.getPixelOrigin().add(this._startCenter).add(c));L.DomEvent.removeListener(document,"touchmove",this._onTouchMove);L.DomEvent.removeListener(document,"touchend",this._onTouchEnd);this._map._runAnimation(d,b,Math.pow(2,a)/this._scale,this._startCenter.add(c))}}});L.Handler.ScrollWheelZoom=L.Handler.extend({enable:function(){if(!this._enabled)L.DomEvent.addListener(this._map._container,"mousewheel",this._onWheelScroll,this),this._delta=0,this._enabled=!0},disable:function(){if(this._enabled)L.DomEvent.removeListener(this._map._container,"mousewheel",this._onWheelScroll),this._enabled=!1},_onWheelScroll:function(a){this._delta+=L.DomEvent.getWheelDelta(a);this._lastMousePos=this._map.mouseEventToContainerPoint(a);clearTimeout(this._timer);this._timer=setTimeout(L.Util.bind(this._performZoom,
+this),50);L.DomEvent.preventDefault(a)},_performZoom:function(){var a=Math.round(this._delta);this._delta=0;if(a){var b=this._getCenterForScrollWheelZoom(this._lastMousePos,a),a=this._map.getZoom()+a;this._map._limitZoom(a)!=this._map._zoom&&this._map.setView(b,a)}},_getCenterForScrollWheelZoom:function(a,b){var c=this._map.getPixelBounds().getCenter(),d=this._map.getSize().divideBy(2),d=a.subtract(d).multiplyBy(1-Math.pow(2,-b));return this._map.unproject(c.add(d),this._map._zoom,!0)}});L.Handler.DoubleClickZoom=L.Handler.extend({enable:function(){if(!this._enabled)this._map.on("dblclick",this._onDoubleClick,this._map),this._enabled=!0},disable:function(){if(this._enabled)this._map.off("dblclick",this._onDoubleClick,this._map),this._enabled=!1},_onDoubleClick:function(a){this.setView(a.latlng,this._zoom+1)}});L.Handler.ShiftDragZoom=L.Handler.extend({initialize:function(a){this._map=a;this._container=a._container;this._pane=a._panes.overlayPane},enable:function(){if(!this._enabled)L.DomEvent.addListener(this._container,"mousedown",this._onMouseDown,this),this._enabled=!0},disable:function(){if(this._enabled)L.DomEvent.removeListener(this._container,"mousedown",this._onMouseDown),this._enabled=!1},_onMouseDown:function(a){if(!a.shiftKey||a.which!=1&&a.button!=1)return!1;L.DomUtil.disableTextSelection();
+this._startLayerPoint=this._map.mouseEventToLayerPoint(a);this._box=L.DomUtil.create("div","leaflet-zoom-box",this._pane);L.DomUtil.setPosition(this._box,this._startLayerPoint);this._container.style.cursor="crosshair";L.DomEvent.addListener(document,"mousemove",this._onMouseMove,this);L.DomEvent.addListener(document,"mouseup",this._onMouseUp,this);L.DomEvent.preventDefault(a)},_onMouseMove:function(a){var b=this._map.mouseEventToLayerPoint(a),a=b.x-this._startLayerPoint.x,c=b.y-this._startLayerPoint.y,
 b=new L.Point(Math.min(b.x,this._startLayerPoint.x),Math.min(b.y,this._startLayerPoint.y));L.DomUtil.setPosition(this._box,b);this._box.style.width=Math.abs(a)-4+"px";this._box.style.height=Math.abs(c)-4+"px"},_onMouseUp:function(a){this._pane.removeChild(this._box);this._container.style.cursor="";L.DomUtil.enableTextSelection();L.DomEvent.removeListener(document,"mousemove",this._onMouseMove);L.DomEvent.removeListener(document,"mouseup",this._onMouseUp);a=this._map.mouseEventToLayerPoint(a);this._map.fitBounds(new L.LatLngBounds(this._map.layerPointToLatLng(this._startLayerPoint),
 this._map.layerPointToLatLng(a)))}});L.Handler.MarkerDrag=L.Handler.extend({initialize:function(a){this._marker=a},enable:function(){if(!this._enabled){if(!this._draggable)this._draggable=new L.Draggable(this._marker._icon,this._marker._icon),this._draggable.on("dragstart",this._onDragStart,this),this._draggable.on("drag",this._onDrag,this),this._draggable.on("dragend",this._onDragEnd,this);this._draggable.enable();this._enabled=!0}},disable:function(){if(this._enabled)this._draggable.disable(),this._enabled=!1},moved:function(){return this._draggable&&
 this._draggable._moved},_onDragStart:function(){this._marker.closePopup();this._marker.fire("movestart");this._marker.fire("dragstart")},_onDrag:function(){var a=L.DomUtil.getPosition(this._marker._icon);L.DomUtil.setPosition(this._marker._shadow,a);this._marker._latlng=this._marker._map.layerPointToLatLng(a);this._marker.fire("move");this._marker.fire("drag")},_onDragEnd:function(){this._marker.fire("moveend");this._marker.fire("dragend")}});L.Control={};L.Control.Position={TOP_LEFT:"topLeft",TOP_RIGHT:"topRight",BOTTOM_LEFT:"bottomLeft",BOTTOM_RIGHT:"bottomRight"};L.Control.Zoom=L.Class.extend({onAdd:function(a){this._map=a;this._container=L.DomUtil.create("div","leaflet-control-zoom");this._zoomInButton=this._createButton("Zoom in","leaflet-control-zoom-in",this._map.zoomIn,this._map);this._zoomOutButton=this._createButton("Zoom out","leaflet-control-zoom-out",this._map.zoomOut,this._map);this._container.appendChild(this._zoomInButton);this._container.appendChild(this._zoomOutButton)},getContainer:function(){return this._container},getPosition:function(){return L.Control.Position.TOP_LEFT},
 _createButton:function(a,b,c,d){var e=document.createElement("a");e.href="#";e.title=a;e.className=b;L.DomEvent.disableClickPropagation(e);L.DomEvent.addListener(e,"click",L.DomEvent.preventDefault);L.DomEvent.addListener(e,"click",c,d);return e}});L.Control.Attribution=L.Class.extend({onAdd:function(a){this._container=L.DomUtil.create("div","leaflet-control-attribution");this._map=a;this._prefix='Powered by <a href="http://leaflet.cloudmade.com">Leaflet</a>';this._attributions={};this._update()},getPosition:function(){return L.Control.Position.BOTTOM_RIGHT},getContainer:function(){return this._container},setPrefix:function(a){this._prefix=a},addAttribution:function(a){a&&(this._attributions[a]=!0,this._update())},removeAttribution:function(a){a&&
-(delete this._attributions[a],this._update())},_update:function(){if(this._map){var a=[],b;for(b in this._attributions)this._attributions.hasOwnProperty(b)&&a.push(b);this._container.innerHTML=[this._prefix,a.join(", ")].join(" — ")}}});L.Map=L.Class.extend({includes:L.Mixin.Events,options:{projection:L.Projection.Mercator,transformation:new L.Transformation(0.5/Math.PI,0.5,-0.5/Math.PI,0.5),scaling:function(a){return 256*(1<<a)},center:null,zoom:null,layers:[],dragging:!0,touchZoom:L.Browser.mobileWebkit&&!L.Browser.android,scrollWheelZoom:!L.Browser.mobileWebkit,doubleClickZoom:!0,shiftDragZoom:!0,zoomControl:!0,attributionControl:!0,fadeAnimation:L.DomUtil.TRANSITION&&!L.Browser.android,zoomAnimation:L.DomUtil.TRANSITION&&!L.Browser.android,
-trackResize:!0,closePopupOnClick:!0},initialize:function(a,b){L.Util.setOptions(this,b);this._container=L.DomUtil.get(a);this._initLayout();L.DomEvent&&(this._initEvents(),L.Handler&&this._initInteraction(),L.Control&&this._initControls());var c=this.options.center,d=this.options.zoom;c!==null&&d!==null&&this.setView(c,d,!0);c=this.options.layers;c=c instanceof Array?c:[c];this._tileLayersNum=0;this._initLayers(c)},setView:function(a,b){this._resetView(a,this._limitZoom(b));return this},setZoom:function(a){return this.setView(this.getCenter(),
+(delete this._attributions[a],this._update())},_update:function(){if(this._map){var a=[],b;for(b in this._attributions)this._attributions.hasOwnProperty(b)&&a.push(b);b=[];this._prefix&&b.push(this._prefix);a.length&&b.push(a.join(", "));this._container.innerHTML=b.join(" — ")}}});L.Map=L.Class.extend({includes:L.Mixin.Events,options:{crs:L.CRS.EPSG3857||L.CRS.EPSG4326,scale:function(a){return 256*(1<<a)},center:null,zoom:null,layers:[],dragging:!0,touchZoom:L.Browser.mobileWebkit&&!L.Browser.android,scrollWheelZoom:!L.Browser.mobileWebkit,doubleClickZoom:!0,shiftDragZoom:!0,zoomControl:!0,attributionControl:!0,fadeAnimation:L.DomUtil.TRANSITION&&!L.Browser.android,zoomAnimation:L.DomUtil.TRANSITION&&!L.Browser.android&&!L.Browser.mobileOpera,trackResize:!0,closePopupOnClick:!0},
+initialize:function(a,b){L.Util.setOptions(this,b);this._container=L.DomUtil.get(a);this._initLayout();L.DomEvent&&(this._initEvents(),L.Handler&&this._initInteraction(),L.Control&&this._initControls());var c=this.options.center,d=this.options.zoom;c!==null&&d!==null&&this.setView(c,d,!0);c=this.options.layers;c=c instanceof Array?c:[c];this._tileLayersNum=0;this._initLayers(c)},setView:function(a,b){this._resetView(a,this._limitZoom(b));return this},setZoom:function(a){return this.setView(this.getCenter(),
 a)},zoomIn:function(){return this.setZoom(this._zoom+1)},zoomOut:function(){return this.setZoom(this._zoom-1)},fitBounds:function(a){var b=this.getBoundsZoom(a);return this.setView(a.getCenter(),b)},fitWorld:function(){var a=new L.LatLng(-60,-170),b=new L.LatLng(85,179);return this.fitBounds(new L.LatLngBounds(a,b))},panTo:function(a){return this.setView(a,this._zoom)},panBy:function(a){this.fire("movestart");this._rawPanBy(a);this.fire("move");this.fire("moveend");return this},addLayer:function(a){var b=
 L.Util.stamp(a);if(this._layers[b])return this;this._layers[b]=a;if(a.options&&!isNaN(a.options.maxZoom))this._layersMaxZoom=Math.max(this._layersMaxZoom||0,a.options.maxZoom);if(a.options&&!isNaN(a.options.minZoom))this._layersMinZoom=Math.min(this._layersMinZoom||Infinity,a.options.minZoom);this.options.zoomAnimation&&L.TileLayer&&a instanceof L.TileLayer&&(this._tileLayersNum++,a.on("load",this._onTileLayerLoad,this));this.attributionControl&&a.getAttribution&&this.attributionControl.addAttribution(a.getAttribution());
-b=function(){a.onAdd(this);this.fire("layeradd",{layer:a})};if(this._loaded)b.call(this);else this.on("load",b,this);return this},removeLayer:function(a){var b=L.Util.stamp(a);this._layers[b]&&(a.onRemove(this),delete this._layers[b],this.options.zoomAnimation&&L.TileLayer&&a instanceof L.TileLayer&&this._tileLayersNum--,this.attributionControl&&a.getAttribution&&this.attributionControl.removeAttribution(a.getAttribution()),this.fire("layerremove",{layer:a}));return this},invalidateSize:function(){this._sizeChanged=
-!0;this.fire("move");clearTimeout(this._sizeTimer);this._sizeTimer=setTimeout(L.Util.bind(function(){this.fire("moveend")},this),200);return this},getCenter:function(a){var b=this.getSize().divideBy(2);return this.unproject(this._getTopLeftPoint().add(b),this._zoom,a)},getZoom:function(){return this._zoom},getBounds:function(){var a=this.getPixelBounds(),b=this.unproject(new L.Point(a.min.x,a.max.y));a=this.unproject(new L.Point(a.max.x,a.min.y));return new L.LatLngBounds(b,a)},getMinZoom:function(){return isNaN(this.options.minZoom)?
-this._layersMinZoom||0:this.options.minZoom},getMaxZoom:function(){return isNaN(this.options.maxZoom)?this._layersMaxZoom||Infinity:this.options.maxZoom},getBoundsZoom:function(a){var b=this.getSize(),c=this.getMinZoom(),d=this.getMaxZoom(),e=a.getNorthEast();a=a.getSouthWest();var f,g;do c++,f=this.project(e,c),g=this.project(a,c),f=new L.Point(f.x-g.x,g.y-f.y);while(f.x<=b.x&&f.y<=b.y&&c<=d);return c-1},getSize:function(){if(!this._size||this._sizeChanged)this._size=new L.Point(this._container.clientWidth,
-this._container.clientHeight),this._sizeChanged=!1;return this._size},getPixelBounds:function(){var a=this._getTopLeftPoint(),b=this.getSize();return new L.Bounds(a,a.add(b))},getPixelOrigin:function(){return this._initialTopLeftPoint},getPanes:function(){return this._panes},mouseEventToContainerPoint:function(a){return L.DomEvent.getMousePosition(a,this._container)},mouseEventToLayerPoint:function(a){return this.containerPointToLayerPoint(this.mouseEventToContainerPoint(a))},mouseEventToLatLng:function(a){return this.layerPointToLatLng(this.mouseEventToLayerPoint(a))},
-containerPointToLayerPoint:function(a){return a.subtract(L.DomUtil.getPosition(this._mapPane))},layerPointToContainerPoint:function(a){return a.add(L.DomUtil.getPosition(this._mapPane))},layerPointToLatLng:function(a){return this.unproject(a.add(this._initialTopLeftPoint))},latLngToLayerPoint:function(a){return this.project(a)._subtract(this._initialTopLeftPoint)},project:function(a,b){var c=this.options,d=c.projection.project(a),e=c.scaling(isNaN(b)?this._zoom:b);return c.transformation._transform(d,
-e)},unproject:function(a,b,c){b=this.options.scaling(isNaN(b)?this._zoom:b);return this.options.projection.unproject(this.options.transformation.untransform(a,b),c)},_initLayout:function(){var a=this._container;a.className+=" leaflet-container";this.options.fadeAnimation&&(a.className+=" leaflet-fade-anim");var b=L.DomUtil.getStyle(a,"position");a.style.position=b=="absolute"?"absolute":"relative";this._initPanes();this._initControlPos&&this._initControlPos()},_initPanes:function(){var a=this._panes=
-{};this._mapPane=a.mapPane=this._createPane("leaflet-map-pane",this._container);this._tilePane=a.tilePane=this._createPane("leaflet-tile-pane",this._mapPane);this._objectsPane=a.objectsPane=this._createPane("leaflet-objects-pane",this._mapPane);a.shadowPane=this._createPane("leaflet-shadow-pane");a.overlayPane=this._createPane("leaflet-overlay-pane");a.markerPane=this._createPane("leaflet-marker-pane");a.popupPane=this._createPane("leaflet-popup-pane")},_createPane:function(a,b){return L.DomUtil.create("div",
-a,b||this._objectsPane)},_resetView:function(a,b,c){var d=this._zoom!=b;this.fire("movestart");this._zoom=b;this._initialTopLeftPoint=this._getNewTopLeftPoint(a);c?this._initialTopLeftPoint._add(L.DomUtil.getPosition(this._mapPane)):L.DomUtil.setPosition(this._mapPane,new L.Point(0,0));this.fire("viewreset");this.fire("move");d&&this.fire("zoomend");this.fire("moveend");if(!this._loaded)this._loaded=!0,this.fire("load")},_initLayers:function(a){this._layers={};for(var b=0,c=a.length;b<c;b++)this.addLayer(a[b])},
-_initControls:function(){this.options.zoomControl&&this.addControl(new L.Control.Zoom);if(this.options.attributionControl)this.attributionControl=new L.Control.Attribution,this.addControl(this.attributionControl)},_rawPanBy:function(a){var b=L.DomUtil.getPosition(this._mapPane);L.DomUtil.setPosition(this._mapPane,b.subtract(a))},_initEvents:function(){L.DomEvent.addListener(this._container,"click",this._onMouseClick,this);L.DomEvent.addListener(this._container,"dblclick",this._fireMouseEvent,this);
-L.DomEvent.addListener(this._container,"mousedown",this._fireMouseEvent,this);this.options.trackResize&&L.DomEvent.addListener(window,"resize",this.invalidateSize,this)},_onMouseClick:function(a){(!this.dragging||!this.dragging.moved())&&this._fireMouseEvent(a)},_fireMouseEvent:function(a){this.fire("pre"+a.type);this.hasEventListeners(a.type)&&this.fire(a.type,{latlng:this.mouseEventToLatLng(a),layerPoint:this.mouseEventToLayerPoint(a)})},_initInteraction:function(){var a={dragging:L.Handler.MapDrag,
-touchZoom:L.Handler.TouchZoom,doubleClickZoom:L.Handler.DoubleClickZoom,scrollWheelZoom:L.Handler.ScrollWheelZoom,shiftDragZoom:L.Handler.ShiftDragZoom},b;for(b in a)a.hasOwnProperty(b)&&a[b]&&(this[b]=new a[b](this),this.options[b]&&this[b].enable())},_onTileLayerLoad:function(){this._tileLayersToLoad--;if(this._tileLayersNum&&!this._tileLayersToLoad&&this._tileBg)clearTimeout(this._clearTileBgTimer),this._clearTileBgTimer=setTimeout(L.Util.bind(this._clearTileBg,this),500)},_getTopLeftPoint:function(){if(!this._loaded)throw Error("Set map center and zoom first.");
-return this._initialTopLeftPoint.subtract(L.DomUtil.getPosition(this._mapPane))},_getNewTopLeftPoint:function(a){var b=this.getSize().divideBy(2);return this.project(a).subtract(b).round()},_limitZoom:function(a){var b=this.getMinZoom(),c=this.getMaxZoom();return Math.max(b,Math.min(c,a))}});L.Map.include({locate:function(){navigator.geolocation?navigator.geolocation.getCurrentPosition(L.Util.bind(this._handleGeolocationResponse,this),L.Util.bind(this._handleGeolocationError,this)):this.fire("locationerror",{message:"Geolocation not supported."});return this},locateAndSetView:function(){this._setViewOnLocate=!0;return this.locate()},_handleGeolocationError:function(a){this.fire("locationerror",{message:a.message})},_handleGeolocationResponse:function(a){var b=180*a.coords.accuracy/4E7,
-c=b*2,d=a.coords.latitude,e=a.coords.longitude,f=new L.LatLng(d-b,e-c);b=new L.LatLng(d+b,e+c);f=new L.LatLngBounds(f,b);if(this._setViewOnLocate)this.fitBounds(f),this._setViewOnLocate=!1;this.fire("locationfound",{latlng:new L.LatLng(d,e),bounds:f,accuracy:a.coords.accuracy})}});L.Map.include({openPopup:function(a){this.closePopup();this._popup=a;return this.addLayer(a)},closePopup:function(){this._popup&&this.removeLayer(this._popup);return this}});L.Map.include(!L.Transition||!L.Transition.implemented()?{}:{setView:function(a,b,c){b=this._limitZoom(b);var d=this._zoom!=b;if(this._loaded&&!c&&this._layers&&(c=this._getNewTopLeftPoint(a).subtract(this._getTopLeftPoint()),d?this._zoomToIfCenterInView&&this._zoomToIfCenterInView(a,b,c):this._panByIfClose(c)))return this;this._resetView(a,b);return this},panBy:function(a){if(!this._panTransition)this._panTransition=new L.Transition(this._mapPane,{duration:0.3}),this._panTransition.on("step",this._onPanTransitionStep,
-this),this._panTransition.on("end",this._onPanTransitionEnd,this);this.fire(this,"movestart");this._panTransition.run({position:L.DomUtil.getPosition(this._mapPane).subtract(a)});return this},_onPanTransitionStep:function(){this.fire("move")},_onPanTransitionEnd:function(){this.fire("moveend")},_panByIfClose:function(a){if(this._offsetIsWithinView(a))return this.panBy(a),!0;return!1},_offsetIsWithinView:function(a,b){var c=b||1,d=this.getSize();return Math.abs(a.x)<=d.x*c&&Math.abs(a.y)<=d.y*c}});L.Map.include(!L.DomUtil.TRANSITION?{}:{_zoomToIfCenterInView:function(a,b,c){if(this._animatingZoom)return!0;if(!this.options.zoomAnimation)return!1;var d=Math.pow(2,b-this._zoom);c=c.divideBy(1-1/d);if(!this._offsetIsWithinView(c,1))return!1;this._mapPane.className+=" leaflet-zoom-anim";c=this.containerPointToLayerPoint(this.getSize().divideBy(2)).add(c);this._prepareTileBg();this._runAnimation(a,b,d,c);return!0},_runAnimation:function(a,b,c,d){this._animatingZoom=!0;this._animateToCenter=a;this._animateToZoom=
-b;a=L.DomUtil.TRANSFORM;L.Browser.gecko&&(this._tileBg.style[a]+=" translate(0,0)");L.Browser.android?(this._tileBg.style[a+"Origin"]=d.x+"px "+d.y+"px",c="scale("+c+")"):c=L.DomUtil.getScaleString(c,d);L.Util.falseFn(this._tileBg.offsetWidth);d={};d[a]=this._tileBg.style[a]+" "+c;this._tileBg.transition.run(d)},_prepareTileBg:function(){if(!this._tileBg)this._tileBg=this._createPane("leaflet-tile-pane",this._mapPane),this._tileBg.style.zIndex=1;var a=this._tilePane,b=this._tileBg;b.innerHTML="";
-b.style[L.DomUtil.TRANSFORM]="";b.style.display="none";b.empty=!0;a.empty=!1;this._tilePane=this._panes.tilePane=b;this._tileBg=a;if(!this._tileBg.transition)this._tileBg.transition=new L.Transition(this._tileBg,{duration:0.3,easing:"cubic-bezier(0.25,0.1,0.25,0.75)"}),this._tileBg.transition.on("end",this._onZoomTransitionEnd,this);this._removeExcessiveBgTiles()},_removeExcessiveBgTiles:function(){for(var a=[].slice.call(this._tileBg.getElementsByTagName("img")),b=this._container.getBoundingClientRect(),
-c=0,d=a.length;c<d;c++){var e=a[c].getBoundingClientRect();if(!a[c].complete||e.right<=b.left||e.left>=b.right||e.top>=b.bottom||e.bottom<=b.top)a[c].src="",a[c].parentNode.removeChild(a[c])}},_onZoomTransitionEnd:function(){this._restoreTileFront();L.Util.falseFn(this._tileBg.offsetWidth);this._resetView(this._animateToCenter,this._animateToZoom,!0);this._mapPane.className=this._mapPane.className.replace(" leaflet-zoom-anim","");this._animatingZoom=!1},_restoreTileFront:function(){this._tilePane.style.display=
-"";this._tilePane.style.zIndex=2;this._tileBg.style.zIndex=1},_clearTileBg:function(){if(!this._animatingZoom)this._tileBg.innerHTML=""}});L.Map.include({addControl:function(a){a.onAdd(this);var b=a.getPosition(),c=this._controlCorners[b];a=a.getContainer();L.DomUtil.addClass(a,"leaflet-control");b.indexOf("bottom")!=-1?c.insertBefore(a,c.firstChild):c.appendChild(a);return this},removeControl:function(){var a=this._controlCorners[control.getPosition()],b=control.getContainer();a.removeChild(b);if(control.onRemove)control.onRemove(this);return this},_initControlPos:function(){var a=this._controlCorners={},b=L.DomUtil.create("div","leaflet-control-container",
+b=function(){a.onAdd(this);this.fire("layeradd",{layer:a})};if(this._loaded)b.call(this);else this.on("load",b,this);return this},removeLayer:function(a){var b=L.Util.stamp(a);this._layers[b]&&(a.onRemove(this),delete this._layers[b],this.options.zoomAnimation&&L.TileLayer&&a instanceof L.TileLayer&&(this._tileLayersNum--,a.off("load",this._onTileLayerLoad,this)),this.attributionControl&&a.getAttribution&&this.attributionControl.removeAttribution(a.getAttribution()),this.fire("layerremove",{layer:a}));
+return this},invalidateSize:function(){this._sizeChanged=!0;this.fire("move");clearTimeout(this._sizeTimer);this._sizeTimer=setTimeout(L.Util.bind(function(){this.fire("moveend")},this),200);return this},getCenter:function(a){var b=this.getSize().divideBy(2);return this.unproject(this._getTopLeftPoint().add(b),this._zoom,a)},getZoom:function(){return this._zoom},getBounds:function(){var a=this.getPixelBounds(),b=this.unproject(new L.Point(a.min.x,a.max.y)),a=this.unproject(new L.Point(a.max.x,a.min.y));
+return new L.LatLngBounds(b,a)},getMinZoom:function(){return isNaN(this.options.minZoom)?this._layersMinZoom||0:this.options.minZoom},getMaxZoom:function(){return isNaN(this.options.maxZoom)?this._layersMaxZoom||Infinity:this.options.maxZoom},getBoundsZoom:function(a){var b=this.getSize(),c=this.getMinZoom(),d=this.getMaxZoom(),e=a.getNorthEast(),a=a.getSouthWest(),f,g;do c++,f=this.project(e,c),g=this.project(a,c),f=new L.Point(f.x-g.x,g.y-f.y);while(f.x<=b.x&&f.y<=b.y&&c<=d);return c-1},getSize:function(){if(!this._size||
+this._sizeChanged)this._size=new L.Point(this._container.clientWidth,this._container.clientHeight),this._sizeChanged=!1;return this._size},getPixelBounds:function(){var a=this._getTopLeftPoint(),b=this.getSize();return new L.Bounds(a,a.add(b))},getPixelOrigin:function(){return this._initialTopLeftPoint},getPanes:function(){return this._panes},mouseEventToContainerPoint:function(a){return L.DomEvent.getMousePosition(a,this._container)},mouseEventToLayerPoint:function(a){return this.containerPointToLayerPoint(this.mouseEventToContainerPoint(a))},
+mouseEventToLatLng:function(a){return this.layerPointToLatLng(this.mouseEventToLayerPoint(a))},containerPointToLayerPoint:function(a){return a.subtract(L.DomUtil.getPosition(this._mapPane))},layerPointToContainerPoint:function(a){return a.add(L.DomUtil.getPosition(this._mapPane))},layerPointToLatLng:function(a){return this.unproject(a.add(this._initialTopLeftPoint))},latLngToLayerPoint:function(a){return this.project(a)._subtract(this._initialTopLeftPoint)},project:function(a,b){b=typeof b=="undefined"?
+this._zoom:b;return this.options.crs.latLngToPoint(a,this.options.scale(b))},unproject:function(a,b,c){b=typeof b=="undefined"?this._zoom:b;return this.options.crs.pointToLatLng(a,this.options.scale(b),c)},_initLayout:function(){var a=this._container;a.className+=" leaflet-container";this.options.fadeAnimation&&(a.className+=" leaflet-fade-anim");var b=L.DomUtil.getStyle(a,"position");if(b!="absolute"&&b!="relative")a.style.position="relative";this._initPanes();this._initControlPos&&this._initControlPos()},
+_initPanes:function(){var a=this._panes={};this._mapPane=a.mapPane=this._createPane("leaflet-map-pane",this._container);this._tilePane=a.tilePane=this._createPane("leaflet-tile-pane",this._mapPane);this._objectsPane=a.objectsPane=this._createPane("leaflet-objects-pane",this._mapPane);a.shadowPane=this._createPane("leaflet-shadow-pane");a.overlayPane=this._createPane("leaflet-overlay-pane");a.markerPane=this._createPane("leaflet-marker-pane");a.popupPane=this._createPane("leaflet-popup-pane")},_createPane:function(a,
+b){return L.DomUtil.create("div",a,b||this._objectsPane)},_resetView:function(a,b,c){var d=this._zoom!=b;this.fire("movestart");this._zoom=b;this._initialTopLeftPoint=this._getNewTopLeftPoint(a);c?this._initialTopLeftPoint._add(L.DomUtil.getPosition(this._mapPane)):L.DomUtil.setPosition(this._mapPane,new L.Point(0,0));this._tileLayersToLoad=this._tileLayersNum;this.fire("viewreset");this.fire("move");d&&this.fire("zoomend");this.fire("moveend");if(!this._loaded)this._loaded=!0,this.fire("load")},
+_initLayers:function(a){this._layers={};for(var b=0,c=a.length;b<c;b++)this.addLayer(a[b])},_initControls:function(){this.options.zoomControl&&this.addControl(new L.Control.Zoom);if(this.options.attributionControl)this.attributionControl=new L.Control.Attribution,this.addControl(this.attributionControl)},_rawPanBy:function(a){var b=L.DomUtil.getPosition(this._mapPane);L.DomUtil.setPosition(this._mapPane,b.subtract(a))},_initEvents:function(){L.DomEvent.addListener(this._container,"click",this._onMouseClick,
+this);for(var a=["dblclick","mousedown","mouseenter","mouseleave","mousemove"],b=0;b<a.length;b++)L.DomEvent.addListener(this._container,a[b],this._fireMouseEvent,this);this.options.trackResize&&L.DomEvent.addListener(window,"resize",this.invalidateSize,this)},_onMouseClick:function(a){if(!this.dragging||!this.dragging.moved())this.fire("pre"+a.type),this._fireMouseEvent(a)},_fireMouseEvent:function(a){var b=a.type,b=b=="mouseenter"?"mouseover":b=="mouseleave"?"mouseout":b;this.hasEventListeners(b)&&
+this.fire(b,{latlng:this.mouseEventToLatLng(a),layerPoint:this.mouseEventToLayerPoint(a)})},_initInteraction:function(){var a={dragging:L.Handler.MapDrag,touchZoom:L.Handler.TouchZoom,doubleClickZoom:L.Handler.DoubleClickZoom,scrollWheelZoom:L.Handler.ScrollWheelZoom,shiftDragZoom:L.Handler.ShiftDragZoom},b;for(b in a)a.hasOwnProperty(b)&&a[b]&&(this[b]=new a[b](this),this.options[b]&&this[b].enable())},_onTileLayerLoad:function(){this._tileLayersToLoad--;if(this._tileLayersNum&&!this._tileLayersToLoad&&
+this._tileBg)clearTimeout(this._clearTileBgTimer),this._clearTileBgTimer=setTimeout(L.Util.bind(this._clearTileBg,this),500)},_getTopLeftPoint:function(){if(!this._loaded)throw Error("Set map center and zoom first.");return this._initialTopLeftPoint.subtract(L.DomUtil.getPosition(this._mapPane))},_getNewTopLeftPoint:function(a){var b=this.getSize().divideBy(2);return this.project(a).subtract(b).round()},_limitZoom:function(a){var b=this.getMinZoom(),c=this.getMaxZoom();return Math.max(b,Math.min(c,
+a))}});L.Map.include({locate:function(a){var b={timeout:1E4};L.Util.extend(b,a);navigator.geolocation?navigator.geolocation.getCurrentPosition(L.Util.bind(this._handleGeolocationResponse,this),L.Util.bind(this._handleGeolocationError,this),b):this.fire("locationerror",{code:0,message:"Geolocation not supported."});return this},locateAndSetView:function(a,b){this._setViewOnLocate=!0;this._maxLocateZoom=a||Infinity;return this.locate(b)},_handleGeolocationError:function(a){var a=a.code,b=a==1?"permission denied":
+a==2?"position unavailable":"timeout";if(this._setViewOnLocate)this.fitWorld(),this._setViewOnLocate=!1;this.fire("locationerror",{code:a,message:"Geolocation error: "+b+"."})},_handleGeolocationResponse:function(a){var b=180*a.coords.accuracy/4E7,c=b*2,d=a.coords.latitude,e=a.coords.longitude,f=new L.LatLng(d-b,e-c),b=new L.LatLng(d+b,e+c),f=new L.LatLngBounds(f,b);if(this._setViewOnLocate)b=Math.min(this.getBoundsZoom(f),this._maxLocateZoom),this.setView(f.getCenter(),b),this._setViewOnLocate=!1;
+this.fire("locationfound",{latlng:new L.LatLng(d,e),bounds:f,accuracy:a.coords.accuracy})}});L.Map.include({openPopup:function(a){this.closePopup();this._popup=a;return this.addLayer(a)},closePopup:function(){this._popup&&this.removeLayer(this._popup);return this}});L.Map.include(!L.Transition||!L.Transition.implemented()?{}:{setView:function(a,b,c){var b=this._limitZoom(b),d=this._zoom!=b;if(this._loaded&&!c&&this._layers&&(c=this._getNewTopLeftPoint(a).subtract(this._getTopLeftPoint()),d?this._zoomToIfCenterInView&&this._zoomToIfCenterInView(a,b,c):this._panByIfClose(c)))return this;this._resetView(a,b);return this},panBy:function(a){if(!this._panTransition)this._panTransition=new L.Transition(this._mapPane,{duration:0.3}),this._panTransition.on("step",this._onPanTransitionStep,
+this),this._panTransition.on("end",this._onPanTransitionEnd,this);this.fire(this,"movestart");this._panTransition.run({position:L.DomUtil.getPosition(this._mapPane).subtract(a)});return this},_onPanTransitionStep:function(){this.fire("move")},_onPanTransitionEnd:function(){this.fire("moveend")},_panByIfClose:function(a){if(this._offsetIsWithinView(a))return this.panBy(a),!0;return!1},_offsetIsWithinView:function(a,b){var c=b||1,d=this.getSize();return Math.abs(a.x)<=d.x*c&&Math.abs(a.y)<=d.y*c}});L.Map.include(!L.DomUtil.TRANSITION?{}:{_zoomToIfCenterInView:function(a,b,c){if(this._animatingZoom)return!0;if(!this.options.zoomAnimation)return!1;var d=Math.pow(2,b-this._zoom),c=c.divideBy(1-1/d);if(!this._offsetIsWithinView(c,1))return!1;this._mapPane.className+=" leaflet-zoom-anim";c=this.containerPointToLayerPoint(this.getSize().divideBy(2)).add(c);this._prepareTileBg();this._runAnimation(a,b,d,c);return!0},_runAnimation:function(a,b,c,d){this._animatingZoom=!0;this._animateToCenter=a;this._animateToZoom=
+b;a=L.DomUtil.TRANSFORM;if(L.Browser.gecko||window.opera)this._tileBg.style[a]+=" translate(0,0)";L.Browser.android?(this._tileBg.style[a+"Origin"]=d.x+"px "+d.y+"px",c="scale("+c+")"):c=L.DomUtil.getScaleString(c,d);L.Util.falseFn(this._tileBg.offsetWidth);d={};d[a]=this._tileBg.style[a]+" "+c;this._tileBg.transition.run(d)},_prepareTileBg:function(){if(!this._tileBg)this._tileBg=this._createPane("leaflet-tile-pane",this._mapPane),this._tileBg.style.zIndex=1;var a=this._tilePane,b=this._tileBg;b.style[L.DomUtil.TRANSFORM]=
+"";b.style.visibility="hidden";b.empty=!0;a.empty=!1;this._tilePane=this._panes.tilePane=b;this._tileBg=a;if(!this._tileBg.transition)this._tileBg.transition=new L.Transition(this._tileBg,{duration:0.3,easing:"cubic-bezier(0.25,0.1,0.25,0.75)"}),this._tileBg.transition.on("end",this._onZoomTransitionEnd,this);this._stopLoadingBgTiles()},_stopLoadingBgTiles:function(){for(var a=[].slice.call(this._tileBg.getElementsByTagName("img")),b=0,c=a.length;b<c;b++)if(!a[b].complete)a[b].src="",a[b].parentNode.removeChild(a[b])},
+_onZoomTransitionEnd:function(){this._restoreTileFront();L.Util.falseFn(this._tileBg.offsetWidth);this._resetView(this._animateToCenter,this._animateToZoom,!0);this._mapPane.className=this._mapPane.className.replace(" leaflet-zoom-anim","");this._animatingZoom=!1},_restoreTileFront:function(){this._tilePane.innerHTML="";this._tilePane.style.visibility="";this._tilePane.style.zIndex=2;this._tileBg.style.zIndex=1},_clearTileBg:function(){if(!this._animatingZoom&&!this.touchZoom._zooming)this._tileBg.innerHTML=
+""}});L.Map.include({addControl:function(a){a.onAdd(this);var b=a.getPosition(),c=this._controlCorners[b],a=a.getContainer();L.DomUtil.addClass(a,"leaflet-control");b.indexOf("bottom")!=-1?c.insertBefore(a,c.firstChild):c.appendChild(a);return this},removeControl:function(a){var b=this._controlCorners[a.getPosition()],c=a.getContainer();b.removeChild(c);if(a.onRemove)a.onRemove(this);return this},_initControlPos:function(){var a=this._controlCorners={},b=L.DomUtil.create("div","leaflet-control-container",
 this._container);L.Browser.mobileWebkit&&(b.className+=" leaflet-big-buttons");a.topLeft=L.DomUtil.create("div","leaflet-top leaflet-left",b);a.topRight=L.DomUtil.create("div","leaflet-top leaflet-right",b);a.bottomLeft=L.DomUtil.create("div","leaflet-bottom leaflet-left",b);a.bottomRight=L.DomUtil.create("div","leaflet-bottom leaflet-right",b)}});


--- a/yt/gui/reason/http_repl.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/gui/reason/http_repl.py	Fri Sep 09 10:33:52 2011 -0400
@@ -4,7 +4,7 @@
 
 Author: Matthew Turk <matthewturk@gmail.com>
 Affiliation: Columbia University
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/gui/reason/pannable_map.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/gui/reason/pannable_map.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Matthew Turk <matthewturk@gmail.com>
 Affiliation: Columbia University
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/gui/traited_explorer/plot_editors.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/gui/traited_explorer/plot_editors.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Matthew Turk <matthewturk@gmail.com>
 Affiliation: KIPAC/SLAC/Stanford
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2009 Matthew Turk.  All Rights Reserved.
 


--- a/yt/gui/traited_explorer/traited_explorer.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/gui/traited_explorer/traited_explorer.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Matthew Turk <matthewturk@gmail.com>
 Affiliation: KIPAC/SLAC/Stanford
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2009 Matthew Turk.  All Rights Reserved.
 


--- a/yt/gui/traited_explorer/tvtk_interface.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/gui/traited_explorer/tvtk_interface.py	Fri Sep 09 10:33:52 2011 -0400
@@ -1,11 +1,11 @@
 """
 This is the preliminary interface to VTK.  Note that as of VTK 5.2, it still
 requires a patchset prepared here:
-http://yt.enzotools.org/files/vtk_composite_data.zip
+http://yt-project.org/files/vtk_composite_data.zip
 
 Author: Matthew Turk <matthewturk@gmail.com>
 Affiliation: KIPAC/SLAC/Stanford
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2007-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/mods.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/mods.py	Fri Sep 09 10:33:52 2011 -0400
@@ -4,7 +4,7 @@
 
 Author: Matthew Turk <matthewturk@gmail.com>
 Affiliation: KIPAC/SLAC/Stanford
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2008-2011 Matthew Turk.  All Rights Reserved.
 
@@ -40,7 +40,6 @@
 from yt.utilities.performance_counters import yt_counters, time_function
 from yt.config import ytcfg
 import yt.utilities.physical_constants as physical_constants
-from yt.utilities.cookbook import Intent
 
 from yt.data_objects.api import \
     BinnedProfile1D, BinnedProfile2D, BinnedProfile3D, \


--- a/yt/utilities/_amr_utils/CICDeposit.pyx	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/utilities/_amr_utils/CICDeposit.pyx	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Britton Smith <brittonsmith at gmail.com>
 Affiliation: CASA/University of Colorado
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2008 Matthew Turk.  All Rights Reserved.
 


--- a/yt/utilities/_amr_utils/ContourFinding.pyx	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/utilities/_amr_utils/ContourFinding.pyx	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: KIPAC/SLAC/Stanford
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2010-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/utilities/_amr_utils/DepthFirstOctree.pyx	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/utilities/_amr_utils/DepthFirstOctree.pyx	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: KIPAC/SLAC/Stanford
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2008 Matthew Turk.  All Rights Reserved.
 


--- a/yt/utilities/_amr_utils/Interpolators.pyx	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/utilities/_amr_utils/Interpolators.pyx	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: KIPAC/SLAC/Stanford
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2008 Matthew Turk.  All Rights Reserved.
 
@@ -27,6 +27,8 @@
 cimport numpy as np
 cimport cython
 
+@cython.cdivision(True)
+@cython.wraparound(False)
 @cython.boundscheck(False)
 def UnilinearlyInterpolate(np.ndarray[np.float64_t, ndim=1] table,
                            np.ndarray[np.float64_t, ndim=1] x_vals,
@@ -44,6 +46,8 @@
         output[i]  = table[x_i  ] * (xm) \
                    + table[x_i+1] * (xp)
 
+@cython.cdivision(True)
+@cython.wraparound(False)
 @cython.boundscheck(False)
 def BilinearlyInterpolate(np.ndarray[np.float64_t, ndim=2] table,
                           np.ndarray[np.float64_t, ndim=1] x_vals,
@@ -73,6 +77,8 @@
                    + table[x_i  , y_i+1] * (xm*yp) \
                    + table[x_i+1, y_i+1] * (xp*yp)
 
+@cython.cdivision(True)
+@cython.wraparound(False)
 @cython.boundscheck(False)
 def TrilinearlyInterpolate(np.ndarray[np.float64_t, ndim=3] table,
                            np.ndarray[np.float64_t, ndim=1] x_vals,
@@ -114,3 +120,54 @@
                    + table[x_i  ,y_i+1,z_i+1] * (xm*yp*zp) \
                    + table[x_i+1,y_i+1,z_i  ] * (xp*yp*zm) \
                    + table[x_i+1,y_i+1,z_i+1] * (xp*yp*zp)
+
+@cython.cdivision(True)
+@cython.wraparound(False)
+@cython.boundscheck(False)
+def ghost_zone_interpolate(int rf,
+                           np.ndarray[np.float64_t, ndim=3] input_field,
+                           np.ndarray[np.float64_t, ndim=1] input_left,
+                           np.ndarray[np.float64_t, ndim=3] output_field,
+                           np.ndarray[np.float64_t, ndim=1] output_left):
+    cdef int oi, oj, ok
+    cdef int ii, ij, ik
+    cdef np.float64_t xp, xm, yp, ym, zp, zm
+    cdef np.float64_t ods[3], ids[3], iids[3]
+    cdef np.float64_t opos[3], ropos[3], temp
+    cdef int i, j
+    for i in range(3):
+        temp = input_left[i] + (rf * (input_field.shape[i] - 1))
+        ids[i] = (temp - input_left[i])/(input_field.shape[i]-1)
+        temp = output_left[i] + output_field.shape[i] - 1
+        ods[i] = (temp - output_left[i])/(output_field.shape[i]-1)
+        iids[i] = 1.0/ids[i]
+    opos[0] = output_left[0]
+    for oi in range(output_field.shape[0]):
+        ropos[0] = ((opos[0] - input_left[0]) * iids[0])
+        ii = iclip(<int> ropos[0], 0, input_field.shape[0] - 2)
+        xp = ropos[0] - ii
+        xm = 1.0 - xp
+        opos[1] = output_left[1]
+        for oj in range(output_field.shape[1]):
+            ropos[1] = ((opos[1] - input_left[1]) * iids[1])
+            ij = iclip(<int> ropos[1], 0, input_field.shape[1] - 2)
+            yp = ropos[1] - ij
+            ym = 1.0 - yp
+            opos[2] = output_left[2]
+            for ok in range(output_field.shape[2]):
+                ropos[2] = ((opos[2] - input_left[2]) * iids[2])
+                ik = iclip(<int> ropos[2], 0, input_field.shape[2] - 2)
+                zp = ropos[2] - ik
+                zm = 1.0 - zp
+                output_field[oi,oj,ok] = \
+                     input_field[ii  ,ij  ,ik  ] * (xm*ym*zm) \
+                   + input_field[ii+1,ij  ,ik  ] * (xp*ym*zm) \
+                   + input_field[ii  ,ij+1,ik  ] * (xm*yp*zm) \
+                   + input_field[ii  ,ij  ,ik+1] * (xm*ym*zp) \
+                   + input_field[ii+1,ij  ,ik+1] * (xp*ym*zp) \
+                   + input_field[ii  ,ij+1,ik+1] * (xm*yp*zp) \
+                   + input_field[ii+1,ij+1,ik  ] * (xp*yp*zm) \
+                   + input_field[ii+1,ij+1,ik+1] * (xp*yp*zp)
+                opos[2] += ods[2]
+            opos[1] += ods[1]
+        opos[0] += ods[0]
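
The new ghost_zone_interpolate follows the same scheme as the three interpolators above it: each output sample blends the eight input cells surrounding its fractional index with weights (xm|xp)*(ym|yp)*(zm|zp). For reference, a minimal NumPy sketch of that weighting (illustrative only, not part of the commit):

    import numpy as np

    def trilinear_sample(field, pos):
        # Blend the eight cell values around the fractional index `pos`,
        # clipping the base index as the Cython code does with iclip().
        i, j, k = (int(np.clip(p, 0, n - 2)) for p, n in zip(pos, field.shape))
        xp, yp, zp = (p - b for p, b in zip(pos, (i, j, k)))
        xm, ym, zm = 1.0 - xp, 1.0 - yp, 1.0 - zp
        return (field[i,   j,   k  ] * xm*ym*zm + field[i+1, j,   k  ] * xp*ym*zm
              + field[i,   j+1, k  ] * xm*yp*zm + field[i,   j,   k+1] * xm*ym*zp
              + field[i+1, j,   k+1] * xp*ym*zp + field[i,   j+1, k+1] * xm*yp*zp
              + field[i+1, j+1, k  ] * xp*yp*zm + field[i+1, j+1, k+1] * xp*yp*zp)

    field = np.arange(27, dtype=np.float64).reshape(3, 3, 3)
    print(trilinear_sample(field, (0.5, 0.5, 0.5)))  # 6.5, center of the first cell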


--- a/yt/utilities/_amr_utils/Octree.pyx	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/utilities/_amr_utils/Octree.pyx	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: UCSD
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2010-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/utilities/_amr_utils/PointsInVolume.pyx	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/utilities/_amr_utils/PointsInVolume.pyx	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: John Wise <jwise77 at gmail.com>
 Affiliation: Princeton
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2009 John.  All Rights Reserved.
 


--- a/yt/utilities/_amr_utils/QuadTree.pyx	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/utilities/_amr_utils/QuadTree.pyx	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: UCSD
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2010-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/utilities/_amr_utils/RayIntegrators.pyx	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/utilities/_amr_utils/RayIntegrators.pyx	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: KIPAC/SLAC/Stanford
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2008 Matthew Turk.  All Rights Reserved.
 
@@ -198,6 +198,7 @@
         # If we've reached t = 1, we are done.
         grid_mask[cur_ind[0], cur_ind[1], cur_ind[2]] = 1
         if (tmax[0] > 1.0) and (tmax[1] > 1.0) and (tmax[2] > 1.0):
+            grid_t[cur_ind[0], cur_ind[1], cur_ind[2]] = 1.0
             grid_dt[cur_ind[0], cur_ind[1], cur_ind[2]] = 1.0 - enter_t
             break
         if tmax[0] < tmax[1]:
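
The added assignment makes the terminal cell's bookkeeping consistent: the ray is parameterized by t in [0, 1], and once every component of tmax exceeds 1 the ray ends inside the current cell, so the commit records t = 1.0 for that cell alongside the remaining path length 1.0 - enter_t. In toy numbers (hypothetical values, not taken from the code):

    # Ray parameterized over t in [0, 1]; suppose it entered its final
    # cell at t = 0.85 and terminates at the domain edge, t = 1.0.
    enter_t = 0.85
    final_t = 1.0             # value newly written into grid_t
    final_dt = 1.0 - enter_t  # 0.15 of the ray spent in this cell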


--- a/yt/utilities/_amr_utils/VolumeIntegrator.pyx	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/utilities/_amr_utils/VolumeIntegrator.pyx	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: KIPAC/SLAC/Stanford
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2009 Matthew Turk.  All Rights Reserved.
 


--- a/yt/utilities/_amr_utils/fortran_reader.pyx	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/utilities/_amr_utils/fortran_reader.pyx	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: UCSD
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2010-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/utilities/_amr_utils/freetype_writer.pyx	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/utilities/_amr_utils/freetype_writer.pyx	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: UCSD
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2010-2011 Matthew Turk.  All Rights Reserved.
 
@@ -69,6 +69,7 @@
 
 def initialize_library():
     # We do not clear the library from memory.
+    global initialized
     if initialized == 1: return
     cdef int error = FT_Init_FreeType(&library)
     if error: raise RuntimeError
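
The global declaration matters because, as in Python, any name assigned anywhere in a Cython function body is otherwise treated as local; assuming initialize_library() sets initialized = 1 further down (as the guard implies), omitting the declaration would bind a local variable and the module-level flag would never flip. A pure-Python illustration of the pitfall (a sketch, not the module's code):

    initialized = 0

    def initialize_library():
        global initialized    # without this, the assignment below would be
        if initialized == 1:  # local and the early-return guard never fires
            return
        # ... FT_Init_FreeType(&library) happens in the real code ...
        initialized = 1

    initialize_library()
    initialize_library()      # second call returns immediately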


--- a/yt/utilities/_amr_utils/healpix_interface.pxd	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/utilities/_amr_utils/healpix_interface.pxd	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: Columbia University
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/utilities/_amr_utils/kdtree_utils.pxd	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/utilities/_amr_utils/kdtree_utils.pxd	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: Columbia University
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/utilities/_amr_utils/misc_utilities.pyx	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/utilities/_amr_utils/misc_utilities.pyx	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: Columbia University
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2011 Matthew Turk.  All Rights Reserved.
 
@@ -27,6 +27,9 @@
 cimport numpy as np
 cimport cython
 
+@cython.boundscheck(False)
+@cython.wraparound(False)
+@cython.cdivision(True)
 def get_color_bounds(np.ndarray[np.float64_t, ndim=1] px,
                      np.ndarray[np.float64_t, ndim=1] py,
                      np.ndarray[np.float64_t, ndim=1] pdx,
@@ -51,28 +54,57 @@
             if v > ma: ma = v
     return (mi, ma)
 
+@cython.boundscheck(False)
+@cython.wraparound(False)
+@cython.cdivision(True)
 def get_box_grids_level(np.ndarray[np.float64_t, ndim=1] left_edge,
                         np.ndarray[np.float64_t, ndim=1] right_edge,
                         int level,
                         np.ndarray[np.float64_t, ndim=2] left_edges,
                         np.ndarray[np.float64_t, ndim=2] right_edges,
                         np.ndarray[np.int32_t, ndim=2] levels,
+                        np.ndarray[np.int32_t, ndim=1] mask,
+                        int min_index = 0):
+    cdef int i, n
+    cdef int nx = left_edges.shape[0]
+    cdef int inside 
+    for i in range(nx):
+        if i < min_index or levels[i,0] != level:
+            mask[i] = 0
+            continue
+        inside = 1
+        for n in range(3):
+            if left_edge[n] >= right_edges[i,n] or \
+               right_edge[n] <= left_edges[i,n]:
+                inside = 0
+                break
+        if inside == 1: mask[i] = 1
+        else: mask[i] = 0
+
+@cython.boundscheck(False)
+@cython.wraparound(False)
+@cython.cdivision(True)
+def get_box_grids_below_level(
+                        np.ndarray[np.float64_t, ndim=1] left_edge,
+                        np.ndarray[np.float64_t, ndim=1] right_edge,
+                        int level,
+                        np.ndarray[np.float64_t, ndim=2] left_edges,
+                        np.ndarray[np.float64_t, ndim=2] right_edges,
+                        np.ndarray[np.int32_t, ndim=2] levels,
                         np.ndarray[np.int32_t, ndim=1] mask):
     cdef int i, n
     cdef int nx = left_edges.shape[0]
     cdef int inside 
     for i in range(nx):
-        if levels[i,0] != level:
-            mask[i] = 0
-            continue
-        inside = 1
-        for n in range(3):
-            if left_edge[n] >= right_edges[i,n] or \
-               right_edge[n] <= left_edges[i,n]:
-                inside = 0
-                break
-        if inside == 1: mask[i] = 1
-        else: mask[i] = 0
+        mask[i] = 0
+        if levels[i,0] <= level:
+            inside = 1
+            for n in range(3):
+                if left_edge[n] >= right_edges[i,n] or \
+                   right_edge[n] <= left_edges[i,n]:
+                    inside = 0
+                    break
+            if inside == 1: mask[i] = 1
 
 @cython.boundscheck(False)
 @cython.wraparound(False)
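
Both routines apply the same axis-aligned overlap test: a candidate grid is rejected as soon as the query box lies entirely to one side of it along any axis. A vectorized NumPy equivalent of get_box_grids_below_level (an illustrative sketch, not the Cython code):

    import numpy as np

    def box_grids_below_level(left_edge, right_edge, level,
                              left_edges, right_edges, levels):
        # A grid is selected when its level is at or below `level` and its
        # bounding box overlaps the query box along all three axes.
        overlap = np.all((left_edge < right_edges) &
                         (right_edge > left_edges), axis=1)
        return (overlap & (levels[:, 0] <= level)).astype(np.int32)

    le = np.array([0.25, 0.25, 0.25]); re = np.array([0.75, 0.75, 0.75])
    g_le = np.array([[0.0, 0.0, 0.0], [0.5, 0.5, 0.5]])
    g_re = np.array([[0.5, 0.5, 0.5], [1.0, 1.0, 1.0]])
    lv = np.array([[0], [1]], dtype=np.int32)
    print(box_grids_below_level(le, re, 1, g_le, g_re, lv))  # [1 1]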


--- a/yt/utilities/_amr_utils/png_writer.pyx	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/utilities/_amr_utils/png_writer.pyx	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: UCSD
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2010-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/utilities/amr_kdtree/amr_kdtree.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/utilities/amr_kdtree/amr_kdtree.py	Fri Sep 09 10:33:52 2011 -0400
@@ -6,7 +6,7 @@
 Wil St. Charles <fallen751 at gmail.com>
 Affiliation: University of Colorado at Boulder
 
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2010-2011 Samuel Skillman.  All Rights Reserved.
 
@@ -261,7 +261,7 @@
         self.pf = pf
         self._id_offset = pf.h.grids[0]._id_offset
         if nprocs > len(pf.h.grids):
-            print('Parallel rendering requires that the number of \n \
+            mylog.info('Parallel rendering requires that the number of \n \
             grids in the dataset is greater or equal to the number of \n \
             processors.  Reduce number of processors.')
             raise(KeyError)
@@ -289,18 +289,46 @@
         if le is None:
             self.domain_left_edge = pf.domain_left_edge
         else:
-            self.domain_left_edge = na.clip(na.array(le),pf.domain_left_edge, pf.domain_right_edge)
+            self.domain_left_edge = na.array(le)
+
         if re is None:
             self.domain_right_edge = pf.domain_right_edge
         else:
-            self.domain_right_edge = na.clip(na.array(re),pf.domain_left_edge, pf.domain_right_edge)
+            self.domain_right_edge = na.array(re)
 
+        self.domain_left_edge = na.clip(self.domain_left_edge,pf.domain_left_edge, pf.domain_right_edge)
+        self.domain_right_edge = na.clip(self.domain_right_edge,pf.domain_left_edge, pf.domain_right_edge)
+
+        levels = pf.hierarchy.get_levels()
+        root_grids = levels.next()
+        covering_grids = root_grids
+        vol_needed = na.prod(self.domain_right_edge-self.domain_left_edge)
+
+        for i in range(self.pf.hierarchy.max_level):
+            root_l_data = na.clip(na.array([grid.LeftEdge for grid in root_grids]),self.domain_left_edge, self.domain_right_edge)
+            root_r_data = na.clip(na.array([grid.RightEdge for grid in root_grids]),self.domain_left_edge, self.domain_right_edge)
+            
+            vol = na.prod(root_r_data-root_l_data,axis=1).sum()
+            if vol >= vol_needed:
+                covering_grids = root_grids
+                root_grids = levels.next()
+            else:
+                break
+            
+        root_grids = covering_grids
+        
+        rgdds = root_grids[0].dds
+        self.domain_left_edge = ((self.domain_left_edge)/rgdds).astype('int64')*rgdds
+        self.domain_right_edge = (((self.domain_right_edge)/rgdds).astype('int64')+1)*rgdds
+
+        self.domain_left_edge = na.clip(self.domain_left_edge,pf.domain_left_edge, pf.domain_right_edge)
+        self.domain_right_edge = na.clip(self.domain_right_edge,pf.domain_left_edge, pf.domain_right_edge)
+        
         self.my_l_corner = self.domain_left_edge
         self.my_r_corner = self.domain_right_edge
 
-        mylog.info('Making kd tree from le %s to %s'% (self.domain_left_edge, self.domain_right_edge))
-        root_grids = pf.hierarchy.get_levels().next()
-
+        #mylog.info('Making kd tree from le %s to %s'% (self.domain_left_edge, self.domain_right_edge))
+        
         root_l_data = na.array([grid.LeftEdge for grid in root_grids])
         root_r_data = na.array([grid.RightEdge for grid in root_grids])
         root_we_want = na.all(root_l_data < self.my_r_corner,axis=1)*\
@@ -331,7 +359,7 @@
 
         # Calculate the total volume spanned by the tree
         self.volume = self.count_volume()
-        mylog.debug('Cost is %d' % self.total_cost)
+        #mylog.debug('Cost is %d' % self.total_cost)
         mylog.debug('Volume is %e' % self.volume) 
 
         self.current_saved_grids = []
@@ -782,6 +810,24 @@
                 v += na.prod(node.r_corner - node.l_corner)
         return v
 
+    def count_cells(self):
+        r"""Calculates the number of cells in the kd-Tree
+
+        Parameters
+        ----------
+        None
+
+        Returns
+        -------
+        Total number of cells in the tree.
+        
+        """
+        c = na.int64(0)
+        for node in self.depth_traverse():
+            if node.grid is not None:
+                c += na.prod(node.ri - node.li).astype('int64')
+        return c
+
     def _build(self, grids, parent, l_corner, r_corner):
         r"""Builds the AMR kd-Tree
 
@@ -979,17 +1025,19 @@
         my_node.owner = 0
         path = na.binary_repr(anprocs+my_rank)
         for i in range(rounds):
-            my_node.left_child.owner = my_node.owner
-            my_node.right_child.owner = my_node.owner + 2**(rounds-(i+1))
-            if path[i+1] is '0': 
-                my_node = my_node.left_child
-                my_node_id = my_node.id
-            else:
-                my_node = my_node.right_child
-                my_node_id = my_node.id
-            
+            try:
+                my_node.left_child.owner = my_node.owner
+                my_node.right_child.owner = my_node.owner + 2**(rounds-(i+1))
+                if path[i+1] == '0':
+                    my_node = my_node.left_child
+                    my_node_id = my_node.id
+                else:
+                    my_node = my_node.right_child
+                    my_node_id = my_node.id
+            except:
+                rounds = i-1
         for thisround in range(rounds,0,-1):
-            print my_rank, 'my node', my_node_id
+            #print my_rank, 'my node', my_node_id
             parent = my_node.parent
             #print parent['split_ax'], parent['split_pos']
             if viewpoint[parent.split_ax] <= parent.split_pos:
@@ -999,7 +1047,7 @@
                 front = parent.left_child
                 back = parent.right_child 
 
-            mylog.debug('front owner %i back owner %i parent owner %i'%( front.owner, back.owner, parent.owner))
+            # mylog.debug('front owner %i back owner %i parent owner %i'%( front.owner, back.owner, parent.owner))
                 
             # Send the images around
             if front.owner == my_rank:
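
The heart of this change is snapping the requested volume outward to root-grid cell boundaries: the left edge is floored and the right edge is ceiled to multiples of the root-grid cell width rgdds, so the tree always covers whole root cells. The rounding step in isolation (a sketch with made-up numbers):

    import numpy as np

    dds = np.array([0.125, 0.125, 0.125])    # root-grid cell width
    left_edge = np.array([0.30, 0.30, 0.30])
    right_edge = np.array([0.70, 0.70, 0.70])

    # Floor the left edge and ceil the right edge to cell boundaries,
    # exactly as done with rgdds above.
    snapped_left = (left_edge / dds).astype('int64') * dds
    snapped_right = ((right_edge / dds).astype('int64') + 1) * dds
    print(snapped_left)   # [ 0.25  0.25  0.25]
    print(snapped_right)  # [ 0.75  0.75  0.75]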


--- a/yt/utilities/amr_kdtree/api.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/utilities/amr_kdtree/api.py	Fri Sep 09 10:33:52 2011 -0400
@@ -6,7 +6,7 @@
 Wil St. Charles <fallen751 at gmail.com>
 Affiliation: University of Colorado at Boulder
 
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2010-2011 Samuel Skillman.  All Rights Reserved.
 


--- a/yt/utilities/amr_utils.pyx	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/utilities/amr_utils.pyx	Fri Sep 09 10:33:52 2011 -0400
@@ -4,7 +4,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: KIPAC/SLAC/Stanford
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2008 Matthew Turk.  All Rights Reserved.
 


--- a/yt/utilities/answer_testing/__init__.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/utilities/answer_testing/__init__.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: UCSD
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2010-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/utilities/answer_testing/api.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/utilities/answer_testing/api.py	Fri Sep 09 10:33:52 2011 -0400
@@ -7,7 +7,7 @@
 Affiliation: KIPAC/SLAC/Stanford
 Author: Britton Smith <brittonsmith at gmail.com>
 Affiliation: MSU
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2010-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/utilities/answer_testing/default_tests.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/utilities/answer_testing/default_tests.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: Columbia University
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2010-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/utilities/answer_testing/hydro_tests.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/utilities/answer_testing/hydro_tests.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: Columbia University
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2010-2011 Matthew Turk.  All Rights Reserved.
 
@@ -32,6 +32,7 @@
 
     field = None
     axis = None
+    weight_field = None
 
     def run(self):
         # First we get our flattened projection -- this is the


--- a/yt/utilities/answer_testing/output_tests.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/utilities/answer_testing/output_tests.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: Columbia University
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2010-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/utilities/answer_testing/runner.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/utilities/answer_testing/runner.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: Columbia University
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2010-2011 Matthew Turk.  All Rights Reserved.
 
@@ -129,8 +129,11 @@
         self.plot_list[test.name] = test.plot()
         self.results[test.name] = test.result
         success, msg = self._compare(test)
-        if success == True: print "SUCCEEDED"
-        else: print "FAILED"
+        if self.old_results is None:
+            print "NO OLD RESULTS"
+        else:
+            if success == True: print "SUCCEEDED"
+            else: print "FAILED", msg
         self.passed_tests[test.name] = success
         if self.watcher is not None:
             if success == True:
@@ -144,11 +147,11 @@
         try:
             old_result = self.old_results[test.name]
         except FileNotExistException:
-            return (False, "File %s does not exist." % test.name)
+            return (False, sys.exc_info())
         try:
             test.compare(old_result)
         except RegressionTestException, exc:
-            return (False, str(exc))
+            return (False, sys.exc_info())
         return (True, "Pass")
 
     def run_tests_from_file(self, filename):
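
Returning sys.exc_info() rather than a formatted string hands the caller the full (type, value, traceback) triple, so failures can later be printed with their tracebacks intact. The pattern in miniature (a sketch, not the runner itself):

    import sys, traceback

    def compare_safely(compare, old_result):
        try:
            compare(old_result)
        except Exception:
            # Preserve the full exception info for the caller
            # instead of flattening it to str(exc).
            return (False, sys.exc_info())
        return (True, "Pass")

    def bad_compare(old):
        raise ValueError("mismatch")

    ok, info = compare_safely(bad_compare, None)
    if not ok:
        traceback.print_exception(*info)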


--- a/yt/utilities/api.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/utilities/api.py	Fri Sep 09 10:33:52 2011 -0400
@@ -7,7 +7,7 @@
 Affiliation: KIPAC/SLAC/Stanford
 Author: Britton Smith <brittonsmith at gmail.com>
 Affiliation: MSU
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2010-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/utilities/command_line.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/utilities/command_line.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: KIPAC/SLAC/Stanford
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2008-2011 Matthew Turk.  All Rights Reserved.
 
@@ -91,7 +91,7 @@
                    action="store", type="float",
                    dest="center", default=None,
                    nargs=3,
-                   help="Center, command separated (-1 -1 -1 for max)"),
+                   help="Center, space separated (-1 -1 -1 for max)"),
     bn      = dict(short="-b", long="--basename",
                    action="store", type="string",
                    dest="basename", default=None,
@@ -161,6 +161,37 @@
                    action="store_true",
                    dest="time", default=False,
                    help="Print time in years on image"),
+    contours    = dict(short="", long="--contours",
+                   action="store",type="int",
+                   dest="contours", default=None,
+                   help="Number of Contours for Rendering"),
+    contour_width  = dict(short="", long="--contour_width",
+                   action="store",type="float",
+                   dest="contour_width", default=None,
+                   help="Width of gaussians used for rendering."),
+    enhance   = dict(short="", long="--enhance",
+                   action="store_true",
+                   dest="enhance", default=False,
+                   help="Enhance!"),
+    valrange  = dict(short="-r", long="--range",
+                   action="store", type="float",
+                   dest="valrange", default=None,
+                   nargs=2,
+                   help="Range, space separated"),
+    up  = dict(short="", long="--up",
+                   action="store", type="float",
+                   dest="up", default=None,
+                   nargs=3,
+                   help="Up, space separated"),
+    viewpoint  = dict(short="", long="--viewpoint",
+                   action="store", type="float",
+                   dest="viewpoint", default=[1., 1., 1.],
+                   nargs=3,
+                   help="Viewpoint, space separated"),
+    pixels    = dict(short="", long="--pixels",
+                   action="store",type="int",
+                   dest="pixels", default=None,
+                   help="Number of Pixels for Rendering"),
     halos   = dict(short="", long="--halos",
                    action="store", type="string",
                    dest="halos",default="multiple",
@@ -294,6 +325,35 @@
         req.add_header('Authorization', 'Basic %s' % base64.b64encode(upw).strip())
     return urllib2.urlopen(req).read()
 
+def _get_yt_supp():
+    supp_path = os.path.join(os.environ["YT_DEST"], "src",
+                             "yt-supplemental")
+    # Now we check that the supplemental repository is checked out.
+    if not os.path.isdir(supp_path):
+        print
+        print "*** The yt-supplemental repository is not checked ***"
+        print "*** out.  I can do this for you, but because this ***"
+        print "*** is a delicate act, I require you to respond   ***"
+        print "*** to the prompt with the word 'yes'.            ***"
+        print
+        response = raw_input("Do you want me to try to check it out? ")
+        if response != "yes":
+            print
+            print "Okay, I understand.  You can check it out yourself."
+            print "This command will do it:"
+            print
+            print "$ hg clone http://hg.yt-project.org/yt-supplemental/ ",
+            print "%s" % (supp_path)
+            print
+            sys.exit(1)
+        # NB: build a ui object here; unlike the old inline code in
+        # bootstrap_dev, this helper has no `uu` in scope.
+        from mercurial import ui, commands
+        uu = ui.ui()
+        rv = commands.clone(uu,
+                "http://hg.yt-project.org/yt-supplemental/", supp_path)
+        if rv:
+            print "Something has gone wrong.  Quitting."
+            sys.exit(1)
+    # Now we think we have our supplemental repository.
+    return supp_path
+
 class YTCommands(cmdln.Cmdln):
     name="yt"
 
@@ -301,13 +361,44 @@
         cmdln.Cmdln.__init__(self, *args, **kwargs)
         cmdln.Cmdln.do_help.aliases.append("h")
 
-    def do_loop(self, subcmd, opts, *args):
+    def do_update(self, subcmd, opts):
         """
-        Interactive loop
+        Update the yt installation to the most recent version
 
+        ${cmd_usage}
         ${cmd_option_list}
         """
-        self.cmdloop()
+        import pkg_resources
+        yt_provider = pkg_resources.get_provider("yt")
+        path = os.path.dirname(yt_provider.module_path)
+        print
+        print "yt module located at:"
+        print "    %s" % (path)
+        update_supp = False
+        if "YT_DEST" in os.environ:
+            spath = os.path.join(
+                     os.environ["YT_DEST"], "src", "yt-supplemental")
+            if os.path.isdir(spath):
+                print "The supplemental repositories are located at:"
+                print "    %s" % (spath)
+                update_supp = True
+        vstring = None
+        if "site-packages" not in path:
+            vstring = _get_hg_version(path)
+            print
+            print "The current version of the code is:"
+            print
+            print "---"
+            print vstring.strip()
+            print "---"
+            print
+            print "This installation CAN be automatically updated."
+            print "Updated successfully."
+        else:
+            print
+            print "YT site-packages not in path, so you must"
+            print "update this installation manually."
+            print
 
     @cmdln.option("-u", "--update-source", action="store_true",
                   default = False,
@@ -317,7 +408,7 @@
                   help="File into which the current revision number will be stored")
     def do_instinfo(self, subcmd, opts):
         """
-        ${cmd_name}: Get some information about the yt installation
+        Get some information about the yt installation
 
         ${cmd_usage}
         ${cmd_option_list}
@@ -359,7 +450,7 @@
 
     def do_load(self, subcmd, opts, arg):
         """
-        Load a single dataset into an IPython instance.
+        Load a single dataset into an IPython instance
 
         ${cmd_option_list}
         """
@@ -398,7 +489,7 @@
                       'halos','halo_hop_style','halo_radius','halo_radius_units'])
     def do_halos(self, subcmd, opts, arg):
         """
-        Run HaloProfiler on one dataset.
+        Run HaloProfiler on one dataset
 
         ${cmd_option_list}
         """
@@ -413,55 +504,6 @@
         if opts.make_projections:
             hp.make_projections()
 
-    @add_cmd_options(["maxw", "minw", "proj", "axis", "field", "weight",
-                      "zlim", "nframes", "output", "cmap", "uboxes", "dex",
-                      "text"])
-    def do_zoomin(self, subcmd, opts, arg):
-        """
-        Create a set of zoomin frames
-
-        ${cmd_option_list}
-        """
-        pf = _fix_pf(arg)
-        min_width = opts.min_width * pf.h.get_smallest_dx()
-        if opts.axis == 4:
-            axes = range(3)
-        else:
-            axes = [opts.axis]
-        pc = PlotCollection(pf)
-        for ax in axes: 
-            if opts.projection: p = pc.add_projection(opts.field, ax,
-                                    weight_field=opts.weight)
-            else: p = pc.add_slice(opts.field, ax)
-            if opts.unit_boxes: p.modify["units"](factor=8)
-            if opts.text is not None:
-                p.modify["text"](
-                    (0.02, 0.05), opts.text.replace(r"\n", "\n"),
-                    text_args=dict(size="medium", color="w"))
-        pc.set_width(opts.max_width,'1')
-        # Check the output directory
-        if not os.path.isdir(opts.output):
-            os.mkdir(opts.output)
-        # Figure out our zoom factor
-        # Recall that factor^nframes = min_width / max_width
-        # so factor = (log(min/max)/log(nframes))
-        mylog.info("min_width: %0.3e max_width: %0.3e nframes: %0.3e",
-                   min_width, opts.max_width, opts.nframes)
-        factor=10**(math.log10(min_width/opts.max_width)/opts.nframes)
-        mylog.info("Zoom factor: %0.3e", factor)
-        w = 1.0
-        for i in range(opts.nframes):
-            mylog.info("Setting width to %0.3e", w)
-            mylog.info("Saving frame %06i",i)
-            pc.set_width(w,"1")
-            if opts.zlim:
-                pc.set_zlim(*opts.zlim)
-            if opts.dex:
-                pc.set_zlim('min', None, opts.dex)
-            pc.set_cmap(opts.cmap)
-            pc.save(os.path.join(opts.output,"%s_frame%06i" % (pf,i)))
-            w = factor**i
-
     @add_cmd_options(["width", "unit", "bn", "proj", "center",
                       "zlim", "axis", "field", "weight", "skip",
                       "cmap", "output", "grids", "time"])
@@ -571,7 +613,7 @@
                 "%s (%0.5e years): %0.5e at %s\n" % (pf, t, v, c))
 
     @add_cmd_options([])
-    def do_analyze(self, subcmd, opts, arg):
+    def _do_analyze(self, subcmd, opts, arg):
         """
         Produce a set of analysis for a given output.  This includes
         HaloProfiler results with r200, as per the recipe file in the cookbook,
@@ -663,7 +705,7 @@
                   help="File to output to; else, print.")
     def do_pastebin(self, subcmd, opts, arg):
         """
-        Post a script to an anonymous pastebin.
+        Post a script to an anonymous pastebin
 
         Usage: yt pastebin [options] <script>
 
@@ -678,7 +720,7 @@
         """
         Print an online pastebin to STDOUT for local use. Paste ID is 
         the number at the end of the url.  So to locally access pastebin:
-        http://paste.enzotools.org/show/1688/
+        http://paste.yt-project.org/show/1688/
 
         Usage: yt pastebin_grab <Paste ID> 
         Ex: yt pastebin_grab 1688 > script.py
@@ -692,7 +734,7 @@
                   help="File to output to; else, print.")
     def do_pasteboard_grab(self, subcmd, opts, username, paste_id):
         """
-        Download from your or another user's pasteboard.
+        Download from your or another user's pasteboard
 
         ${cmd_usage} 
         ${cmd_option_list}
@@ -714,7 +756,7 @@
         print "==============================================================="
         print "At any time in advance of the upload of the bug, you should feel free"
         print "to ctrl-C out and submit the bug report manually by going here:"
-        print "   http://hg.enzotools.org/yt/issues/new"
+        print "   http://hg.yt-project.org/yt/issues/new"
         print 
         print "Also, in order to submit a bug through this interface, you"
         print "need a Bitbucket account. If you don't have one, exit this "
@@ -722,12 +764,12 @@
         print
         print "Have you checked the existing bug reports to make"
         print "sure your bug has not already been recorded by someone else?"
-        print "   http://hg.enzotools.org/yt/issues?status=new&status=open"
+        print "   http://hg.yt-project.org/yt/issues?status=new&status=open"
         print
         print "Finally, are you sure that your bug is, in fact, a bug? It might"
         print "simply be a misunderstanding that could be cleared up by"
         print "visiting the yt irc channel or getting advice on the email list:"
-        print "   http://yt.enzotools.org/irc.html"
+        print "   http://yt-project.org/irc.html"
         print "   http://lists.spacepope.org/listinfo.cgi/yt-users-spacepope.org"
         print
         summary = raw_input("Press <enter> if you remain firm in your conviction to continue.")
@@ -802,7 +844,7 @@
         retval = bb_apicall(endpoint, data, use_pass=True)
         import json
         retval = json.loads(retval)
-        url = "http://hg.enzotools.org/yt/issue/%s" % retval['local_id']
+        url = "http://hg.yt-project.org/yt/issue/%s" % retval['local_id']
         print 
         print "==============================================================="
         print
@@ -839,32 +881,7 @@
             print "*** to point to the installation location!        ***"
             print
             sys.exit(1)
-        supp_path = os.path.join(os.environ["YT_DEST"], "src",
-                                 "yt-supplemental")
-        # Now we check that the supplemental repository is checked out.
-        if not os.path.isdir(supp_path):
-            print
-            print "*** The yt-supplemental repository is not checked ***"
-            print "*** out.  I can do this for you, but because this ***"
-            print "*** is a delicate act, I require you to respond   ***"
-            print "*** to the prompt with the word 'yes'.            ***"
-            print
-            response = raw_input("Do you want me to try to check it out? ")
-            if response != "yes":
-                print
-                print "Okay, I understand.  You can check it out yourself."
-                print "This command will do it:"
-                print
-                print "$ hg clone http://hg.enzotools.org/yt-supplemental/ ",
-                print "%s" % (supp_path)
-                print
-                sys.exit(1)
-            rv = commands.clone(uu,
-                    "http://hg.enzotools.org/yt-supplemental/", supp_path)
-            if rv:
-                print "Something has gone wrong.  Quitting."
-                sys.exit(1)
-        # Now we think we have our supplemental repository.
+        supp_path = _get_yt_supp()
         print
         print "I have found the yt-supplemental repository at %s" % (supp_path)
         print
@@ -1166,7 +1183,7 @@
                   help="Add a debugging mode for cell execution")
     def do_serve(self, subcmd, opts):
         """
-        Run the Web GUI
+        Run the Web GUI Reason
         """
         # We have to do a couple things.
         # First, we check that YT_DEST is set.
@@ -1217,7 +1234,7 @@
                     port=int(opts.port), repl=hr)
 
     
-    def do_remote(self, subcmd, opts):
+    def _do_remote(self, subcmd, opts):
         import getpass, sys, socket, time, webbrowser
         import yt.utilities.pexpect as pex
 
@@ -1266,23 +1283,301 @@
         while 1:
             time.sleep(1)
 
-    def do_intents(self, subcmd, opts, *intents):
+    @cmdln.option("-R", "--repo", action="store", type="string",
+                  dest="repo", default=".", help="Repository to upload")
+    def do_hubsubmit(self, subcmd, opts):
         """
-        ${cmd_name}: What are your ... intentions?
+        Submit a mercurial repository to the yt Hub
+        (http://hub.yt-project.org/), creating a BitBucket repo in the process
+        if necessary.
 
         ${cmd_usage}
         ${cmd_option_list}
         """
-        from yt.utilities.cookbook import Intent
-        if len(intents) == 0:
-            Intent.list_intents()
+        import imp
+        from mercurial import hg, ui, commands, error, config
+        uri = "http://hub.yt-project.org/3rdparty/API/api.php"
+        supp_path = _get_yt_supp()
+        try:
+            result = imp.find_module("cedit", [supp_path])
+        except ImportError:
+            print "I was unable to find the 'cedit' module in %s" % (supp_path)
+            print "This may be due to a broken checkout."
+            print "Sorry, but I'm going to bail."
+            sys.exit(1)
+        cedit = imp.load_module("cedit", *result)
+        try:
+            result = imp.find_module("hgbb", [supp_path + "/hgbb"])
+        except ImportError:
+            print "I was unable to find the 'hgbb' module in %s" % (supp_path)
+            print "This may be due to a broken checkout."
+            print "Sorry, but I'm going to bail."
+            sys.exit(1)
+        hgbb = imp.load_module("hgbb", *result)
+        uu = ui.ui()
+        try:
+            repo = hg.repository(uu, opts.repo)
+            conf = config.config()
+            if os.path.exists(os.path.join(opts.repo,".hg","hgrc")):
+                conf.read(os.path.join(opts.repo, ".hg", "hgrc"))
+            needs_bb = True
+            if "paths" in conf.sections():
+                default = conf['paths'].get("default", "")
+                if default.startswith("bb://") or "bitbucket.org" in default:
+                    needs_bb = False
+                    bb_url = default
+                else:
+                    for alias, value in conf["paths"].items():
+                        if value.startswith("bb://") or "bitbucket.org" in value:
+                            needs_bb = False
+                            bb_url = value
+                            break
+        except error.RepoError:
+            print "Unable to find repo at:"
+            print "   %s" % (os.path.abspath(opts.repo))
+            print
+            print "Would you like to initialize one?  If this message"
+            print "surprises you, you should perhaps press Ctrl-C to quit."
+            print "Otherwise, type 'yes' at the prompt."
+            print
+            loki = raw_input("Create repo? ")
+            if loki.upper().strip() != "YES":
+                print "Okay, rad -- we'll let you handle it and get back to",
+                print " us."
+                return 1
+            commands.init(uu, dest=opts.repo)
+            repo = hg.repository(uu, opts.repo)
+            commands.add(uu, repo)
+            commands.commit(uu, repo, message="Initial automated import by yt")
+            needs_bb = True
+        if needs_bb:
+            print
+            print "Your repository is not yet on BitBucket, as near as I can tell."
+            print "Would you like to create a repository there and upload to it?"
+            print "Without this, I don't know what URL to submit!"
+            print
+            print "Type 'yes' to accept."
+            print
+            loki = raw_input("Upload to BitBucket? ")
+            if loki.upper().strip() != "YES": return 1
+            hgrc_path = [cedit.config.defaultpath("user", uu)]
+            hgrc_path = cedit.config.verifypaths(hgrc_path)
+            uu.readconfig(hgrc_path[0])
+            bb_username = uu.config("bb", "username", None)
+            if bb_username is None:
+                print "Can't find your Bitbucket username.  Run the command:"
+                print
+                print "$ yt bootstrap_dev"
+                print
+                print "to get set up and ready to go."
+                return 1
+            bb_repo_name = os.path.basename(os.path.abspath(opts.repo))
+            print
+            print "I am now going to create the repository:"
+            print "    ", bb_repo_name
+            print "on BitBucket.org and upload this repository to that."
+            print "If that is not okay, please press Ctrl-C to quit."
+            print
+            loki = raw_input("Press Enter to continue.")
+            data = dict(name=bb_repo_name)
+            hgbb._bb_apicall(uu, 'repositories', data)
+            print
+            print "Created repository!  Now I will set this as the default path."
+            print
+            bb_url = "https://%s@bitbucket.org/%s/%s" % (
+                        bb_username, bb_username, bb_repo_name)
+            cedit.config.addsource(uu, repo, "default", bb_url)
+            commands.push(uu, repo, bb_url)
+        if bb_url.startswith("bb://"):
+            bb_username, bb_repo_name = bb_url.split("/")[-2:]
+            bb_url = "https://%s@bitbucket.org/%s/%s" % (
+                bb_username, bb_username, bb_repo_name)
+        # Now we can submit
+        import xml.etree.ElementTree as etree
+        print
+        print "Okay.  Now we're ready to submit to the Hub."
+        print "Remember, you can go to the Hub at any time at"
+        print " http://hub.yt-project.org/"
+        print
+        print "(Especially if you don't have a user yet!  We can wait.)"
+        print
+        hub_username = raw_input("What is your Hub username? ")
+        hub_password = getpass.getpass("What is your Hub password? ")
+        data = urllib.urlencode(dict(fn = "list",
+                                     username=hub_username,
+                                     password=hub_password))
+        req = urllib2.Request(uri, data)
+        rv = urllib2.urlopen(req).read()
+        try:
+            cats = etree.fromstring(rv)
+        except:
+            print "I think you entered your password wrong.  Please check!"
+            return
+
+        categories = {}
+
+        for cat in cats.findall("./cate"):
+            cat_id = int(cat.findall("id")[0].text)
+            cat_name = cat.findall("name")[0].text
+            categories[cat_id] = cat_name
+
+        print
+        for i, n in sorted(categories.items()):
+            print "%i. %s" % (i, n)
+        print
+        cat_id = int(raw_input("Which category number does your script fit into? "))
+        print
+        print "What is the title of your submission? (Usually a repository name) "
+        title = raw_input("Title? ")
+        print
+        print "What tags should be applied to this submission?  Separate with commas."
+        print "(e.g., enzo, flash, gadget, ramses, nyx, yt, visualization, analysis,"
+        print " utility, cosmology)"
+        print
+        tags = raw_input("Tags? ")
+        print
+        print "Give us a very brief summary of the project -- enough to get someone"
+        print "interested enough to click the link and see what it's about.  This"
+        print "should be a few sentences at most."
+        print
+        summary = raw_input("Summary? ")
+        print
+        print "Okay, we're going to submit!  Press enter to submit, Ctrl-C to back out."
+        print
+        loki = raw_input()
+
+        data = urllib.urlencode(dict(fn = "post",
+                                     username=hub_username, password=hub_password,
+                                     url = bb_url, category = cat_id, title = title,
+                                     content = summary, tags = tags))
+        req = urllib2.Request(uri, data)
+        rv = urllib2.urlopen(req).read()
+        print rv
+
+    def do_upload_image(self, subcmd, opts, filename):
+        """
+        Upload an image to imgur.com.  Must be PNG.
+
+        ${cmd_usage} 
+        ${cmd_option_list}
+        """
+        if not filename.endswith(".png"):
+            print "File must be a PNG file!"
+            return 1
+        import base64, json, pprint
+        image_data = base64.b64encode(open(filename, 'rb').read())
+        api_key = 'f62d550859558f28c4c214136bc797c7'
+        parameters = {'key':api_key, 'image':image_data, 'type':'base64',
+                      'caption': "",
+                      'title': "%s uploaded by yt" % filename}
+        data = urllib.urlencode(parameters)
+        req = urllib2.Request('http://api.imgur.com/2/upload.json', data)
+        try:
+            response = urllib2.urlopen(req).read()
+        except urllib2.HTTPError as e:
+            print "ERROR", e
+            return {'uploaded':False}
+        rv = json.loads(response)
+        if 'upload' in rv and 'links' in rv['upload']:
+            print
+            print "Image successfully uploaded!  You can find it at:"
+            print "    %s" % (rv['upload']['links']['imgur_page'])
+            print
+            print "If you'd like to delete it, visit this page:"
+            print "    %s" % (rv['upload']['links']['delete_page'])
+            print
         else:
-            intent = Intent.select_intent(intents[0])
-            if intent is None:
-                print "Could not find %s" % intents[0]
-                return 1
-            intent_inst = intent(intents[1:])
-            intent_inst.run()
+            print
+            print "Something has gone wrong!  Here is the server response:"
+            print
+            pprint.pprint(rv)
+
+    @add_cmd_options(["width", "unit", "center","enhance",'outputfn',
+                      "field", "cmap", "contours", "viewpoint",
+                      "pixels","up","valrange","log","contour_width"])
+    @check_args
+    def do_render(self, subcmd, opts, arg):
+        """
+        Create a simple volume rendering
+
+        ${cmd_usage}
+        ${cmd_option_list}
+        """
+        pf = _fix_pf(arg)
+        center = opts.center
+        if opts.center == (-1,-1,-1):
+            mylog.info("No center fed in; seeking.")
+            v, center = pf.h.find_max("Density")
+        elif opts.center is None:
+            center = 0.5*(pf.domain_left_edge + pf.domain_right_edge)
+        center = na.array(center)
+
+        L = opts.viewpoint
+        if L is None:
+            L = [1.]*3
+        L = na.array(L)
+
+        unit = opts.unit
+        if unit is None:
+            unit = '1'
+        width = opts.width
+        if width is None:
+            width = 0.5*(pf.domain_right_edge - pf.domain_left_edge)
+        width /= pf[unit]
+
+        N = opts.pixels
+        if N is None:
+            N = 512 
+        
+        up = opts.up
+        if up is None:
+            up = [0.,0.,1.]
+            
+        field = opts.field
+        if field is None:
+            field = 'Density'
+        
+        log = opts.takelog
+        if log is None:
+            log = True
+
+        myrange = opts.valrange
+        if myrange is None:
+            roi = pf.h.region(center, center-width, center+width)
+            mi, ma = roi.quantities['Extrema'](field)[0]
+            if log:
+                mi, ma = na.log10(mi), na.log10(ma)
+        else:
+            mi, ma = myrange[0], myrange[1]
+
+        n_contours = opts.contours
+        if n_contours is None:
+            n_contours = 7
+
+        contour_width = opts.contour_width
+
+        cmap = opts.cmap
+        if cmap is None:
+            cmap = 'jet'
+        tf = ColorTransferFunction((mi-2, ma+2))
+        tf.add_layers(n_contours,w=contour_width,col_bounds = (mi,ma), colormap=cmap)
+
+        cam = pf.h.camera(center, L, width, (N,N), transfer_function=tf)
+        image = cam.snapshot()
+
+        if opts.enhance:
+            for i in range(3):
+                image[:,:,i] = image[:,:,i]/(image[:,:,i].mean() + 5.*image[:,:,i].std())
+            image[image>1.0]=1.0
+            
+        save_name = opts.output
+        if save_name is None:
+            save_name = "%s"%pf+"_"+field+"_rendering.png"
+        if not '.png' in save_name:
+            save_name += '.png'
+        if cam._mpi_get_rank() != -1:
+            write_bitmap(image,save_name)
+        
 
 def run_main():
     for co in ["--parallel", "--paste"]:
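
For reference, the new render subcommand chains a region-based value range, a layered transfer function, and a camera snapshot. The same pipeline as a standalone script, sketched from the calls visible above (the dataset name is hypothetical):

    import numpy as na
    from yt.mods import *

    pf = load("RedshiftOutput0005")        # hypothetical dataset
    v, center = pf.h.find_max("Density")
    width = 0.5 * (pf.domain_right_edge - pf.domain_left_edge)

    # Pull the field extrema from a region around the center, in log space.
    roi = pf.h.region(center, center - width, center + width)
    mi, ma = na.log10(roi.quantities["Extrema"]("Density")[0])

    # Seven Gaussian layers spanning the data range, as in the defaults.
    tf = ColorTransferFunction((mi - 2, ma + 2))
    tf.add_layers(7, col_bounds=(mi, ma), colormap="jet")

    cam = pf.h.camera(center, [1., 1., 1.], width, (512, 512),
                      transfer_function=tf)
    write_bitmap(cam.snapshot(), "rendering.png")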


--- a/yt/utilities/cookbook.py	Sat Aug 13 17:51:17 2011 -0600
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,109 +0,0 @@
-"""
-A way to find and utilize recipes
-
-Author: Matthew Turk <matthewturk at gmail.com>
-Affiliation: KIPAC/SLAC/Stanford
-Homepage: http://yt.enzotools.org/
-License:
-  Copyright (C) 2011 Matthew Turk.  All Rights Reserved.
-
-  This file is part of yt.
-
-  yt is free software; you can redistribute it and/or modify
-  it under the terms of the GNU General Public License as published by
-  the Free Software Foundation; either version 3 of the License, or
-  (at your option) any later version.
-
-  This program is distributed in the hope that it will be useful,
-  but WITHOUT ANY WARRANTY; without even the implied warranty of
-  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-  GNU General Public License for more details.
-
-  You should have received a copy of the GNU General Public License
-  along with this program.  If not, see <http://www.gnu.org/licenses/>.
-"""
-
-# See also:
-#  http://en.wikipedia.org/wiki/Me_(mythology)
-
-import os
-import argparse
-import abc
-import glob
-import imp
-import types
-import sys
-import pprint
-
-def _load_intent(intent_path):
-    mname = os.path.basename(intent_path[:-3])
-    f, filename, desc = imp.find_module(mname,
-            [os.path.dirname(intent_path)])
-    intent = imp.load_module(mname, f, filename, desc)
-    for i in sorted(dir(intent)):
-        obj = getattr(intent, i)
-        if issubclass(obj, Intent) and \
-           isinstance(obj.desc, types.StringTypes):
-             return obj
-    return None
-
-def _find_cookbook_dir():
-    yt_dest = os.environ.get("YT_DEST", None)
-    if yt_dest is None:
-        print "YT_DEST is not set!  Set it and try again."
-        return False
-    cookbook_dir = os.path.join(yt_dest,
-        "src/yt-supplemental/yt-cookbook/intents")
-    if not os.path.isdir(cookbook_dir):
-        print "Cookbook does not contain 'intents' directory."
-        print "Update with 'yt instinfo -u' and try again."
-        print "(%s)" % cookbook_dir
-        return False
-    return cookbook_dir
-
-class Intent(object):
-    __metaclass__ = abc.ABCMeta
-
-    def __init__(self, args):
-        self.args = args
-        if "help" in self.args:
-            print
-            print "The arguments to supply, in order:"
-            print
-            print self.help
-            print
-            sys.exit()
-
-    @abc.abstractmethod
-    def run(self):
-        pass
-
-    @abc.abstractproperty
-    def desc(self): pass
-
-    @abc.abstractproperty
-    def help(self): pass
-
-    @classmethod
-    def list_intents(self):
-        intents = []
-        cookbook_dir = _find_cookbook_dir()
-        if cookbook_dir is False: return 1
-        for fn in sorted(glob.glob(os.path.join(cookbook_dir, "*"))):
-            # We skim them, looking for the 'Intent' subclass
-            if any(("(Intent):" in line for line in open(fn))):
-                intents.append((os.path.basename(fn)[:-3],
-                                _load_intent(fn)))
-        print
-        print "Found these Intents:"
-        print "\n".join(("% 15s: %s" % (a, b.desc) for a, b in intents))
-        print
-
-    @classmethod
-    def select_intent(self, intent_name):
-        cookbook_dir = _find_cookbook_dir()
-        intent = None
-        for fn in glob.glob(os.path.join(cookbook_dir, "*")):
-            if os.path.basename(fn)[:-3] == intent_name:
-                intent = _load_intent(fn)
-        return intent


--- a/yt/utilities/cosmology.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/utilities/cosmology.py	Fri Sep 09 10:33:52 2011 -0400
@@ -4,7 +4,7 @@
 
 Author: Britton Smith <brittons at origins.colorado.edu>
 Affiliation: CASA/University of CO, Boulder
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2008-2011 Britton Smith.  All Rights Reserved.
 


--- a/yt/utilities/data_point_utilities.c	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/utilities/data_point_utilities.c	Fri Sep 09 10:33:52 2011 -0400
@@ -273,6 +273,111 @@
 
 }
 
+static PyObject *_profile1DError;
+
+static PyObject *Py_Bin1DProfile(PyObject *obj, PyObject *args)
+{
+    int i, j;
+    PyObject *obins_x, *owsource, *obsource, *owresult, *obresult, *oused;
+    PyArrayObject *bins_x, *wsource, *bsource, *wresult, *bresult, *used;
+    bins_x = wsource = bsource = wresult = bresult = used = NULL;
+
+    if (!PyArg_ParseTuple(args, "OOOOOO",
+                &obins_x, &owsource, &obsource,
+                &owresult, &obresult, &oused))
+        return PyErr_Format(_profile1DError,
+                "Bin1DProfile: Invalid parameters.");
+    i = 0;
+
+    bins_x = (PyArrayObject *) PyArray_FromAny(obins_x,
+                    PyArray_DescrFromType(NPY_INT64), 1, 1,
+                    NPY_IN_ARRAY, NULL);
+    if(bins_x==NULL) {
+    PyErr_Format(_profile1DError,
+             "Bin1DProfile: One dimension required for bins_x.");
+    goto _fail;
+    }
+    
+    wsource = (PyArrayObject *) PyArray_FromAny(owsource,
+                    PyArray_DescrFromType(NPY_FLOAT64), 1, 1,
+                    NPY_IN_ARRAY, NULL);
+    if((wsource==NULL) || (PyArray_SIZE(bins_x) != PyArray_SIZE(wsource))) {
+    PyErr_Format(_profile1DError,
+             "Bin1DProfile: One dimension required for wsource, same size as bins_x.");
+    goto _fail;
+    }
+    
+    bsource = (PyArrayObject *) PyArray_FromAny(obsource,
+                    PyArray_DescrFromType(NPY_FLOAT64), 1, 1,
+                    NPY_IN_ARRAY, NULL);
+    if((bsource==NULL) || (PyArray_SIZE(bins_x) != PyArray_SIZE(bsource))) {
+    PyErr_Format(_profile1DError,
+             "Bin1DProfile: One dimension required for bsource, same size as bins_x.");
+    goto _fail;
+    }
+
+    wresult = (PyArrayObject *) PyArray_FromAny(owresult,
+                    PyArray_DescrFromType(NPY_FLOAT64), 1,1,
+                    NPY_INOUT_ARRAY | NPY_UPDATEIFCOPY, NULL);
+    if(wresult==NULL){
+    PyErr_Format(_profile1DError,
+             "Bin1DProfile: Two dimensions required for wresult.");
+    goto _fail;
+    }
+
+    bresult = (PyArrayObject *) PyArray_FromAny(obresult,
+                    PyArray_DescrFromType(NPY_FLOAT64), 1,1,
+                    NPY_INOUT_ARRAY | NPY_UPDATEIFCOPY, NULL);
+    if((bresult==NULL) ||(PyArray_SIZE(wresult) != PyArray_SIZE(bresult))
+       || (PyArray_DIM(bresult,0) != PyArray_DIM(wresult,0))){
+    PyErr_Format(_profile1DError,
+             "Bin1DProfile: Two dimensions required for bresult, same shape as wresult.");
+    goto _fail;
+    }
+    
+    used = (PyArrayObject *) PyArray_FromAny(oused,
+                    PyArray_DescrFromType(NPY_FLOAT64), 1,1,
+                    NPY_INOUT_ARRAY | NPY_UPDATEIFCOPY, NULL);
+    if((used==NULL) ||(PyArray_SIZE(used) != PyArray_SIZE(wresult))
+       || (PyArray_DIM(used,0) != PyArray_DIM(wresult,0))){
+    PyErr_Format(_profile1DError,
+             "Bin1DProfile: Two dimensions required for used, same shape as wresult.");
+    goto _fail;
+    }
+
+    npy_float64 wval, bval;
+    int n;
+
+    for(n=0; n<bins_x->dimensions[0]; n++) {
+      i = *(npy_int64*)PyArray_GETPTR1(bins_x, n);
+      bval = *(npy_float64*)PyArray_GETPTR1(bsource, n);
+      wval = *(npy_float64*)PyArray_GETPTR1(wsource, n);
+      *(npy_float64*)PyArray_GETPTR1(wresult, i) += wval;
+      *(npy_float64*)PyArray_GETPTR1(bresult, i) += wval*bval;
+      *(npy_float64*)PyArray_GETPTR1(used, i) = 1.0;
+    }
+
+      Py_DECREF(bins_x); 
+      Py_DECREF(wsource); 
+      Py_DECREF(bsource); 
+      Py_DECREF(wresult); 
+      Py_DECREF(bresult); 
+      Py_DECREF(used);
+    
+      PyObject *onum_found = PyInt_FromLong((long)1);
+      return onum_found;
+    
+    _fail:
+      Py_XDECREF(bins_x); 
+      Py_XDECREF(wsource); 
+      Py_XDECREF(bsource); 
+      Py_XDECREF(wresult); 
+      Py_XDECREF(bresult); 
+      Py_XDECREF(used);
+      return NULL;
+
+}
+
 static PyObject *_profile2DError;
 
 static PyObject *Py_Bin2DProfile(PyObject *obj, PyObject *args)
@@ -1674,6 +1779,7 @@
     {"Interpolate", Py_Interpolate, METH_VARARGS},
     {"DataCubeRefine", Py_DataCubeRefine, METH_VARARGS},
     {"DataCubeReplace", Py_DataCubeReplace, METH_VARARGS},
+    {"Bin1DProfile", Py_Bin1DProfile, METH_VARARGS},
     {"Bin2DProfile", Py_Bin2DProfile, METH_VARARGS},
     {"Bin3DProfile", Py_Bin3DProfile, METH_VARARGS},
     {"FindContours", Py_FindContours, METH_VARARGS},

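For reference, a minimal sketch of driving the new binner from NumPy; the
import path mirrors where Bin2DProfile already lives, and the array shapes
are inferred from the argument checks above rather than documented API:

    import numpy as np
    from yt.utilities.data_point_utilities import Bin1DProfile

    nbins, nsamples = 8, 1000
    bins_x  = np.random.randint(0, nbins, nsamples).astype("int64")  # bin index per sample
    wsource = np.ones(nsamples, dtype="float64")    # per-sample weights
    bsource = np.random.random(nsamples)            # per-sample field values
    wresult = np.zeros(nbins, dtype="float64")      # accumulates sum(w) per bin
    bresult = np.zeros(nbins, dtype="float64")      # accumulates sum(w*b) per bin
    used    = np.zeros(nbins, dtype="float64")      # set to 1.0 wherever a bin is hit

    Bin1DProfile(bins_x, wsource, bsource, wresult, bresult, used)

    # The weighted mean per bin follows from the two accumulators.
    hit = used > 0
    profile = np.zeros(nbins)
    profile[hit] = bresult[hit] / wresult[hit]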

--- a/yt/utilities/definitions.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/utilities/definitions.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: KIPAC/SLAC/Stanford
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 
 @todo: Move into yt.Defs, along with enki.EnkiDefs
 License:


--- a/yt/utilities/exceptions.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/utilities/exceptions.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: KIPAC/SLAC/Stanford
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2009 Matthew Turk.  All Rights Reserved.
 


--- a/yt/utilities/io_handler.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/utilities/io_handler.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: KIPAC/SLAC/Stanford
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2007-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/utilities/libconfig_wrapper.pyx	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/utilities/libconfig_wrapper.pyx	Fri Sep 09 10:33:52 2011 -0400
@@ -4,7 +4,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: Columbia University
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/utilities/linear_interpolators.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/utilities/linear_interpolators.py	Fri Sep 09 10:33:52 2011 -0400
@@ -4,7 +4,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: KIPAC/SLAC/Stanford
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2007-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/utilities/lodgeit.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/utilities/lodgeit.py	Fri Sep 09 10:33:52 2011 -0400
@@ -33,7 +33,7 @@
 
 SCRIPT_NAME = os.path.basename(sys.argv[0])
 VERSION = '0.3'
-SERVICE_URL = 'http://paste.enzotools.org/'
+SERVICE_URL = 'http://paste.yt-project.org/'
 SETTING_KEYS = ['author', 'title', 'language', 'private', 'clipboard',
                 'open_browser']
 


--- a/yt/utilities/logger.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/utilities/logger.py	Fri Sep 09 10:33:52 2011 -0400
@@ -4,7 +4,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: KIPAC/SLAC/Stanford
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2007-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/utilities/math_utils.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/utilities/math_utils.py	Fri Sep 09 10:33:52 2011 -0400
@@ -5,7 +5,7 @@
 Affiliation: UCSD Physics/CASS
 Author: Stephen Skory <s at skory.us>
 Affiliation: UCSD Physics/CASS
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2008-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/utilities/parallel_tools/__init__.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/utilities/parallel_tools/__init__.py	Fri Sep 09 10:33:52 2011 -0400
@@ -7,7 +7,7 @@
 Affiliation: KIPAC/SLAC/Stanford
 Author: Britton Smith <brittonsmith at gmail.com>
 Affiliation: MSU
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2010-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/utilities/parallel_tools/distributed_object_collection.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/utilities/parallel_tools/distributed_object_collection.py	Fri Sep 09 10:33:52 2011 -0400
@@ -4,7 +4,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: KIPAC/SLAC/Stanford
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2010-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/utilities/parallel_tools/parallel_analysis_interface.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/utilities/parallel_tools/parallel_analysis_interface.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: KIPAC/SLAC/Stanford
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2008-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/utilities/parallel_tools/task_queue.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/utilities/parallel_tools/task_queue.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: Columbia University
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/utilities/parameter_file_storage.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/utilities/parameter_file_storage.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: KIPAC/SLAC/Stanford
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2008-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/utilities/pasteboard.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/utilities/pasteboard.py	Fri Sep 09 10:33:52 2011 -0400
@@ -1,7 +1,7 @@
 """
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: Columbia University
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/utilities/performance_counters.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/utilities/performance_counters.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: KIPAC/SLAC/Stanford
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2009 Matthew Turk.  All Rights Reserved.
 


--- a/yt/utilities/rpdb.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/utilities/rpdb.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: KIPAC/SLAC/Stanford
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2009 Matthew Turk.  All Rights Reserved.
 


--- a/yt/utilities/voropp.pyx	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/utilities/voropp.pyx	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: Columbia University
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/visualization/__init__.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/visualization/__init__.py	Fri Sep 09 10:33:52 2011 -0400
@@ -8,7 +8,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: KIPAC/SLAC/Stanford
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2007-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/visualization/api.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/visualization/api.py	Fri Sep 09 10:33:52 2011 -0400
@@ -7,7 +7,7 @@
 Affiliation: KIPAC/SLAC/Stanford
 Author: Britton Smith <brittonsmith at gmail.com>
 Affiliation: MSU
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2010-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/visualization/easy_plots.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/visualization/easy_plots.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: UCSD
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2010-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/visualization/eps_writer.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/visualization/eps_writer.py	Fri Sep 09 10:33:52 2011 -0400
@@ -4,7 +4,7 @@
 Author: John Wise <jwise at astro.princeton.edu>
 Date: April 2010
 Affiliation: Princeton
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 
 Requirements: PyX
 
@@ -719,7 +719,7 @@
 def multiplot(ncol, nrow, yt_plots=None, images=None, xranges=None,
               yranges=None, xlabels=None, ylabels=None, colorbars=None,
               shrink_cb=0.95, figsize=(8,8), margins=(0,0), titles=None,
-              savefig=None, yt_nocbar=False, bare_axes=False,
+              savefig=None, format="eps", yt_nocbar=False, bare_axes=False,
               cb_flags=None):
     r"""Convenience routine to create a multi-panel figure from yt plots or
     JPEGs.  The images are first placed from the origin, and then
@@ -756,6 +756,8 @@
         Titles that are placed in textboxes in each panel.
     savefig : string
         Name of the saved file without the extension.
+    format : string
+        File format of the figure; either 'eps' or 'pdf' is accepted.
     yt_nocbar : boolean
         Flag to indicate whether or not colorbars are created.
     bare_axes : boolean
@@ -908,7 +910,7 @@
                                        shrink=shrink_cb)
 
     if savefig != None:
-        d.save_fig(savefig)
+        d.save_fig(savefig, format=format)
 
     return d
 

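In use, the new keyword simply threads through to save_fig; a hedged
example, assuming `plots` is a list of existing yt plot objects (e.g. from
a PlotCollection):

    # format="pdf" now reaches save_fig instead of the hard-wired EPS default.
    d = multiplot(2, 1, yt_plots=plots, savefig="multi_panel", format="pdf")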

--- a/yt/visualization/fixed_resolution.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/visualization/fixed_resolution.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: KIPAC/SLAC/Stanford
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2008-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/visualization/image_panner/__init__.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/visualization/image_panner/__init__.py	Fri Sep 09 10:33:52 2011 -0400
@@ -1,7 +1,7 @@
 """
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: KIPAC/SLAC/Stanford
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2010-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/visualization/image_panner/api.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/visualization/image_panner/api.py	Fri Sep 09 10:33:52 2011 -0400
@@ -7,7 +7,7 @@
 Affiliation: KIPAC/SLAC/Stanford
 Author: Britton Smith <brittonsmith at gmail.com>
 Affiliation: MSU
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2010-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/visualization/image_panner/pan_and_scan_widget.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/visualization/image_panner/pan_and_scan_widget.py	Fri Sep 09 10:33:52 2011 -0400
@@ -1,7 +1,7 @@
 """
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: KIPAC/SLAC/Stanford
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2010-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/visualization/image_panner/vm_panner.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/visualization/image_panner/vm_panner.py	Fri Sep 09 10:33:52 2011 -0400
@@ -1,7 +1,7 @@
 """
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: KIPAC/SLAC/Stanford
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2010-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/visualization/plot_collection.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/visualization/plot_collection.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: KIPAC/SLAC/Stanford
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2007-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/visualization/plot_modifications.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/visualization/plot_modifications.py	Fri Sep 09 10:33:52 2011 -0400
@@ -7,7 +7,7 @@
 Affiliation: UC Berkeley
 Author: Stephen Skory <s at skory.us>
 Affiliation: UC San Diego
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2008-2011 Matthew Turk, JS Oishi, Stephen Skory.  All Rights Reserved.
 
@@ -81,18 +81,20 @@
         else:
             xv = "%s-velocity" % (x_names[plot.data.axis])
             yv = "%s-velocity" % (y_names[plot.data.axis])
-            qcb = QuiverCallback(xv, yv, self.factor, self.scale, self.scale_units)
+            qcb = QuiverCallback(xv, yv, self.factor, scale=self.scale, scale_units=self.scale_units)
         return qcb(plot)
 
 class MagFieldCallback(PlotCallback):
     _type_name = "magnetic_field"
-    def __init__(self, factor=16):
+    def __init__(self, factor=16, scale=None, scale_units=None):
         """
         Adds a 'quiver' plot of magnetic field to the plot, skipping all but
         every *factor* datapoint
         """
         PlotCallback.__init__(self)
         self.factor = factor
+        self.scale  = scale
+        self.scale_units = scale_units
 
     def __call__(self, plot):
         # Instantiation of these is cheap
@@ -101,12 +103,12 @@
         else:
             xv = "B%s" % (x_names[plot.data.axis])
             yv = "B%s" % (y_names[plot.data.axis])
-            qcb = QuiverCallback(xv, yv, self.factor)
+            qcb = QuiverCallback(xv, yv, self.factor, scale=self.scale, scale_units=self.scale_units)
         return qcb(plot)
 
 class QuiverCallback(PlotCallback):
     _type_name = "quiver"
-    def __init__(self, field_x, field_y, factor, scale, scale_units):
+    def __init__(self, field_x, field_y, factor, scale=None, scale_units=None):
         """
         Adds a 'quiver' plot to any plot, using the *field_x* and *field_y*
         from the associated data, skipping every *factor* datapoints

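The net effect is that matplotlib's quiver scaling can now be controlled
from the callbacks; a sketch, assuming a PlotCollection plot `p` exposing
the usual modify registry:

    # scale and scale_units pass straight through to matplotlib's quiver();
    # both default to None, preserving the old autoscaling behavior.
    p.modify["quiver"]("x-velocity", "y-velocity", 16,
                       scale=100.0, scale_units="xy")
    p.modify["magnetic_field"](factor=16, scale=100.0, scale_units="xy")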

--- a/yt/visualization/plot_types.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/visualization/plot_types.py	Fri Sep 09 10:33:52 2011 -0400
@@ -5,7 +5,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: KIPAC/SLAC/Stanford
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2007-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/visualization/plot_window.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/visualization/plot_window.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: J. S. Oishi <jsoishi at gmail.com>
 Affiliation: KIPAC/SLAC/Stanford
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2010-2011 J. S. Oishi.  All Rights Reserved.
 


--- a/yt/visualization/profile_plotter.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/visualization/profile_plotter.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: Columbia University
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/visualization/streamlines.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/visualization/streamlines.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Samuel Skillman <samskillman at gmail.com>
 Affiliation: University of Colorado
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2010-2011 Samuel Skillman.  All Rights Reserved.
 


--- a/yt/visualization/volume_rendering/CUDARayCast.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/visualization/volume_rendering/CUDARayCast.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: KIPAC/SLAC/Stanford
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2009 Matthew Turk.  All Rights Reserved.
 


--- a/yt/visualization/volume_rendering/UBVRI.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/visualization/volume_rendering/UBVRI.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: JS Oishi
 Affiliation: UC Berkeley
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2010-2011 J.S. Oishi.  All Rights Reserved.
 


--- a/yt/visualization/volume_rendering/__init__.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/visualization/volume_rendering/__init__.py	Fri Sep 09 10:33:52 2011 -0400
@@ -7,7 +7,7 @@
 Affiliation: KIPAC/SLAC/Stanford
 Author: Britton Smith <brittonsmith at gmail.com>
 Affiliation: MSU
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2010-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/visualization/volume_rendering/_cuda_caster.cu	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/visualization/volume_rendering/_cuda_caster.cu	Fri Sep 09 10:33:52 2011 -0400
@@ -4,7 +4,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: KIPAC/SLAC/Stanford
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2009 Matthew Turk.  All Rights Reserved.
 


--- a/yt/visualization/volume_rendering/api.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/visualization/volume_rendering/api.py	Fri Sep 09 10:33:52 2011 -0400
@@ -7,7 +7,7 @@
 Affiliation: KIPAC/SLAC/Stanford
 Author: Britton Smith <brittonsmith at gmail.com>
 Affiliation: MSU
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2010-2011 Matthew Turk.  All Rights Reserved.
 


--- a/yt/visualization/volume_rendering/camera.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/visualization/volume_rendering/camera.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: KIPAC/SLAC/Stanford
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2009 Matthew Turk.  All Rights Reserved.
 


--- a/yt/visualization/volume_rendering/grid_partitioner.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/visualization/volume_rendering/grid_partitioner.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: KIPAC/SLAC/Stanford
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2009 Matthew Turk.  All Rights Reserved.
 


--- a/yt/visualization/volume_rendering/image_handling.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/visualization/volume_rendering/image_handling.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Samuel Skillman <samskillman at gmail.com>
 Affiliation: University of Colorado at Boulder
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2010-2011 Samuel Skillman.  All Rights Reserved.
 


--- a/yt/visualization/volume_rendering/software_sampler.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/visualization/volume_rendering/software_sampler.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: KIPAC/SLAC/Stanford
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2009 Matthew Turk.  All Rights Reserved.
 


--- a/yt/visualization/volume_rendering/transfer_function_widget.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/visualization/volume_rendering/transfer_function_widget.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: KIPAC/SLAC/Stanford
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2009 Matthew Turk.  All Rights Reserved.
 


--- a/yt/visualization/volume_rendering/transfer_functions.py	Sat Aug 13 17:51:17 2011 -0600
+++ b/yt/visualization/volume_rendering/transfer_functions.py	Fri Sep 09 10:33:52 2011 -0400
@@ -3,7 +3,7 @@
 
 Author: Matthew Turk <matthewturk at gmail.com>
 Affiliation: KIPAC/SLAC/Stanford
-Homepage: http://yt.enzotools.org/
+Homepage: http://yt-project.org/
 License:
   Copyright (C) 2009 Matthew Turk.  All Rights Reserved.
 


http://bitbucket.org/yt_analysis/yt/changeset/a9cba07253b6/
changeset:   a9cba07253b6
branch:      yt
user:        MatthewTurk
date:        2011-09-09 16:35:09
summary:     Merging from latest development
affected #:  4 files (-1 bytes)

--- a/yt/data_objects/data_containers.py	Sun Sep 04 23:23:46 2011 -0400
+++ b/yt/data_objects/data_containers.py	Fri Sep 09 10:35:09 2011 -0400
@@ -40,7 +40,7 @@
 from yt.data_objects.particle_io import particle_handler_registry
 from yt.utilities.amr_utils import find_grids_in_inclined_box, \
     grid_points_in_volume, planar_points_in_volume, VoxelTraversal, \
-    QuadTree, get_box_grids_below_level
+    QuadTree, get_box_grids_below_level, ghost_zone_interpolate
 from yt.utilities.data_point_utilities import CombineGrids, \
     DataCubeRefine, DataCubeReplace, FillRegion, FillBuffer
 from yt.utilities.definitions import axis_names, x_dict, y_dict
@@ -3104,6 +3104,9 @@
     _type_name = "smoothed_covering_grid"
     @wraps(AMRCoveringGridBase.__init__)
     def __init__(self, *args, **kwargs):
+        self._base_dx = (
+              (self.pf.domain_right_edge - self.pf.domain_left_edge) /
+               self.pf.domain_dimensions.astype("float64"))
         AMRCoveringGridBase.__init__(self, *args, **kwargs)
         self._final_start_index = self.global_startindex
 
@@ -3155,8 +3158,10 @@
             if self._use_pbar: pbar.finish()
 
     def _update_level_state(self, level, field = None):
-        dx = self.pf.h.select_grids(level)[0].dds
-        for ax, v in zip('xyz', dx): self['cd%s'%ax] = v
+        dx = self._base_dx / self.pf.refine_by**level
+        self.data['cdx'] = dx[0]
+        self.data['cdy'] = dx[1]
+        self.data['cdz'] = dx[2]
         LL = self.left_edge - self.pf.domain_left_edge
         self._old_global_startindex = self.global_startindex
         self.global_startindex = na.rint(LL / dx).astype('int64') - 1
@@ -3165,41 +3170,29 @@
         if level == 0 and self.level > 0:
             # We use one grid cell at LEAST, plus one buffer on all sides
             idims = na.rint((self.right_edge-self.left_edge)/dx).astype('int64') + 2
-            self[field] = na.zeros(idims,dtype='float64')-999
+            self.data[field] = na.zeros(idims,dtype='float64')-999
             self._cur_dims = idims.astype("int32")
         elif level == 0 and self.level == 0:
             DLE = self.pf.domain_left_edge
             self.global_startindex = na.array(na.floor(LL/ dx), dtype='int64')
             idims = na.rint((self.right_edge-self.left_edge)/dx).astype('int64')
-            self[field] = na.zeros(idims,dtype='float64')-999
+            self.data[field] = na.zeros(idims,dtype='float64')-999
             self._cur_dims = idims.astype("int32")
 
     def _refine(self, dlevel, field):
         rf = float(self.pf.refine_by**dlevel)
 
-        old_dims = na.array(self[field].shape) - 1
-        old_left = (self._old_global_startindex + 0.5) * rf 
-        old_right = rf*old_dims + old_left
-        old_bounds = [old_left[0], old_right[0],
-                      old_left[1], old_right[1],
-                      old_left[2], old_right[2]]
+        input_left = (self._old_global_startindex + 0.5) * rf 
+        dx = na.fromiter((self['cd%s' % ax] for ax in 'xyz'), count=3, dtype='float64')
+        output_dims = na.rint((self.right_edge-self.left_edge)/dx).astype('int32') + 2
 
-        dx = na.array([self['cd%s' % ax] for ax in 'xyz'], dtype='float64')
-        new_dims = na.rint((self.right_edge-self.left_edge)/dx).astype('int64') + 2
+        self._cur_dims = output_dims
 
-        # x, y, z are the new bounds
-        x,y,z = (na.mgrid[0:new_dims[0], 0:new_dims[1], 0:new_dims[2]]
-                    ).astype('float64') + 0.5
-        x += self.global_startindex[0]
-        y += self.global_startindex[1]
-        z += self.global_startindex[2]
-        fake_grid = {'x':x,'y':y,'z':z}
-
-        interpolator = TrilinearFieldInterpolator(
-                        self[field], old_bounds, ['x','y','z'],
-                        truncate = True)
-        self._cur_dims = new_dims.astype("int32")
-        self[field] = interpolator(fake_grid)
+        output_field = na.zeros(output_dims, dtype="float64")
+        output_left = self.global_startindex + 0.5
+        ghost_zone_interpolate(rf, self[field], input_left,
+                               output_field, output_left)
+        self[field] = output_field
 
     def _get_data_from_grid(self, grid, fields, level):
         fields = ensure_list(fields)

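The key change here is that each level's cell width is now derived from the
domain instead of looked up from the first grid on that level (which fails
when a level has no grids inside the region); in isolation, with assumed
values:

    import numpy as na  # numpy, under yt's alias of the era

    domain_left_edge  = na.array([0.0, 0.0, 0.0])   # assumed domain
    domain_right_edge = na.array([1.0, 1.0, 1.0])
    domain_dimensions = na.array([32, 32, 32])
    refine_by = 2

    base_dx = ((domain_right_edge - domain_left_edge) /
               domain_dimensions.astype("float64"))
    dx_level3 = base_dx / refine_by**3   # cell width on level 3, no grid lookup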

--- a/yt/utilities/_amr_utils/Interpolators.pyx	Sun Sep 04 23:23:46 2011 -0400
+++ b/yt/utilities/_amr_utils/Interpolators.pyx	Fri Sep 09 10:35:09 2011 -0400
@@ -27,6 +27,8 @@
 cimport numpy as np
 cimport cython
 
+@cython.cdivision(True)
+@cython.wraparound(False)
 @cython.boundscheck(False)
 def UnilinearlyInterpolate(np.ndarray[np.float64_t, ndim=1] table,
                            np.ndarray[np.float64_t, ndim=1] x_vals,
@@ -44,6 +46,8 @@
         output[i]  = table[x_i  ] * (xm) \
                    + table[x_i+1] * (xp)
 
+@cython.cdivision(True)
+@cython.wraparound(False)
 @cython.boundscheck(False)
 def BilinearlyInterpolate(np.ndarray[np.float64_t, ndim=2] table,
                           np.ndarray[np.float64_t, ndim=1] x_vals,
@@ -73,6 +77,8 @@
                    + table[x_i  , y_i+1] * (xm*yp) \
                    + table[x_i+1, y_i+1] * (xp*yp)
 
+@cython.cdivision(True)
+@cython.wraparound(False)
 @cython.boundscheck(False)
 def TrilinearlyInterpolate(np.ndarray[np.float64_t, ndim=3] table,
                            np.ndarray[np.float64_t, ndim=1] x_vals,
@@ -114,3 +120,54 @@
                    + table[x_i  ,y_i+1,z_i+1] * (xm*yp*zp) \
                    + table[x_i+1,y_i+1,z_i  ] * (xp*yp*zm) \
                    + table[x_i+1,y_i+1,z_i+1] * (xp*yp*zp)
+
+@cython.cdivision(True)
+@cython.wraparound(False)
+@cython.boundscheck(False)
+def ghost_zone_interpolate(int rf,
+                           np.ndarray[np.float64_t, ndim=3] input_field,
+                           np.ndarray[np.float64_t, ndim=1] input_left,
+                           np.ndarray[np.float64_t, ndim=3] output_field,
+                           np.ndarray[np.float64_t, ndim=1] output_left):
+    cdef int oi, oj, ok
+    cdef int ii, ij, ik
+    cdef np.float64_t xp, xm, yp, ym, zp, zm
+    cdef np.float64_t ods[3], ids[3], iids[3]
+    cdef np.float64_t opos[3], ropos[3], temp
+    cdef int i, j
+    for i in range(3):
+        temp = input_left[i] + (rf * (input_field.shape[i] - 1))
+        ids[i] = (temp - input_left[i])/(input_field.shape[i]-1)
+        temp = output_left[i] + output_field.shape[i] - 1
+        ods[i] = (temp - output_left[i])/(output_field.shape[i]-1)
+        iids[i] = 1.0/ids[i]
+    opos[0] = output_left[0]
+    for oi in range(output_field.shape[0]):
+        ropos[0] = ((opos[0] - input_left[0]) * iids[0])
+        ii = iclip(<int> ropos[0], 0, input_field.shape[0] - 2)
+        xp = ropos[0] - ii
+        xm = 1.0 - xp
+        opos[1] = output_left[1]
+        for oj in range(output_field.shape[1]):
+            ropos[1] = ((opos[1] - input_left[1]) * iids[1])
+            ij = iclip(<int> ropos[1], 0, input_field.shape[1] - 2)
+            yp = ropos[1] - ij
+            ym = 1.0 - yp
+            opos[2] = output_left[2]
+            for ok in range(output_field.shape[2]):
+                ropos[2] = ((opos[2] - input_left[2]) * iids[2])
+                ik = iclip(<int> ropos[2], 0, input_field.shape[2] - 2)
+                zp = ropos[2] - ik
+                zm = 1.0 - zp
+                output_field[oi,oj,ok] = \
+                     input_field[ii  ,ij  ,ik  ] * (xm*ym*zm) \
+                   + input_field[ii+1,ij  ,ik  ] * (xp*ym*zm) \
+                   + input_field[ii  ,ij+1,ik  ] * (xm*yp*zm) \
+                   + input_field[ii  ,ij  ,ik+1] * (xm*ym*zp) \
+                   + input_field[ii+1,ij  ,ik+1] * (xp*ym*zp) \
+                   + input_field[ii  ,ij+1,ik+1] * (xm*yp*zp) \
+                   + input_field[ii+1,ij+1,ik  ] * (xp*yp*zm) \
+                   + input_field[ii+1,ij+1,ik+1] * (xp*yp*zp)
+                opos[2] += ods[2]
+            opos[1] += ods[1]
+        opos[0] += ods[0]

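A pure-NumPy restatement of the kernel may help in reading it (a sketch,
not the shipped implementation): the input samples sit rf indices apart
starting at input_left, and each unit-spaced output cell trilinearly
interpolates, clamped at the edges, between its eight coarse neighbors.

    import numpy as np

    def ghost_zone_interpolate_py(rf, input_field, input_left,
                                  output_field, output_left):
        idx, frac = [], []
        for ax in range(3):
            opos = output_left[ax] + np.arange(output_field.shape[ax],
                                               dtype="float64")
            ropos = (opos - input_left[ax]) / rf   # position in coarse cells
            i = np.clip(ropos.astype("int64"), 0, input_field.shape[ax] - 2)
            idx.append(i)
            frac.append(ropos - i)
        ix, iy, iz = np.ix_(idx[0], idx[1], idx[2])
        fx = frac[0][:, None, None]
        fy = frac[1][None, :, None]
        fz = frac[2][None, None, :]
        output_field[...] = \
             (input_field[ix,   iy,   iz  ] * (1-fx)*(1-fy)*(1-fz)
            + input_field[ix+1, iy,   iz  ] *    fx *(1-fy)*(1-fz)
            + input_field[ix,   iy+1, iz  ] * (1-fx)*   fy *(1-fz)
            + input_field[ix,   iy,   iz+1] * (1-fx)*(1-fy)*   fz
            + input_field[ix+1, iy,   iz+1] *    fx *(1-fy)*   fz
            + input_field[ix,   iy+1, iz+1] * (1-fx)*   fy *   fz
            + input_field[ix+1, iy+1, iz  ] *    fx *   fy *(1-fz)
            + input_field[ix+1, iy+1, iz+1] *    fx *   fy *   fz)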

--- a/yt/utilities/command_line.py	Sun Sep 04 23:23:46 2011 -0400
+++ b/yt/utilities/command_line.py	Fri Sep 09 10:35:09 2011 -0400
@@ -91,7 +91,7 @@
                    action="store", type="float",
                    dest="center", default=None,
                    nargs=3,
-                   help="Center, command separated (-1 -1 -1 for max)"),
+                   help="Center, space separated (-1 -1 -1 for max)"),
     bn      = dict(short="-b", long="--basename",
                    action="store", type="string",
                    dest="basename", default=None,
@@ -161,6 +161,37 @@
                    action="store_true",
                    dest="time", default=False,
                    help="Print time in years on image"),
+    contours    = dict(short="", long="--contours",
+                   action="store",type="int",
+                   dest="contours", default=None,
+                   help="Number of Contours for Rendering"),
+    contour_width  = dict(short="", long="--contour_width",
+                   action="store",type="float",
+                   dest="contour_width", default=None,
+                   help="Width of gaussians used for rendering."),
+    enhance   = dict(short="", long="--enhance",
+                   action="store_true",
+                   dest="enhance", default=False,
+                   help="Enhance!"),
+    valrange  = dict(short="-r", long="--range",
+                   action="store", type="float",
+                   dest="valrange", default=None,
+                   nargs=2,
+                   help="Range, space separated"),
+    up  = dict(short="", long="--up",
+                   action="store", type="float",
+                   dest="up", default=None,
+                   nargs=3,
+                   help="Up, space separated"),
+    viewpoint  = dict(short="", long="--viewpoint",
+                   action="store", type="float",
+                   dest="viewpoint", default=[1., 1., 1.],
+                   nargs=3,
+                   help="Viewpoint, space separated"),
+    pixels    = dict(short="", long="--pixels",
+                   action="store",type="int",
+                   dest="pixels", default=None,
+                   help="Number of Pixels for Rendering"),
     halos   = dict(short="", long="--halos",
                    action="store", type="string",
                    dest="halos",default="multiple",
@@ -1448,6 +1479,93 @@
             print
             pprint.pprint(rv)
 
+    @add_cmd_options(["width", "unit", "center","enhance",'outputfn',
+                      "field", "cmap", "contours", "viewpoint",
+                      "pixels","up","valrange","log","contour_width"])
+    @check_args
+    def do_render(self, subcmd, opts, arg):
+        """
+        Create a simple volume rendering
+
+        ${cmd_usage}
+        ${cmd_option_list}
+        """
+        pf = _fix_pf(arg)
+        center = opts.center
+        if opts.center == (-1,-1,-1):
+            mylog.info("No center fed in; seeking.")
+            v, center = pf.h.find_max("Density")
+        elif opts.center is None:
+            center = 0.5*(pf.domain_left_edge + pf.domain_right_edge)
+        center = na.array(center)
+
+        L = opts.viewpoint
+        if L is None:
+            L = [1.]*3
+        L = na.array(opts.viewpoint)
+
+        unit = opts.unit
+        if unit is None:
+            unit = '1'
+        width = opts.width
+        if width is None:
+            width = 0.5*(pf.domain_right_edge - pf.domain_left_edge)
+        width /= pf[unit]
+
+        N = opts.pixels
+        if N is None:
+            N = 512 
+        
+        up = opts.up
+        if up is None:
+            up = [0.,0.,1.]
+            
+        field = opts.field
+        if field is None:
+            field = 'Density'
+        
+        log = opts.takelog
+        if log is None:
+            log = True
+
+        myrange = opts.valrange
+        if myrange is None:
+            roi = pf.h.region(center, center-width, center+width)
+            mi, ma = roi.quantities['Extrema'](field)[0]
+            if log:
+                mi, ma = na.log10(mi), na.log10(ma)
+        else:
+            mi, ma = myrange[0], myrange[1]
+
+        n_contours = opts.contours
+        if n_contours is None:
+            n_contours = 7
+
+        contour_width = opts.contour_width
+
+        cmap = opts.cmap
+        if cmap is None:
+            cmap = 'jet'
+        tf = ColorTransferFunction((mi-2, ma+2))
+        tf.add_layers(n_contours,w=contour_width,col_bounds = (mi,ma), colormap=cmap)
+
+        cam = pf.h.camera(center, L, width, (N,N), transfer_function=tf)
+        image = cam.snapshot()
+
+        if opts.enhance:
+            for i in range(3):
+                image[:,:,i] = image[:,:,i]/(image[:,:,i].mean() + 5.*image[:,:,i].std())
+            image[image>1.0]=1.0
+            
+        save_name = opts.output
+        if save_name is None:
+            save_name = "%s"%pf+"_"+field+"_rendering.png"
+        if not '.png' in save_name:
+            save_name += '.png'
+        if cam._mpi_get_rank() != -1:
+            write_bitmap(image,save_name)
+        
+
 def run_main():
     for co in ["--parallel", "--paste"]:
         if co in sys.argv: del sys.argv[sys.argv.index(co)]

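Stripped of option handling, the new subcommand boils down to the following
sequence; a sketch with the defaults above filled in, a hypothetical dataset
name, and the assumption that these names are exposed by yt.mods:

    from yt.mods import *

    pf = load("RedshiftOutput0005")          # hypothetical dataset
    v, center = pf.h.find_max("Density")     # the -1 -1 -1 "seek" path
    width = 0.5 * (pf.domain_right_edge - pf.domain_left_edge)

    roi = pf.h.region(center, center - width, center + width)
    mi, ma = na.log10(roi.quantities["Extrema"]("Density")[0])

    tf = ColorTransferFunction((mi - 2, ma + 2))
    tf.add_layers(7, col_bounds=(mi, ma), colormap="jet")

    cam = pf.h.camera(center, [1., 1., 1.], width, (512, 512),
                      transfer_function=tf)
    write_bitmap(cam.snapshot(), "RedshiftOutput0005_Density_rendering.png")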

--- a/yt/visualization/eps_writer.py	Sun Sep 04 23:23:46 2011 -0400
+++ b/yt/visualization/eps_writer.py	Fri Sep 09 10:35:09 2011 -0400
@@ -719,7 +719,7 @@
 def multiplot(ncol, nrow, yt_plots=None, images=None, xranges=None,
               yranges=None, xlabels=None, ylabels=None, colorbars=None,
               shrink_cb=0.95, figsize=(8,8), margins=(0,0), titles=None,
-              savefig=None, yt_nocbar=False, bare_axes=False,
+              savefig=None, format="eps", yt_nocbar=False, bare_axes=False,
               cb_flags=None):
     r"""Convenience routine to create a multi-panel figure from yt plots or
     JPEGs.  The images are first placed from the origin, and then
@@ -756,6 +756,8 @@
         Titles that are placed in textboxes in each panel.
     savefig : string
         Name of the saved file without the extension.
+    format : string
+        File format of the figure; either 'eps' or 'pdf' is accepted.
     yt_nocbar : boolean
         Flag to indicate whether or not colorbars are created.
     bare_axes : boolean
@@ -908,7 +910,7 @@
                                        shrink=shrink_cb)
 
     if savefig != None:
-        d.save_fig(savefig)
+        d.save_fig(savefig, format=format)
 
     return d
 


http://bitbucket.org/yt_analysis/yt/changeset/9b4c0d8b94d2/
changeset:   9b4c0d8b94d2
branch:      yt
user:        MatthewTurk
date:        2011-09-09 16:35:19
summary:     Merging GDF changes
affected #:  6 files (-1 bytes)

--- a/yt/frontends/gdf/api.py	Fri Sep 09 10:35:09 2011 -0400
+++ b/yt/frontends/gdf/api.py	Fri Sep 09 10:35:19 2011 -0400
@@ -29,14 +29,15 @@
 """
 
 from .data_structures import \
-      ChomboGrid, \
-      ChomboHierarchy, \
-      ChomboStaticOutput
+      GDFGrid, \
+      GDFHierarchy, \
+      GDFStaticOutput
 
 from .fields import \
-      ChomboFieldContainer, \
-      ChomboFieldInfo, \
-      add_chombo_field
+      GDFFieldContainer, \
+      GDFFieldInfo, \
+      add_gdf_field
 
 from .io import \
-      IOHandlerChomboHDF5
+      IOHandlerGDFHDF5
+


--- a/yt/frontends/gdf/data_structures.py	Fri Sep 09 10:35:09 2011 -0400
+++ b/yt/frontends/gdf/data_structures.py	Fri Sep 09 10:35:19 2011 -0400
@@ -24,6 +24,9 @@
   along with this program.  If not, see <http://www.gnu.org/licenses/>.
 """
 
+import h5py
+import numpy as na
+import weakref
 from yt.funcs import *
 from yt.data_objects.grid_patch import \
            AMRGridPatch
@@ -33,6 +36,7 @@
            StaticOutput
 
 from .fields import GDFFieldContainer
+import pdb
 
 class GDFGrid(AMRGridPatch):
     _id_offset = 0
@@ -58,6 +62,7 @@
             self.dds = na.array((RE-LE)/self.ActiveDimensions)
         if self.pf.dimensionality < 2: self.dds[1] = 1.0
         if self.pf.dimensionality < 3: self.dds[2] = 1.0
+        # pdb.set_trace()
         self.data['dx'], self.data['dy'], self.data['dz'] = self.dds
 
 class GDFHierarchy(AMRHierarchy):
@@ -66,6 +71,7 @@
     
     def __init__(self, pf, data_style='grid_data_format'):
         self.parameter_file = weakref.proxy(pf)
+        self.data_style = data_style
         # for now, the hierarchy file is the parameter file!
         self.hierarchy_filename = self.parameter_file.parameter_filename
         self.directory = os.path.dirname(self.hierarchy_filename)
@@ -78,8 +84,7 @@
         pass
 
     def _detect_fields(self):
-        ncomp = int(self._fhandle['/'].attrs['num_components'])
-        self.field_list = [c[1] for c in self._fhandle['/'].attrs.listitems()[-ncomp:]]
+        self.field_list = self._fhandle['field_types'].keys()
     
     def _setup_classes(self):
         dd = self._get_data_reader_dict()
@@ -87,9 +92,7 @@
         self.object_types.sort()
 
     def _count_grids(self):
-        self.num_grids = 0
-        for lev in self._levels:
-            self.num_grids += self._fhandle[lev]['Processors'].len()
+        self.num_grids = self._fhandle['/grid_parent_id'].shape[0]
         
     def _parse_hierarchy(self):
         f = self._fhandle # shortcut
@@ -98,24 +101,22 @@
         # 'Chombo_global'
         levels = f.listnames()[1:]
         self.grids = []
-        i = 0
-        for lev in levels:
-            level_number = int(re.match('level_(\d+)',lev).groups()[0])
-            boxes = f[lev]['boxes'].value
-            dx = f[lev].attrs['dx']
-            for level_id, box in enumerate(boxes):
-                si = na.array([box['lo_%s' % ax] for ax in 'ijk'])
-                ei = na.array([box['hi_%s' % ax] for ax in 'ijk'])
-                pg = self.grid(len(self.grids),self,level=level_number,
-                               start = si, stop = ei)
-                self.grids.append(pg)
-                self.grids[-1]._level_id = level_id
-                self.grid_left_edge[i] = dx*si.astype(self.float_type)
-                self.grid_right_edge[i] = dx*(ei.astype(self.float_type) + 1)
-                self.grid_particle_count[i] = 0
-                self.grid_dimensions[i] = ei - si + 1
-                i += 1
+        for i, grid in enumerate(f['data'].keys()):
+            self.grids.append(self.grid(i, self, f['grid_level'][i],
+                                        f['grid_left_index'][i],
+                                        f['grid_dimensions'][i]))
+            self.grids[-1]._level_id = f['grid_level'][i]
+
+        dx = (self.parameter_file.domain_right_edge-
+              self.parameter_file.domain_left_edge)/self.parameter_file.domain_dimensions
+        dx = dx/self.parameter_file.refine_by**(f['grid_level'][:])
+
+        self.grid_left_edge = self.parameter_file.domain_left_edge + dx*f['grid_left_index'][:]
+        self.grid_dimensions = f['grid_dimensions'][:]
+        self.grid_right_edge = self.grid_left_edge + dx*self.grid_dimensions
+        self.grid_particle_count = f['grid_particle_count'][:]
         self.grids = na.array(self.grids, dtype='object')
+        # pdb.set_trace()
 
     def _populate_grid_objects(self):
         for g in self.grids:
@@ -147,11 +148,9 @@
     def __init__(self, filename, data_style='grid_data_format',
                  storage_filename = None):
         StaticOutput.__init__(self, filename, data_style)
-        self._handle = h5py.File(self.filename, "r")
         self.storage_filename = storage_filename
+        self.filename = filename
         self.field_info = self._fieldinfo_class()
-        self._handle.close()
-        del self._handle
         
     def _set_units(self):
         """
@@ -163,21 +162,25 @@
             self._parse_parameter_file()
         self.time_units['1'] = 1
         self.units['1'] = 1.0
-        self.units['unitary'] = 1.0 / (self.domain_right_edge - self.domain_right_edge).max()
+        self.units['unitary'] = 1.0 / (self.domain_right_edge - self.domain_left_edge).max()
         seconds = 1
         self.time_units['years'] = seconds / (365*3600*24.0)
         self.time_units['days']  = seconds / (3600*24.0)
         # This should be improved.
+        self._handle = h5py.File(self.parameter_filename, "r")
         for field_name in self._handle["/field_types"]:
-            self.units[field_name] = self._handle["/%s/field_to_cgs" % field_name]
-
+            self.units[field_name] = self._handle["/field_types/%s" % field_name].attrs['field_to_cgs']
+        del self._handle
+        
     def _parse_parameter_file(self):
+        self._handle = h5py.File(self.parameter_filename, "r")
         sp = self._handle["/simulation_parameters"].attrs
         self.domain_left_edge = sp["domain_left_edge"][:]
         self.domain_right_edge = sp["domain_right_edge"][:]
-        self.refine_by = sp["refine_by"][:]
-        self.dimensionality = sp["dimensionality"][:]
-        self.current_time = sp["current_time"][:]
+        self.domain_dimensions = sp["domain_dimensions"][:]
+        self.refine_by = sp["refine_by"]
+        self.dimensionality = sp["dimensionality"]
+        self.current_time = sp["current_time"]
         self.unique_identifier = sp["unique_identifier"]
         self.cosmological_simulation = sp["cosmological_simulation"]
         if sp["num_ghost_zones"] != 0: raise RuntimeError
@@ -191,7 +194,8 @@
         else:
             self.current_redshift = self.omega_lambda = self.omega_matter = \
                 self.hubble_constant = self.cosmological_simulation = 0.0
-        
+        del self._handle
+            
     @classmethod
     def _is_valid(self, *args, **kwargs):
         try:
@@ -202,4 +206,6 @@
             pass
         return False
 
-
+    def __repr__(self):
+        return self.basename.rsplit(".", 1)[0]
+        


--- a/yt/frontends/gdf/fields.py	Fri Sep 09 10:35:09 2011 -0400
+++ b/yt/frontends/gdf/fields.py	Fri Sep 09 10:35:19 2011 -0400
@@ -1,5 +1,5 @@
 """
-Chombo-specific fields
+GDF-specific fields
 
 Author: J. S. Oishi <jsoishi at gmail.com>
 Affiliation: KIPAC/SLAC/Stanford
@@ -32,82 +32,45 @@
     ValidateGridType
 import yt.data_objects.universal_fields
 
-class ChomboFieldContainer(CodeFieldInfoContainer):
+class GDFFieldContainer(CodeFieldInfoContainer):
     _shared_state = {}
     _field_list = {}
-ChomboFieldInfo = ChomboFieldContainer()
-add_chombo_field = ChomboFieldInfo.add_field
+GDFFieldInfo = GDFFieldContainer()
+add_gdf_field = GDFFieldInfo.add_field
 
-add_field = add_chombo_field
+add_field = add_gdf_field
 
 add_field("density", function=lambda a,b: None, take_log=True,
           validators = [ValidateDataField("density")],
           units=r"\rm{g}/\rm{cm}^3")
 
-ChomboFieldInfo["density"]._projected_units =r"\rm{g}/\rm{cm}^2"
+GDFFieldInfo["density"]._projected_units =r"\rm{g}/\rm{cm}^2"
 
-add_field("X-momentum", function=lambda a,b: None, take_log=False,
-          validators = [ValidateDataField("X-Momentum")],
-          units=r"",display_name=r"B_x")
-ChomboFieldInfo["X-momentum"]._projected_units=r""
+add_field("specific_energy", function=lambda a,b: None, take_log=True,
+          validators = [ValidateDataField("specific_energy")],
+          units=r"\rm{erg}/\rm{g}")
 
-add_field("Y-momentum", function=lambda a,b: None, take_log=False,
-          validators = [ValidateDataField("Y-Momentum")],
-          units=r"",display_name=r"B_y")
-ChomboFieldInfo["Y-momentum"]._projected_units=r""
+add_field("velocity_x", function=lambda a,b: None, take_log=True,
+          validators = [ValidateDataField("velocity_x")],
+          units=r"\rm{cm}/\rm{s}")
 
-add_field("Z-momentum", function=lambda a,b: None, take_log=False,
-          validators = [ValidateDataField("Z-Momentum")],
-          units=r"",display_name=r"B_z")
-ChomboFieldInfo["Z-momentum"]._projected_units=r""
+add_field("velocity_y", function=lambda a,b: None, take_log=True,
+          validators = [ValidateDataField("velocity_y")],
+          units=r"\rm{cm}/\rm{s}")
 
-add_field("X-magnfield", function=lambda a,b: None, take_log=False,
-          validators = [ValidateDataField("X-Magnfield")],
-          units=r"",display_name=r"B_x")
-ChomboFieldInfo["X-magnfield"]._projected_units=r""
+add_field("velocity_z", function=lambda a,b: None, take_log=True,
+          validators = [ValidateDataField("velocity_z")],
+          units=r"\rm{cm}/\rm{s}")
 
-add_field("Y-magnfield", function=lambda a,b: None, take_log=False,
-          validators = [ValidateDataField("Y-Magnfield")],
-          units=r"",display_name=r"B_y")
-ChomboFieldInfo["Y-magnfield"]._projected_units=r""
+add_field("mag_field_x", function=lambda a,b: None, take_log=True,
+          validators = [ValidateDataField("mag_field_x")],
+          units=r"\rm{cm}/\rm{s}")
 
-add_field("Z-magnfield", function=lambda a,b: None, take_log=False,
-          validators = [ValidateDataField("Z-Magnfield")],
-          units=r"",display_name=r"B_z")
-ChomboFieldInfo["Z-magnfield"]._projected_units=r""
+add_field("mag_field_y", function=lambda a,b: None, take_log=True,
+          validators = [ValidateDataField("mag_field_y")],
+          units=r"\rm{cm}/\rm{s}")
 
-def _MagneticEnergy(field,data):
-    return (data["X-magnfield"]**2 +
-            data["Y-magnfield"]**2 +
-            data["Z-magnfield"]**2)/2.
-add_field("MagneticEnergy", function=_MagneticEnergy, take_log=True,
-          units=r"",display_name=r"B^2/8\pi")
-ChomboFieldInfo["MagneticEnergy"]._projected_units=r""
-
-def _xVelocity(field, data):
-    """generate x-velocity from x-momentum and density
-
-    """
-    return data["X-momentum"]/data["density"]
-add_field("x-velocity",function=_xVelocity, take_log=False,
-          units=r'\rm{cm}/\rm{s}')
-
-def _yVelocity(field,data):
-    """generate y-velocity from y-momentum and density
-
-    """
-    #try:
-    #    return data["xvel"]
-    #except KeyError:
-    return data["Y-momentum"]/data["density"]
-add_field("y-velocity",function=_yVelocity, take_log=False,
-          units=r'\rm{cm}/\rm{s}')
-
-def _zVelocity(field,data):
-    """generate z-velocity from z-momentum and density
-
-    """
-    return data["Z-momentum"]/data["density"]
-add_field("z-velocity",function=_zVelocity, take_log=False,
-          units=r'\rm{cm}/\rm{s}')
+add_field("mag_field_z", function=lambda a,b: None, take_log=True,
+          validators = [ValidateDataField("mag_field_z")],
+          units=r"\rm{cm}/\rm{s}")
     


--- a/yt/frontends/gdf/io.py	Fri Sep 09 10:35:09 2011 -0400
+++ b/yt/frontends/gdf/io.py	Fri Sep 09 10:35:19 2011 -0400
@@ -25,44 +25,48 @@
 """
 from yt.utilities.io_handler import \
            BaseIOHandler
+import h5py
 
-class IOHandlerChomboHDF5(BaseIOHandler):
-    _data_style = "chombo_hdf5"
+class IOHandlerGDFHDF5(BaseIOHandler):
+    _data_style = "grid_data_format"
     _offset_string = 'data:offsets=0'
     _data_string = 'data:datatype=0'
 
     def _field_dict(self,fhandle):
-        ncomp = int(fhandle['/'].attrs['num_components'])
-        temp =  fhandle['/'].attrs.listitems()[-ncomp:]
-        val, keys = zip(*temp)
-        val = [int(re.match('component_(\d+)',v).groups()[0]) for v in val]
+        keys = fhandle['field_types'].keys()
+        val = fhandle['field_types'].keys()
+        # ncomp = int(fhandle['/'].attrs['num_components'])
+        # temp =  fhandle['/'].attrs.listitems()[-ncomp:]
+        # val, keys = zip(*temp)
+        # val = [int(re.match('component_(\d+)',v).groups()[0]) for v in val]
         return dict(zip(keys,val))
         
     def _read_field_names(self,grid):
         fhandle = h5py.File(grid.filename,'r')
-        ncomp = int(fhandle['/'].attrs['num_components'])
-
-        return [c[1] for c in f['/'].attrs.listitems()[-ncomp:]]
+        return fhandle['field_types'].keys()
     
     def _read_data_set(self,grid,field):
         fhandle = h5py.File(grid.hierarchy.hierarchy_filename,'r')
+        return fhandle['/data/grid_%010i/'%grid.id+field][:]
+        # field_dict = self._field_dict(fhandle)
+        # lstring = 'level_%i' % grid.Level
+        # lev = fhandle[lstring]
+        # dims = grid.ActiveDimensions
+        # boxsize = dims.prod()
+        
+        # grid_offset = lev[self._offset_string][grid._level_id]
+        # start = grid_offset+field_dict[field]*boxsize
+        # stop = start + boxsize
+        # data = lev[self._data_string][start:stop]
 
-        field_dict = self._field_dict(fhandle)
-        lstring = 'level_%i' % grid.Level
-        lev = fhandle[lstring]
-        dims = grid.ActiveDimensions
-        boxsize = dims.prod()
-        
-        grid_offset = lev[self._offset_string][grid._level_id]
-        start = grid_offset+field_dict[field]*boxsize
-        stop = start + boxsize
-        data = lev[self._data_string][start:stop]
-
-        return data.reshape(dims, order='F')
+        # return data.reshape(dims, order='F')
                                           
 
     def _read_data_slice(self, grid, field, axis, coord):
         sl = [slice(None), slice(None), slice(None)]
         sl[axis] = slice(coord, coord + 1)
-        return self._read_data_set(grid,field)[sl]
+        fhandle = h5py.File(grid.hierarchy.hierarchy_filename,'r')
+        return fhandle['/data/grid_%010i/'%grid.id+field][:][sl]
 
+    # return self._read_data_set(grid,field)[sl]
+


--- a/yt/frontends/setup.py	Fri Sep 09 10:35:09 2011 -0400
+++ b/yt/frontends/setup.py	Fri Sep 09 10:35:19 2011 -0400
@@ -6,6 +6,7 @@
     config = Configuration('frontends',parent_package,top_path)
     config.make_config_py() # installs __config__.py
     #config.make_svn_version_py()
+    config.add_subpackage("gdf")
     config.add_subpackage("chombo")
     config.add_subpackage("enzo")
     config.add_subpackage("flash")


--- a/yt/mods.py	Fri Sep 09 10:35:09 2011 -0400
+++ b/yt/mods.py	Fri Sep 09 10:35:19 2011 -0400
@@ -77,6 +77,9 @@
 from yt.frontends.chombo.api import \
     ChomboStaticOutput, ChomboFieldInfo, add_chombo_field
 
+from yt.frontends.gdf.api import \
+    GDFStaticOutput, GDFFieldInfo, add_gdf_field
+
 from yt.frontends.art.api import \
     ARTStaticOutput, ARTFieldInfo, add_art_field
 
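With the rename in place, the frontend is reachable under its own name; a
sketch, assuming a hypothetical file my_data.gdf written in the
grid_data_format layout read above:

    from yt.frontends.gdf.api import GDFStaticOutput

    pf = GDFStaticOutput("my_data.gdf")   # hypothetical GDF file
    print pf.h.field_list                 # now read from the 'field_types' group
    print pf.domain_dimensions            # parsed from /simulation_parameters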


http://bitbucket.org/yt_analysis/yt/changeset/df38ff10e4e2/
changeset:   df38ff10e4e2
branch:      deliberate_fields
user:        MatthewTurk
date:        2011-09-09 16:50:04
summary:     Merging GDF changes into deliberate fields
affected #:  10 files (-1 bytes)

--- a/yt/convenience.py	Fri Sep 09 10:33:52 2011 -0400
+++ b/yt/convenience.py	Fri Sep 09 10:50:04 2011 -0400
@@ -32,8 +32,7 @@
 from yt.funcs import *
 from yt.config import ytcfg
 from yt.utilities.parameter_file_storage import \
-    output_type_registry, \
-    EnzoRunDatabase
+    output_type_registry
 
 def all_pfs(basedir='.', skip=None, max_depth=1, name_spec="*.hierarchy", **kwargs):
     """
@@ -90,15 +89,6 @@
     if len(candidates) == 1:
         return output_type_registry[candidates[0]](*args, **kwargs)
     if len(candidates) == 0:
-        if ytcfg.get("yt", "enzo_db") != '' \
-           and len(args) == 1 \
-           and isinstance(args[0], types.StringTypes):
-            erdb = EnzoRunDatabase()
-            fn = erdb.find_uuid(args[0])
-            n = "EnzoStaticOutput"
-            if n in output_type_registry \
-               and output_type_registry[n]._is_valid(fn):
-                return output_type_registry[n](fn)
         mylog.error("Couldn't figure out output type for %s", args[0])
         return None
     mylog.error("Multiple output type candidates for %s:", args[0])


--- a/yt/data_objects/static_output.py	Fri Sep 09 10:33:52 2011 -0400
+++ b/yt/data_objects/static_output.py	Fri Sep 09 10:50:04 2011 -0400
@@ -108,6 +108,8 @@
         return self.basename
 
     def _hash(self):
+        if "MetaDataDatasetUUID" in self.parameters:
+            return self["MetaDataDatasetUUID"]
         s = "%s;%s;%s" % (self.basename,
             self.current_time, self.unique_identifier)
         try:


--- a/yt/frontends/gdf/api.py	Fri Sep 09 10:33:52 2011 -0400
+++ b/yt/frontends/gdf/api.py	Fri Sep 09 10:50:04 2011 -0400
@@ -29,13 +29,15 @@
 """
 
 from .data_structures import \
-      ChomboGrid, \
-      ChomboHierarchy, \
-      ChomboStaticOutput
+      GDFGrid, \
+      GDFHierarchy, \
+      GDFStaticOutput
 
 from .fields import \
-      ChomboFieldInfo, \
-      add_chombo_field
+      GDFFieldContainer, \
+      GDFFieldInfo, \
+      add_gdf_field
 
 from .io import \
-      IOHandlerChomboHDF5
+      IOHandlerGDFHDF5
+


--- a/yt/frontends/gdf/data_structures.py	Fri Sep 09 10:33:52 2011 -0400
+++ b/yt/frontends/gdf/data_structures.py	Fri Sep 09 10:50:04 2011 -0400
@@ -68,6 +68,7 @@
     
     def __init__(self, pf, data_style='grid_data_format'):
         self.parameter_file = weakref.proxy(pf)
+        self.data_style = data_style
         # for now, the hierarchy file is the parameter file!
         self.hierarchy_filename = self.parameter_file.parameter_filename
         self.directory = os.path.dirname(self.hierarchy_filename)
@@ -80,8 +81,7 @@
         pass
 
     def _detect_fields(self):
-        ncomp = int(self._fhandle['/'].attrs['num_components'])
-        self.field_list = [c[1] for c in self._fhandle['/'].attrs.listitems()[-ncomp:]]
+        self.field_list = self._fhandle['field_types'].keys()
     
     def _setup_classes(self):
         dd = self._get_data_reader_dict()
@@ -89,9 +89,7 @@
         self.object_types.sort()
 
     def _count_grids(self):
-        self.num_grids = 0
-        for lev in self._levels:
-            self.num_grids += self._fhandle[lev]['Processors'].len()
+        self.num_grids = self._fhandle['/grid_parent_id'].shape[0]
         
     def _parse_hierarchy(self):
         f = self._fhandle # shortcut
@@ -100,23 +98,20 @@
         # 'Chombo_global'
         levels = f.listnames()[1:]
         self.grids = []
-        i = 0
-        for lev in levels:
-            level_number = int(re.match('level_(\d+)',lev).groups()[0])
-            boxes = f[lev]['boxes'].value
-            dx = f[lev].attrs['dx']
-            for level_id, box in enumerate(boxes):
-                si = na.array([box['lo_%s' % ax] for ax in 'ijk'])
-                ei = na.array([box['hi_%s' % ax] for ax in 'ijk'])
-                pg = self.grid(len(self.grids),self,level=level_number,
-                               start = si, stop = ei)
-                self.grids.append(pg)
-                self.grids[-1]._level_id = level_id
-                self.grid_left_edge[i] = dx*si.astype(self.float_type)
-                self.grid_right_edge[i] = dx*(ei.astype(self.float_type) + 1)
-                self.grid_particle_count[i] = 0
-                self.grid_dimensions[i] = ei - si + 1
-                i += 1
+        for i, grid in enumerate(f['data'].keys()):
+            self.grids.append(self.grid(i, self, f['grid_level'][i],
+                                        f['grid_left_index'][i],
+                                        f['grid_dimensions'][i]))
+            self.grids[-1]._level_id = f['grid_level'][i]
+
+        dx = (self.parameter_file.domain_right_edge-
+              self.parameter_file.domain_left_edge)/self.parameter_file.domain_dimensions
+        dx = dx/self.parameter_file.refine_by**(f['grid_level'][:].reshape(-1,1))
+
+        self.grid_left_edge = self.parameter_file.domain_left_edge + dx*f['grid_left_index'][:]
+        self.grid_dimensions = f['grid_dimensions'][:]
+        self.grid_right_edge = self.grid_left_edge + dx*self.grid_dimensions
+        self.grid_particle_count = f['grid_particle_count'][:]
         self.grids = na.array(self.grids, dtype='object')
 
     def _populate_grid_objects(self):
@@ -130,6 +125,9 @@
                 g1.Parent.append(g)
         self.max_level = self.grid_levels.max()
 
+    def _setup_unknown_fields(self):
+        pass
+
     def _setup_derived_fields(self):
         self.derived_field_list = []
 
@@ -147,10 +145,8 @@
     def __init__(self, filename, data_style='grid_data_format',
                  storage_filename = None):
         StaticOutput.__init__(self, filename, data_style)
-        self._handle = h5py.File(self.filename, "r")
         self.storage_filename = storage_filename
-        self._handle.close()
-        del self._handle
+        self.filename = filename
         
     def _set_units(self):
         """
@@ -162,21 +158,26 @@
             self._parse_parameter_file()
         self.time_units['1'] = 1
         self.units['1'] = 1.0
-        self.units['unitary'] = 1.0 / (self.domain_right_edge - self.domain_right_edge).max()
+        self.units['unitary'] = 1.0 / (self.domain_right_edge - self.domain_left_edge).max()
         seconds = 1
         self.time_units['years'] = seconds / (365*3600*24.0)
         self.time_units['days']  = seconds / (3600*24.0)
         # This should be improved.
+        self._handle = h5py.File(self.parameter_filename, "r")
         for field_name in self._handle["/field_types"]:
-            self.units[field_name] = self._handle["/%s/field_to_cgs" % field_name]
+            self.units[field_name] = self._handle["/field_types/%s" % field_name].attrs['field_to_cgs']
+        self._handle.close()
+        del self._handle
 
     def _parse_parameter_file(self):
+        self._handle = h5py.File(self.parameter_filename, "r")
         sp = self._handle["/simulation_parameters"].attrs
         self.domain_left_edge = sp["domain_left_edge"][:]
         self.domain_right_edge = sp["domain_right_edge"][:]
-        self.refine_by = sp["refine_by"][:]
-        self.dimensionality = sp["dimensionality"][:]
-        self.current_time = sp["current_time"][:]
+        self.domain_dimensions = sp["domain_dimensions"][:]
+        self.refine_by = sp["refine_by"]
+        self.dimensionality = sp["dimensionality"]
+        self.current_time = sp["current_time"]
         self.unique_identifier = sp["unique_identifier"]
         self.cosmological_simulation = sp["cosmological_simulation"]
         if sp["num_ghost_zones"] != 0: raise RuntimeError
@@ -190,6 +191,8 @@
         else:
             self.current_redshift = self.omega_lambda = self.omega_matter = \
                 self.hubble_constant = self.cosmological_simulation = 0.0
+        self._handle.close()
+        del self._handle
         
     @classmethod
     def _is_valid(self, *args, **kwargs):
@@ -201,4 +204,6 @@
             pass
         return False
 
-
+    def __repr__(self):
+        return self.basename.rsplit(".", 1)[0]
+        
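
The rewritten _parse_hierarchy above computes all grid edges vectorially
from grid_left_index, grid_dimensions, and a per-grid cell width; note that
the per-grid level exponent needs a trailing axis so it broadcasts against
the three edge components. A small numpy check with made-up values:

    import numpy as np

    domain_left_edge  = np.array([0.0, 0.0, 0.0])
    domain_right_edge = np.array([1.0, 1.0, 1.0])
    domain_dimensions = np.array([8, 8, 8])
    refine_by = 2
    grid_level      = np.array([0, 1])                  # two hypothetical grids
    grid_left_index = np.array([[0, 0, 0], [4, 4, 4]])
    grid_dimensions = np.array([[8, 8, 8], [8, 8, 8]])

    dx = (domain_right_edge - domain_left_edge) / domain_dimensions
    dx = dx / refine_by ** grid_level[:, np.newaxis]    # one cell width per grid
    grid_left_edge  = domain_left_edge + dx * grid_left_index
    grid_right_edge = grid_left_edge + dx * grid_dimensions
    # the level-1 grid spans [0.25, 0.75]^3 at half the root cell width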


--- a/yt/frontends/gdf/fields.py	Fri Sep 09 10:33:52 2011 -0400
+++ b/yt/frontends/gdf/fields.py	Fri Sep 09 10:50:04 2011 -0400
@@ -1,5 +1,5 @@
 """
-Chombo-specific fields
+GDF-specific fields
 
 Author: J. S. Oishi <jsoishi at gmail.com>
 Affiliation: KIPAC/SLAC/Stanford
@@ -30,82 +30,48 @@
     ValidateDataField, \
     ValidateProperty, \
     ValidateSpatial, \
-    ValidateGridType
+    ValidateGridType, \
+    NullFunc, \
+    TranslationFunc
 import yt.data_objects.universal_fields
 
-ChomboFieldInfo = FieldInfoContainer.create_with_fallback(FieldInfo)
-add_chombo_field = ChomboFieldInfo.add_field
+GDFFieldInfo = FieldInfoContainer.create_with_fallback(FieldInfo)
+add_field = GDFFieldInfo.add_field
 
-add_field = add_chombo_field
+KnownGDFFields = FieldInfoContainer()
+add_gdf_field = KnownGDFFields.add_field
 
-add_field("density", function=lambda a,b: None, take_log=True,
+
+add_gdf_field("density", function=NullFunc, take_log=True,
           validators = [ValidateDataField("density")],
-          units=r"\rm{g}/\rm{cm}^3")
+          units=r"\rm{g}/\rm{cm}^3",
+          projected_units =r"\rm{g}/\rm{cm}^2")
 
-ChomboFieldInfo["density"]._projected_units =r"\rm{g}/\rm{cm}^2"
+add_gdf_field("specific_energy", function=NullFunc, take_log=True,
+          validators = [ValidateDataField("specific_energy")],
+          units=r"\rm{erg}/\rm{g}")
 
-add_field("X-momentum", function=lambda a,b: None, take_log=False,
-          validators = [ValidateDataField("X-Momentum")],
-          units=r"",display_name=r"B_x")
-ChomboFieldInfo["X-momentum"]._projected_units=r""
+add_gdf_field("velocity_x", function=NullFunc, take_log=True,
+          validators = [ValidateDataField("velocity_x")],
+          units=r"\rm{cm}/\rm{s}")
 
-add_field("Y-momentum", function=lambda a,b: None, take_log=False,
-          validators = [ValidateDataField("Y-Momentum")],
-          units=r"",display_name=r"B_y")
-ChomboFieldInfo["Y-momentum"]._projected_units=r""
+add_gdf_field("velocity_y", function=NullFunc, take_log=True,
+          validators = [ValidateDataField("velocity_y")],
+          units=r"\rm{cm}/\rm{s}")
 
-add_field("Z-momentum", function=lambda a,b: None, take_log=False,
-          validators = [ValidateDataField("Z-Momentum")],
-          units=r"",display_name=r"B_z")
-ChomboFieldInfo["Z-momentum"]._projected_units=r""
+add_gdf_field("velocity_z", function=NullFunc, take_log=True,
+          validators = [ValidateDataField("velocity_z")],
+          units=r"\rm{cm}/\rm{s}")
 
-add_field("X-magnfield", function=lambda a,b: None, take_log=False,
-          validators = [ValidateDataField("X-Magnfield")],
-          units=r"",display_name=r"B_x")
-ChomboFieldInfo["X-magnfield"]._projected_units=r""
+add_gdf_field("mag_field_x", function=NullFunc, take_log=True,
+          validators = [ValidateDataField("mag_field_x")],
+          units=r"\rm{cm}/\rm{s}")
 
-add_field("Y-magnfield", function=lambda a,b: None, take_log=False,
-          validators = [ValidateDataField("Y-Magnfield")],
-          units=r"",display_name=r"B_y")
-ChomboFieldInfo["Y-magnfield"]._projected_units=r""
+add_gdf_field("mag_field_y", function=NullFunc, take_log=True,
+          validators = [ValidateDataField("mag_field_y")],
+          units=r"\rm{cm}/\rm{s}")
 
-add_field("Z-magnfield", function=lambda a,b: None, take_log=False,
-          validators = [ValidateDataField("Z-Magnfield")],
-          units=r"",display_name=r"B_z")
-ChomboFieldInfo["Z-magnfield"]._projected_units=r""
+add_gdf_field("mag_field_z", function=NullFunc, take_log=True,
+          validators = [ValidateDataField("mag_field_z")],
+          units=r"\rm{cm}/\rm{s}")
 
-def _MagneticEnergy(field,data):
-    return (data["X-magnfield"]**2 +
-            data["Y-magnfield"]**2 +
-            data["Z-magnfield"]**2)/2.
-add_field("MagneticEnergy", function=_MagneticEnergy, take_log=True,
-          units=r"",display_name=r"B^2/8\pi")
-ChomboFieldInfo["MagneticEnergy"]._projected_units=r""
-
-def _xVelocity(field, data):
-    """generate x-velocity from x-momentum and density
-
-    """
-    return data["X-momentum"]/data["density"]
-add_field("x-velocity",function=_xVelocity, take_log=False,
-          units=r'\rm{cm}/\rm{s}')
-
-def _yVelocity(field,data):
-    """generate y-velocity from y-momentum and density
-
-    """
-    #try:
-    #    return data["xvel"]
-    #except KeyError:
-    return data["Y-momentum"]/data["density"]
-add_field("y-velocity",function=_yVelocity, take_log=False,
-          units=r'\rm{cm}/\rm{s}')
-
-def _zVelocity(field,data):
-    """generate z-velocity from z-momentum and density
-
-    """
-    return data["Z-momentum"]/data["density"]
-add_field("z-velocity",function=_zVelocity, take_log=False,
-          units=r'\rm{cm}/\rm{s}')
-    
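
With the split above, NullFunc marks fields that are read straight from
disk, while genuinely derived fields still get real functions through
GDFFieldInfo.add_field. A hedged sketch of a hypothetical derived field
(velocity_magnitude is not part of this commit):

    from yt.frontends.gdf.fields import GDFFieldInfo

    def _velocity_magnitude(field, data):
        # built from the on-disk velocity components registered above
        return (data["velocity_x"]**2 +
                data["velocity_y"]**2 +
                data["velocity_z"]**2)**0.5

    GDFFieldInfo.add_field("velocity_magnitude", function=_velocity_magnitude,
                           take_log=False, units=r"\rm{cm}/\rm{s}")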


--- a/yt/frontends/gdf/io.py	Fri Sep 09 10:33:52 2011 -0400
+++ b/yt/frontends/gdf/io.py	Fri Sep 09 10:50:04 2011 -0400
@@ -25,44 +25,48 @@
 """
 from yt.utilities.io_handler import \
            BaseIOHandler
+import h5py
 
-class IOHandlerChomboHDF5(BaseIOHandler):
-    _data_style = "chombo_hdf5"
+class IOHandlerGDFHDF5(BaseIOHandler):
+    _data_style = "grid_data_format"
     _offset_string = 'data:offsets=0'
     _data_string = 'data:datatype=0'
 
     def _field_dict(self,fhandle):
-        ncomp = int(fhandle['/'].attrs['num_components'])
-        temp =  fhandle['/'].attrs.listitems()[-ncomp:]
-        val, keys = zip(*temp)
-        val = [int(re.match('component_(\d+)',v).groups()[0]) for v in val]
+        keys = fhandle['field_types'].keys()
+        val = fhandle['field_types'].keys()
+        # ncomp = int(fhandle['/'].attrs['num_components'])
+        # temp =  fhandle['/'].attrs.listitems()[-ncomp:]
+        # val, keys = zip(*temp)
+        # val = [int(re.match('component_(\d+)',v).groups()[0]) for v in val]
         return dict(zip(keys,val))
         
     def _read_field_names(self,grid):
         fhandle = h5py.File(grid.filename,'r')
-        ncomp = int(fhandle['/'].attrs['num_components'])
-
-        return [c[1] for c in f['/'].attrs.listitems()[-ncomp:]]
+        return fhandle['field_types'].keys()
     
     def _read_data_set(self,grid,field):
         fhandle = h5py.File(grid.hierarchy.hierarchy_filename,'r')
+        return fhandle['/data/grid_%010i/'%grid.id+field][:]
+        # field_dict = self._field_dict(fhandle)
+        # lstring = 'level_%i' % grid.Level
+        # lev = fhandle[lstring]
+        # dims = grid.ActiveDimensions
+        # boxsize = dims.prod()
+        
+        # grid_offset = lev[self._offset_string][grid._level_id]
+        # start = grid_offset+field_dict[field]*boxsize
+        # stop = start + boxsize
+        # data = lev[self._data_string][start:stop]
 
-        field_dict = self._field_dict(fhandle)
-        lstring = 'level_%i' % grid.Level
-        lev = fhandle[lstring]
-        dims = grid.ActiveDimensions
-        boxsize = dims.prod()
-        
-        grid_offset = lev[self._offset_string][grid._level_id]
-        start = grid_offset+field_dict[field]*boxsize
-        stop = start + boxsize
-        data = lev[self._data_string][start:stop]
-
-        return data.reshape(dims, order='F')
+        # return data.reshape(dims, order='F')
                                           
 
     def _read_data_slice(self, grid, field, axis, coord):
         sl = [slice(None), slice(None), slice(None)]
         sl[axis] = slice(coord, coord + 1)
-        return self._read_data_set(grid,field)[sl]
+        fhandle = h5py.File(grid.hierarchy.hierarchy_filename,'r')
+        return fhandle['/data/grid_%010i/'%grid.id+field][:][sl]
 
+    # return self._read_data_set(grid,field)[sl]
+
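
The reader above assumes the GDF on-disk layout: field names enumerated
under /field_types and one HDF5 group per grid under /data, keyed by a
zero-padded grid id. A self-contained sketch of that layout (the file name
and values are made up):

    import h5py
    import numpy as np

    f = h5py.File("example_gdf_output.h5", "w")
    f.create_group("field_types").create_group("density")
    f.create_group("data/grid_%010i" % 0)["density"] = np.ones((4, 4, 4))
    f.close()

    f = h5py.File("example_gdf_output.h5", "r")
    print f["field_types"].keys()                     # ['density']
    data = f["/data/grid_%010i/" % 0 + "density"][:]  # same access as the reader
    f.close()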


--- a/yt/frontends/setup.py	Fri Sep 09 10:33:52 2011 -0400
+++ b/yt/frontends/setup.py	Fri Sep 09 10:50:04 2011 -0400
@@ -6,6 +6,7 @@
     config = Configuration('frontends',parent_package,top_path)
     config.make_config_py() # installs __config__.py
     #config.make_svn_version_py()
+    config.add_subpackage("gdf")
     config.add_subpackage("chombo")
     config.add_subpackage("enzo")
     config.add_subpackage("flash")


--- a/yt/mods.py	Fri Sep 09 10:33:52 2011 -0400
+++ b/yt/mods.py	Fri Sep 09 10:50:04 2011 -0400
@@ -77,6 +77,9 @@
 from yt.frontends.chombo.api import \
     ChomboStaticOutput, ChomboFieldInfo, add_chombo_field
 
+from yt.frontends.gdf.api import \
+    GDFStaticOutput, GDFFieldInfo, add_gdf_field
+
 from yt.frontends.art.api import \
     ARTStaticOutput, ARTFieldInfo, add_art_field
 


--- a/yt/utilities/parameter_file_storage.py	Fri Sep 09 10:33:52 2011 -0400
+++ b/yt/utilities/parameter_file_storage.py	Fri Sep 09 10:50:04 2011 -0400
@@ -32,6 +32,8 @@
 from yt.utilities.parallel_tools.parallel_analysis_interface import \
     parallel_simple_proxy
 
+import yt.utilities.peewee as peewee
+
 output_type_registry = {}
 _field_names = ('hash', 'bn', 'fp', 'tt', 'ctid', 'class_name', 'last_seen')
 
@@ -48,6 +50,20 @@
     def __repr__(self):
         return "%s" % self.name
 
+_field_spec = dict(
+    dset_uuid = peewee.TextField(),
+    output_type = peewee.TextField(),
+    pf_path = peewee.TextField(),
+    creation_time = peewee.IntegerField(),
+    last_seen_time = peewee.IntegerField(),
+    simulation_uuid = peewee.TextField(),
+    redshift = peewee.FloatField(),
+    time = peewee.FloatField(),
+    topgrid0 = peewee.IntegerField(),
+    topgrid1 = peewee.IntegerField(),
+    topgrid2 = peewee.IntegerField(),
+)
+
 class ParameterFileStore(object):
     """
     This class is designed to be a semi-persistent storage for parameter
@@ -62,6 +78,7 @@
     _distributed = True
     _processing = False
     _owner = 0
+    conn = None
 
     def __new__(cls, *p, **k):
         self = object.__new__(cls, *p, **k)
@@ -77,7 +94,6 @@
         if ytcfg.getboolean("yt", "StoreParameterFiles"):
             self._read_only = False
             self.init_db()
-            self._records = self.read_db()
         else:
             self._read_only = True
             self._records = {}
@@ -93,9 +109,18 @@
             if not os.path.isdir(dbdir): os.mkdir(dbdir)
         except OSError:
             raise NoParameterShelf()
-        open(dbn, 'ab') # make sure it exists, allow to close
-        # Now we read in all our records and return them
-        # these will be broadcast
+        self.conn = peewee.SqliteDatabase(dbn)
+        class SimulationOutputsMeta:
+            database = self.conn
+            db_table = "simulation_outputs"
+        _field_spec["Meta"] = SimulationOutputsMeta
+        self.output_model = type(
+            "SimulationOutputs",
+            (peewee.Model,),
+            _field_spec,
+        )
+        self.output_model._meta.pk_name = "dset_uuid"
+        self.conn.connect()
 
     def _get_db_name(self):
         base_file_name = ytcfg.get("yt", "ParameterFileStore")
@@ -105,39 +130,23 @@
 
     def get_pf_hash(self, hash):
         """ This returns a parameter file based on a hash. """
-        return self._convert_pf(self._records[hash])
+        output = self.output_model.get(dset_uuid = hash)
+        return self._convert_pf(output)
 
-    def get_pf_ctid(self, ctid):
-        """ This returns a parameter file based on a CurrentTimeIdentifier. """
-        for h in self._records:
-            if self._records[h]['ctid'] == ctid:
-                return self._convert_pf(self._records[h])
-
-    def _adapt_pf(self, pf):
-        """ This turns a parameter file into a CSV entry. """
-        return dict(bn=pf.basename,
-                    fp=pf.fullpath,
-                    tt=pf.current_time,
-                    ctid=pf.unique_identifier,
-                    class_name=pf.__class__.__name__,
-                    last_seen=pf._instantiated)
-
-    def _convert_pf(self, pf_dict):
-        """ This turns a CSV entry into a parameter file. """
-        bn = pf_dict['bn']
-        fp = pf_dict['fp']
-        fn = os.path.join(fp, bn)
-        class_name = pf_dict['class_name']
-        if class_name not in output_type_registry:
-            raise UnknownStaticOutputType(class_name)
+    def _convert_pf(self, inst):
+        """ This turns a model into a parameter file. """
+        fn = inst.pf_path
+        if inst.output_type not in output_type_registry:
+            raise UnknownStaticOutputType(inst.output_type)
         mylog.info("Checking %s", fn)
         if os.path.exists(fn):
-            pf = output_type_registry[class_name](os.path.join(fp, bn))
+            pf = output_type_registry[inst.output_type](fn)
         else:
             raise IOError
         # This next one is to ensure that we manually update the last_seen
         # record *now*, for during write_out.
-        self._records[pf._hash()]['last_seen'] = pf._instantiated
+        self.output_model.update(last_seen_time = pf._instantiated).where(
+            dset_uuid = inst.dset_uuid).execute()
         return pf
 
     def check_pf(self, pf):
@@ -146,86 +155,34 @@
         recorded in the storage unit.  In doing so, it will update path
         and "last_seen" information.
         """
-        hash = pf._hash()
-        if hash not in self._records:
+        q = self.output_model.select().where(dset_uuid = pf._hash())
+        q.execute()
+        if q.count() == 0:
             self.insert_pf(pf)
             return
-        pf_dict = self._records[hash]
-        self._records[hash]['last_seen'] = pf._instantiated
-        if pf_dict['bn'] != pf.basename \
-          or pf_dict['fp'] != pf.fullpath:
-            self.wipe_hash(hash)
-            self.insert_pf(pf)
+        # Otherwise we update
+        self.output_model.update(
+            last_seen_time = pf._instantiated,
+            pf_path = os.path.join(pf.fullpath, pf.basename)
+        ).where(
+            dset_uuid = pf._hash()).execute(
+        )
 
     def insert_pf(self, pf):
         """ This will insert a new *pf* and flush the database to disk. """
-        self._records[pf._hash()] = self._adapt_pf(pf)
-        self.flush_db()
-
-    def wipe_hash(self, hash):
-        """
-        This removes a *hash* corresponding to a parameter file from the
-        storage.
-        """
-        if hash not in self._records: return
-        del self._records[hash]
-        self.flush_db()
-
-    def flush_db(self):
-        """ This flushes the storage to disk. """
-        if self._read_only: return
-        self._write_out()
-        self.read_db()
-
-    def get_recent(self, n=10):
-        recs = sorted(self._records.values(), key=lambda a: -a['last_seen'])[:n]
-        return recs
-
-    @parallel_simple_proxy
-    def _write_out(self):
-        if self._read_only: return
-        fn = self._get_db_name()
-        f = open("%s.tmp" % fn, 'wb')
-        w = csv.DictWriter(f, _field_names)
-        maxn = ytcfg.getint("yt","MaximumStoredPFs") # number written
-        for h,v in islice(sorted(self._records.items(),
-                          key=lambda a: -a[1]['last_seen']), 0, maxn):
-            v['hash'] = h
-            w.writerow(v)
-        f.close()
-        os.rename("%s.tmp" % fn, fn)
-
-    @parallel_simple_proxy
-    def read_db(self):
-        """ This will read the storage device from disk. """
-        f = open(self._get_db_name(), 'rb')
-        vals = csv.DictReader(f, _field_names)
-        db = {}
-        for v in vals:
-            db[v.pop('hash')] = v
-            if v['last_seen'] is None:
-                v['last_seen'] = 0.0
-            else: v['last_seen'] = float(v['last_seen'])
-        return db
-
-class ObjectStorage(object):
-    pass
-
-class EnzoRunDatabase(object):
-    conn = None
-
-    def __init__(self, path = None):
-        if path is None:
-            path = ytcfg.get("yt", "enzo_db")
-            if len(path) == 0: raise Runtime
-        import sqlite3
-        self.conn = sqlite3.connect(path)
-
-    def find_uuid(self, u):
-        cursor = self.conn.execute(
-            "select pf_path from enzo_outputs where dset_uuid = '%s'" % (
-                u))
-        # It's a 'unique key'
-        result = cursor.fetchone()
-        if result is None: return None
-        return result[0]
+        q = self.output_model.insert(
+                    dset_uuid = pf._hash(),
+                    output_type = pf.__class__.__name__,
+                    pf_path = os.path.join(
+                        pf.fullpath, pf.basename),
+                    creation_time = pf.parameters.get(
+                        "CurrentTimeIdentifier", 0), # Get os.stat
+                    last_seen_time = pf._instantiated,
+                    simulation_uuid = pf.parameters.get(
+                        "SimulationUUID", ""), # NULL
+                    redshift = pf.current_redshift,
+                    time = pf.current_time,
+                    topgrid0 = pf.domain_dimensions[0],
+                    topgrid1 = pf.domain_dimensions[1],
+                    topgrid2 = pf.domain_dimensions[2])
+        q.execute()
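
init_db() above builds the peewee model at runtime with type() so the field
spec can live in a plain dict. A condensed, self-contained sketch of the same
construction (the in-memory database and two-field spec are illustrative;
Database.create_table is the helper defined in the vendored peewee below):

    import yt.utilities.peewee as peewee

    conn = peewee.SqliteDatabase(":memory:")

    class SimulationOutputsMeta:
        database = conn
        db_table = "simulation_outputs"

    _spec = dict(dset_uuid = peewee.TextField(),
                 pf_path   = peewee.TextField(),
                 Meta      = SimulationOutputsMeta)
    SimulationOutputs = type("SimulationOutputs", (peewee.Model,), _spec)
    SimulationOutputs._meta.pk_name = "dset_uuid"
    conn.connect()
    conn.create_table(SimulationOutputs)
    SimulationOutputs.insert(dset_uuid="abc123", pf_path="/data/run0001").execute()
    print SimulationOutputs.get(dset_uuid="abc123").pf_path   # /data/run0001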


--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/yt/utilities/peewee.py	Fri Sep 09 10:50:04 2011 -0400
@@ -0,0 +1,1425 @@
+#     (\
+#     (  \  /(o)\     caw!
+#     (   \/  ()/ /)
+#      (   `;.))'".) 
+#       `(/////.-'
+#    =====))=))===() 
+#      ///'       
+#     //
+#    '
+from datetime import datetime
+import logging
+import os
+import re
+import time
+
+try:
+    import sqlite3
+except ImportError:
+    sqlite3 = None
+
+try:
+    import psycopg2
+except ImportError:
+    psycopg2 = None
+
+try:
+    import MySQLdb as mysql
+except ImportError:
+    mysql = None
+
+class ImproperlyConfigured(Exception):
+    pass
+
+if sqlite3 is None and psycopg2 is None and mysql is None:
+    raise ImproperlyConfigured('Either sqlite3, psycopg2 or MySQLdb must be installed')
+
+
+DATABASE_NAME = os.environ.get('PEEWEE_DATABASE', 'peewee.db')
+logger = logging.getLogger('peewee.logger')
+
+
+class BaseAdapter(object):
+    """
+    The various subclasses of `BaseAdapter` provide a bridge between the high-
+    level `Database` abstraction and the underlying python libraries like
+    psycopg2.  It also provides a way to unify the pythonic field types with
+    the underlying column types used by the database engine.
+    
+    The `BaseAdapter` provides two types of mappings:    
+    - mapping between filter operations and their database equivalents
+    - mapping between basic field types and their database column types
+    
+    The `BaseAdapter` also is the mechanism used by the `Database` class to:
+    - handle connections with the database
+    - extract information from the database cursor
+    """
+    operations = {'eq': '= %s'}
+    interpolation = '%s'
+    
+    def get_field_types(self):
+        field_types = {
+            'integer': 'INTEGER',
+            'float': 'REAL',
+            'decimal': 'NUMERIC',
+            'string': 'VARCHAR',
+            'text': 'TEXT',
+            'datetime': 'DATETIME',
+            'primary_key': 'INTEGER',
+            'foreign_key': 'INTEGER',
+            'boolean': 'SMALLINT',
+        }
+        field_types.update(self.get_field_overrides())
+        return field_types
+    
+    def get_field_overrides(self):
+        return {}
+    
+    def connect(self, database, **kwargs):
+        raise NotImplementedError
+    
+    def close(self, conn):
+        conn.close()
+    
+    def lookup_cast(self, lookup, value):
+        if lookup in ('contains', 'icontains'):
+            return '%%%s%%' % value
+        elif lookup in ('startswith', 'istartswith'):
+            return '%s%%' % value
+        return value
+    
+    def last_insert_id(self, cursor, model):
+        return cursor.lastrowid
+    
+    def rows_affected(self, cursor):
+        return cursor.rowcount
+
+
+class SqliteAdapter(BaseAdapter):
+    # note the sqlite library uses a non-standard interpolation string
+    operations = {
+        'lt': '< ?',
+        'lte': '<= ?',
+        'gt': '> ?',
+        'gte': '>= ?',
+        'eq': '= ?',
+        'ne': '!= ?', # watch yourself with this one
+        'in': 'IN (%s)', # special-case to list q-marks
+        'is': 'IS ?',
+        'icontains': "LIKE ? ESCAPE '\\'", # surround param with %'s
+        'contains': "GLOB ?", # surround param with *'s
+        'istartswith': "LIKE ? ESCAPE '\\'",
+        'startswith': "GLOB ?",
+    }
+    interpolation = '?'
+    
+    def connect(self, database, **kwargs):
+        return sqlite3.connect(database, **kwargs)
+    
+    def lookup_cast(self, lookup, value):
+        if lookup == 'contains':
+            return '*%s*' % value
+        elif lookup == 'icontains':
+            return '%%%s%%' % value
+        elif lookup == 'startswith':
+            return '%s*' % value
+        elif lookup == 'istartswith':
+            return '%s%%' % value
+        return value
+
+
+class PostgresqlAdapter(BaseAdapter):
+    operations = {
+        'lt': '< %s',
+        'lte': '<= %s',
+        'gt': '> %s',
+        'gte': '>= %s',
+        'eq': '= %s',
+        'ne': '!= %s', # watch yourself with this one
+        'in': 'IN (%s)', # special-case to list q-marks
+        'is': 'IS %s',
+        'icontains': 'ILIKE %s', # surround param with %'s
+        'contains': 'LIKE %s', # surround param with *'s
+        'istartswith': 'ILIKE %s',
+        'startswith': 'LIKE %s',
+    }
+        
+    def connect(self, database, **kwargs):
+        return psycopg2.connect(database=database, **kwargs)
+    
+    def get_field_overrides(self):
+        return {
+            'primary_key': 'SERIAL',
+            'datetime': 'TIMESTAMP'
+        }
+    
+    def last_insert_id(self, cursor, model):
+        cursor.execute("SELECT CURRVAL('\"%s_%s_seq\"')" % (
+            model._meta.db_table, model._meta.pk_name))
+        return cursor.fetchone()[0]
+    
+
+class MySQLAdapter(BaseAdapter):
+    operations = {
+        'lt': '< %s',
+        'lte': '<= %s',
+        'gt': '> %s',
+        'gte': '>= %s',
+        'eq': '= %s',
+        'ne': '!= %s', # watch yourself with this one
+        'in': 'IN (%s)', # special-case to list q-marks
+        'is': 'IS %s',
+        'icontains': 'LIKE %s', # surround param with %'s
+        'contains': 'LIKE BINARY %s', # surround param with *'s
+        'istartswith': 'LIKE %s',
+        'startswith': 'LIKE BINARY %s',
+    }
+
+    def connect(self, database, **kwargs):
+        return mysql.connect(db=database, **kwargs)
+
+    def get_field_overrides(self):
+        return {
+            'primary_key': 'integer AUTO_INCREMENT',
+            'boolean': 'bool',
+            'float': 'double precision',
+            'text': 'longtext',
+        }
+
+
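
A practical difference among the adapters above is the parameter
placeholder: sqlite uses qmark style, so SqliteAdapter overrides
interpolation, while the postgres and mysql adapters keep the format-style
default from BaseAdapter:

    from yt.utilities.peewee import SqliteAdapter, PostgresqlAdapter, MySQLAdapter

    print SqliteAdapter.interpolation      # '?'  (qmark paramstyle)
    print PostgresqlAdapter.interpolation  # '%s' (inherited from BaseAdapter)
    print MySQLAdapter.interpolation       # '%s'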
+class Database(object):
+    """
+    A high-level api for working with the supported database engines.  `Database`
+    provides a wrapper around some of the functions performed by the `Adapter`,
+    in addition providing support for:
+    - execution of SQL queries
+    - creating and dropping tables and indexes
+    """
+    def __init__(self, adapter, database, **connect_kwargs):
+        self.adapter = adapter
+        self.database = database
+        self.connect_kwargs = connect_kwargs
+    
+    def connect(self):
+        self.conn = self.adapter.connect(self.database, **self.connect_kwargs)
+    
+    def close(self):
+        self.adapter.close(self.conn)
+    
+    def execute(self, sql, params=None, commit=False):
+        cursor = self.conn.cursor()
+        res = cursor.execute(sql, params or ())
+        if commit:
+            self.conn.commit()
+        logger.debug((sql, params))
+        return cursor
+    
+    def last_insert_id(self, cursor, model):
+        return self.adapter.last_insert_id(cursor, model)
+    
+    def rows_affected(self, cursor):
+        return self.adapter.rows_affected(cursor)
+    
+    def column_for_field(self, db_field):
+        try:
+            return self.adapter.get_field_types()[db_field]
+        except KeyError:
+            raise AttributeError('Unknown field type: "%s", valid types are: %s' % (
+                db_field, ', '.join(self.adapter.get_field_types().keys()))
+            )
+    
+    def create_table(self, model_class):
+        framing = "CREATE TABLE %s (%s);"
+        columns = []
+
+        for field in model_class._meta.fields.values():
+            columns.append(field.to_sql())
+
+        query = framing % (model_class._meta.db_table, ', '.join(columns))
+        
+        self.execute(query, commit=True)
+    
+    def create_index(self, model_class, field, unique=False):
+        framing = 'CREATE %(unique)s INDEX %(model)s_%(field)s ON %(model)s(%(field)s);'
+        
+        if field not in model_class._meta.fields:
+            raise AttributeError(
+                'Field %s not on model %s' % (field, model_class)
+            )
+        
+        unique_expr = ternary(unique, 'UNIQUE', '')
+        
+        query = framing % {
+            'unique': unique_expr,
+            'model': model_class._meta.db_table,
+            'field': field
+        }
+        
+        self.execute(query, commit=True)
+    
+    def drop_table(self, model_class, fail_silently=False):
+        framing = fail_silently and 'DROP TABLE IF EXISTS %s;' or 'DROP TABLE %s;'
+        self.execute(framing % model_class._meta.db_table, commit=True)
+    
+    def get_indexes_for_table(self, table):
+        raise NotImplementedError
+
+
+class SqliteDatabase(Database):
+    def __init__(self, database, **connect_kwargs):
+        super(SqliteDatabase, self).__init__(SqliteAdapter(), database, **connect_kwargs)
+    
+    def get_indexes_for_table(self, table):
+        res = self.execute('PRAGMA index_list(%s);' % table)
+        rows = sorted([(r[1], r[2] == 1) for r in res.fetchall()])
+        return rows
+
+
+class PostgresqlDatabase(Database):
+    def __init__(self, database, **connect_kwargs):
+        super(PostgresqlDatabase, self).__init__(PostgresqlAdapter(), database, **connect_kwargs)
+    
+    def get_indexes_for_table(self, table):
+        res = self.execute("""
+            SELECT c2.relname, i.indisprimary, i.indisunique
+            FROM pg_catalog.pg_class c, pg_catalog.pg_class c2, pg_catalog.pg_index i
+            WHERE c.relname = %s AND c.oid = i.indrelid AND i.indexrelid = c2.oid
+            ORDER BY i.indisprimary DESC, i.indisunique DESC, c2.relname""", (table,))
+        return sorted([(r[0], r[1]) for r in res.fetchall()])
+
+class MySQLDatabase(Database):
+    def __init__(self, database, **connect_kwargs):
+        super(MySQLDatabase, self).__init__(MySQLAdapter(), database, **connect_kwargs)
+    
+    def get_indexes_for_table(self, table):
+        res = self.execute('SHOW INDEXES IN %s;' % table)
+        rows = sorted([(r[2], r[1] == 0) for r in res.fetchall()])
+        return rows
+
+
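
Database is a thin wrapper over an adapter's DB-API connection; executing
raw SQL through it looks like the following (in-memory database for
illustration):

    from yt.utilities.peewee import SqliteDatabase

    db = SqliteDatabase(":memory:")
    db.connect()
    db.execute("CREATE TABLE t (a INTEGER);", commit=True)
    db.execute("INSERT INTO t VALUES (?);", (1,), commit=True)
    print db.execute("SELECT a FROM t;").fetchone()   # (1,)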
+class QueryResultWrapper(object):
+    """
+    Provides an iterator over the results of a raw Query, additionally doing
+    two things:
+    - converts rows from the database into model instances
+    - ensures that multiple iterations do not result in multiple queries
+    """
+    def __init__(self, model, cursor):
+        self.model = model
+        self.cursor = cursor
+        self._result_cache = []
+        self._populated = False
+    
+    def model_from_rowset(self, model_class, row_dict):
+        instance = model_class()
+        for attr, value in row_dict.iteritems():
+            if attr in instance._meta.fields:
+                field = instance._meta.fields[attr]
+                setattr(instance, attr, field.python_value(value))
+            else:
+                setattr(instance, attr, value)
+        return instance    
+    
+    def _row_to_dict(self, row, result_cursor):
+        return dict((result_cursor.description[i][0], value)
+            for i, value in enumerate(row))
+    
+    def __iter__(self):
+        if not self._populated:
+            return self
+        else:
+            return iter(self._result_cache)
+    
+    def next(self):
+        row = self.cursor.fetchone()
+        if row:
+            row_dict = self._row_to_dict(row, self.cursor)
+            instance = self.model_from_rowset(self.model, row_dict)
+            self._result_cache.append(instance)
+            return instance
+        else:
+            self._populated = True
+            raise StopIteration
+
+
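
QueryResultWrapper streams rows from the cursor once and caches them, so a
second iteration replays the cache instead of re-querying. The same pattern
in miniature (generic, not tied to the cursor API):

    class CachingIterator(object):
        def __init__(self, source):
            self.source = iter(source)
            self._cache = []
            self._done = False
        def __iter__(self):
            return iter(self._cache) if self._done else self
        def next(self):
            try:
                item = self.source.next()
            except StopIteration:
                self._done = True
                raise
            self._cache.append(item)
            return item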
+# create
+class DoesNotExist(Exception):
+    pass
+
+
+# semantic wrappers for ordering the results of a `SelectQuery`
+def asc(f):
+    return (f, 'ASC')
+
+def desc(f):
+    return (f, 'DESC')
+
+# wrappers for performing aggregation in a `SelectQuery`
+def Count(f, alias='count'):
+    return ('COUNT', f, alias)
+
+def Max(f, alias='max'):
+    return ('MAX', f, alias)
+
+def Min(f, alias='min'):
+    return ('MIN', f, alias)
+
+# decorator for query methods to indicate that they change the state of the
+# underlying data structures
+def returns_clone(func):
+    def inner(self, *args, **kwargs):
+        clone = self.clone()
+        res = func(clone, *args, **kwargs)
+        return clone
+    return inner
+
+# helpers
+ternary = lambda cond, t, f: (cond and [t] or [f])[0]
+
+
+class Node(object):
+    def __init__(self, connector='AND'):
+        self.connector = connector
+        self.children = []
+        self.negated = False
+    
+    def connect(self, rhs, connector):
+        if isinstance(rhs, Q):
+            if connector == self.connector:
+                self.children.append(rhs)
+                return self
+            else:
+                p = Node(connector)
+                p.children = [self, rhs]
+                return p
+        elif isinstance(rhs, Node):
+            p = Node(connector)
+            p.children = [self, rhs]
+            return p
+    
+    def __or__(self, rhs):
+        return self.connect(rhs, 'OR')
+
+    def __and__(self, rhs):
+        return self.connect(rhs, 'AND')
+    
+    def __invert__(self):
+        self.negated = not self.negated
+        return self
+    
+    def __unicode__(self):
+        query = []
+        nodes = []
+        for child in self.children:
+            if isinstance(child, Q):
+                query.append(unicode(child))
+            elif isinstance(child, Node):
+                nodes.append('(%s)' % unicode(child))
+        query.extend(nodes)
+        connector = ' %s ' % self.connector
+        query = connector.join(query)
+        if self.negated:
+            query = 'NOT %s' % query
+        return query
+    
+
+class Q(object):
+    def __init__(self, **kwargs):
+        self.query = kwargs
+        self.parent = None
+        self.negated = False
+    
+    def connect(self, connector):
+        if self.parent is None:
+            self.parent = Node(connector)
+            self.parent.children.append(self)
+    
+    def __or__(self, rhs):
+        self.connect('OR')
+        return self.parent | rhs
+    
+    def __and__(self, rhs):
+        self.connect('AND')
+        return self.parent & rhs
+    
+    def __invert__(self):
+        self.negated = not self.negated
+        return self
+    
+    def __unicode__(self):
+        bits = ['%s = %s' % (k, v) for k, v in self.query.items()]
+        if len(self.query.items()) > 1:
+            connector = ' AND '
+            expr = '(%s)' % connector.join(bits)
+        else:
+            expr = bits[0]
+        if self.negated:
+            expr = 'NOT %s' % expr
+        return expr
+
+
+def parseq(*args, **kwargs):
+    node = Node()
+    
+    for piece in args:
+        if isinstance(piece, (Q, Node)):
+            node.children.append(piece)
+        else:
+            raise TypeError('Unknown object: %s' % piece)
+
+    if kwargs:
+        node.children.append(Q(**kwargs))
+
+    return node
+
+
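
Q objects hold keyword lookups, Node trees combine them with AND/OR, and
parseq normalizes positional and keyword arguments into a single tree.
Roughly, using the classes above:

    from yt.utilities.peewee import Q, parseq

    node = parseq(Q(redshift__gt=1.0) | Q(time__lt=0.5),
                  output_type="EnzoStaticOutput")
    print unicode(node)
    # roughly: output_type = EnzoStaticOutput AND (redshift__gt = 1.0 OR time__lt = 0.5)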
+class EmptyResultException(Exception):
+    pass
+
+
+class BaseQuery(object):
+    query_separator = '__'
+    requires_commit = True
+    force_alias = False
+    
+    def __init__(self, model):
+        self.model = model
+        self.query_context = model
+        self.database = self.model._meta.database
+        self.operations = self.database.adapter.operations
+        self.interpolation = self.database.adapter.interpolation
+        
+        self._dirty = True
+        self._where = {}
+        self._joins = []
+    
+    def clone(self):
+        raise NotImplementedError
+    
+    def lookup_cast(self, lookup, value):
+        return self.database.adapter.lookup_cast(lookup, value)
+    
+    def parse_query_args(self, model, **query):
+        parsed = {}
+        for lhs, rhs in query.iteritems():
+            if self.query_separator in lhs:
+                lhs, op = lhs.rsplit(self.query_separator, 1)
+            else:
+                op = 'eq'
+            
+            try:
+                field = model._meta.get_field_by_name(lhs)
+            except AttributeError:
+                field = model._meta.get_related_field_by_name(lhs)
+                if field is None:
+                    raise
+                if isinstance(rhs, Model):
+                    rhs = rhs.get_pk()
+            
+            if op == 'in':
+                if isinstance(rhs, SelectQuery):
+                    lookup_value = rhs
+                    operation = 'IN (%s)'
+                else:
+                    if not rhs:
+                        raise EmptyResultException
+                    lookup_value = [field.db_value(o) for o in rhs]
+                    operation = self.operations[op] % \
+                        (','.join([self.interpolation for v in lookup_value]))
+            elif op == 'is':
+                if rhs is not None:
+                    raise ValueError('__is lookups only accept None')
+                operation = 'IS NULL'
+                lookup_value = []
+            else:
+                lookup_value = field.db_value(rhs)
+                operation = self.operations[op]
+            
+            parsed[field.name] = (operation, self.lookup_cast(op, lookup_value))
+        
+        return parsed
+    
+    @returns_clone
+    def where(self, *args, **kwargs):
+        self._where.setdefault(self.query_context, [])
+        self._where[self.query_context].append(parseq(*args, **kwargs))
+
+    @returns_clone
+    def join(self, model, join_type=None, on=None):
+        if self.query_context._meta.rel_exists(model):
+            self._joins.append((model, join_type, on))
+            self.query_context = model
+        else:
+            raise AttributeError('No foreign key found between %s and %s' % \
+                (self.query_context.__name__, model.__name__))
+
+    @returns_clone
+    def switch(self, model):
+        if model == self.model:
+            self.query_context = model
+            return
+
+        for klass, join_type, on in self._joins:
+            if model == klass:
+                self.query_context = model
+                return
+        raise AttributeError('You must JOIN on %s' % model.__name__)
+    
+    def use_aliases(self):
+        return len(self._joins) > 0 or self.force_alias
+
+    def combine_field(self, alias, field_name):
+        if alias:
+            return '%s.%s' % (alias, field_name)
+        return field_name
+    
+    def compile_where(self):
+        alias_count = 0
+        alias_map = {}
+
+        alias_required = self.use_aliases()
+
+        joins = list(self._joins)
+        if self._where or len(joins):
+            joins.insert(0, (self.model, None, None))
+        
+        where_with_alias = []
+        where_data = []
+        computed_joins = []
+
+        for i, (model, join_type, on) in enumerate(joins):
+            if alias_required:
+                alias_count += 1
+                alias_map[model] = 't%d' % alias_count
+            else:
+                alias_map[model] = ''
+            
+            if i > 0:
+                from_model = joins[i-1][0]
+                field = from_model._meta.get_related_field_for_model(model, on)
+                if field:
+                    left_field = field.name
+                    right_field = model._meta.pk_name
+                else:
+                    field = from_model._meta.get_reverse_related_field_for_model(model, on)
+                    left_field = from_model._meta.pk_name
+                    right_field = field.name
+                
+                if join_type is None:
+                    if field.null and model not in self._where:
+                        join_type = 'LEFT OUTER'
+                    else:
+                        join_type = 'INNER'
+                
+                computed_joins.append(
+                    '%s JOIN %s AS %s ON %s = %s' % (
+                        join_type,
+                        model._meta.db_table,
+                        alias_map[model],
+                        self.combine_field(alias_map[from_model], left_field),
+                        self.combine_field(alias_map[model], right_field),
+                    )
+                )
+        
+        for (model, join_type, on) in joins:
+            if model in self._where:
+                for node in self._where[model]:
+                    query, data = self.parse_node(node, model, alias_map)
+                    where_with_alias.append(query)
+                    where_data.extend(data)
+        
+        return computed_joins, where_with_alias, where_data, alias_map
+    
+    def convert_where_to_params(self, where_data):
+        flattened = []
+        for clause in where_data:
+            if isinstance(clause, (tuple, list)):
+                flattened.extend(clause)
+            else:
+                flattened.append(clause)
+        return flattened
+    
+    def parse_node(self, node, model, alias_map):
+        query = []
+        query_data = []
+        nodes = []
+        for child in node.children:
+            if isinstance(child, Q):
+                parsed, data = self.parse_q(child, model, alias_map)
+                query.append(parsed)
+                query_data.extend(data)
+            elif isinstance(child, Node):
+                parsed, data = self.parse_node(child, model, alias_map)
+                query.append('(%s)' % parsed)
+                query_data.extend(data)
+        query.extend(nodes)
+        connector = ' %s ' % node.connector
+        query = connector.join(query)
+        if node.negated:
+            query = 'NOT (%s)' % query
+        return query, query_data
+    
+    def parse_q(self, q, model, alias_map):
+        query = []
+        query_data = []
+        parsed = self.parse_query_args(model, **q.query)
+        for (name, lookup) in parsed.iteritems():
+            operation, value = lookup
+            if isinstance(value, SelectQuery):
+                sql, value = self.convert_subquery(value)
+                operation = operation % sql
+
+            query_data.append(value)
+            
+            combined = self.combine_field(alias_map[model], name)
+            query.append('%s %s' % (combined, operation))
+        
+        if len(query) > 1:
+            query = '(%s)' % (' AND '.join(query))
+        else:
+            query = query[0]
+        
+        if q.negated:
+            query = 'NOT %s' % query
+        
+        return query, query_data
+
+    def convert_subquery(self, subquery):
+        subquery.query, orig_query = subquery.model._meta.pk_name, subquery.query
+        subquery.force_alias, orig_alias = True, subquery.force_alias
+        sql, data = subquery.sql()
+        subquery.query = orig_query
+        subquery.force_alias = orig_alias
+        return sql, data
+    
+    def raw_execute(self):
+        query, params = self.sql()
+        return self.database.execute(query, params, self.requires_commit)
+
+
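
The double-underscore lookups parsed above split a keyword into field and
operation with rsplit, then map the operation through the adapter table;
by hand:

    lhs = "last_seen_time__gte"
    field, op = lhs.rsplit("__", 1)
    print field, op                  # last_seen_time gte
    # SqliteAdapter.operations['gte'] is '>= ?', so the WHERE fragment becomes
    # "last_seen_time >= ?" with the value passed separately as a parameter.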
+class RawQuery(BaseQuery):
+    def __init__(self, model, query, *params):
+        self._sql = query
+        self._params = list(params)
+        super(RawQuery, self).__init__(model)
+    
+    def sql(self):
+        return self._sql, self._params
+    
+    def execute(self):
+        return QueryResultWrapper(self.model, self.raw_execute())
+    
+    def join(self):
+        raise AttributeError('Raw queries do not support joining programmatically')
+    
+    def where(self):
+        raise AttributeError('Raw queries do not support querying programmatically')
+    
+    def switch(self):
+        raise AttributeError('Raw queries do not support switching contexts')
+    
+    def __iter__(self):
+        return self.execute()
+
+
+class SelectQuery(BaseQuery):
+    requires_commit = False
+    
+    def __init__(self, model, query=None):
+        self.query = query or '*'
+        self._group_by = []
+        self._having = []
+        self._order_by = []
+        self._pagination = None # return all by default
+        self._distinct = False
+        self._qr = None
+        super(SelectQuery, self).__init__(model)
+    
+    def clone(self):
+        query = SelectQuery(self.model, self.query)
+        query.query_context = self.query_context
+        query._group_by = list(self._group_by)
+        query._having = list(self._having)
+        query._order_by = list(self._order_by)
+        query._pagination = self._pagination and tuple(self._pagination) or None
+        query._distinct = self._distinct
+        query._qr = self._qr
+        query._where = dict(self._where)
+        query._joins = list(self._joins)
+        return query
+    
+    @returns_clone
+    def paginate(self, page_num, paginate_by=20):
+        self._pagination = (page_num, paginate_by)
+    
+    def count(self):
+        tmp_pagination = self._pagination
+        self._pagination = None
+        
+        tmp_query = self.query
+        
+        if self.use_aliases():
+            self.query = 'COUNT(t1.%s)' % (self.model._meta.pk_name)
+        else:
+            self.query = 'COUNT(%s)' % (self.model._meta.pk_name)
+        
+        res = self.database.execute(*self.sql())
+        
+        self.query = tmp_query
+        self._pagination = tmp_pagination
+        
+        return res.fetchone()[0]
+    
+    @returns_clone
+    def group_by(self, clause):
+        model = self.query_context
+        
+        if isinstance(clause, basestring):
+            fields = (clause,)
+        elif isinstance(clause, (list, tuple)):
+            fields = clause
+        elif issubclass(clause, Model):
+            model = clause
+            fields = clause._meta.get_field_names()
+        
+        self._group_by.append((model, fields))
+    
+    @returns_clone
+    def having(self, clause):
+        self._having.append(clause)
+    
+    @returns_clone
+    def distinct(self):
+        self._distinct = True
+    
+    @returns_clone
+    def order_by(self, field_or_string):
+        if isinstance(field_or_string, tuple):
+            field_or_string, ordering = field_or_string
+        else:
+            ordering = 'ASC'
+        
+        self._order_by.append(
+            (self.query_context, field_or_string, ordering)
+        )
+
+    def parse_select_query(self, alias_map):
+        if isinstance(self.query, basestring):
+            if self.query in ('*', self.model._meta.pk_name) and self.use_aliases():
+                return '%s.%s' % (alias_map[self.model], self.query)
+            return self.query
+        elif isinstance(self.query, dict):
+            qparts = []
+            aggregates = []
+            for model, cols in self.query.iteritems():
+                alias = alias_map.get(model, '')
+                for col in cols:
+                    if isinstance(col, tuple):
+                        func, col, col_alias = col
+                        aggregates.append('%s(%s) AS %s' % \
+                            (func, self.combine_field(alias, col), col_alias)
+                        )
+                    else:
+                        qparts.append(self.combine_field(alias, col))
+            return ', '.join(qparts + aggregates)
+        else:
+            raise TypeError('Unknown type encountered parsing select query')
+    
+    def sql(self):
+        joins, where, where_data, alias_map = self.compile_where()
+        
+        table = self.model._meta.db_table
+
+        params = []
+        group_by = []
+        
+        if self.use_aliases():
+            table = '%s AS %s' % (table, alias_map[self.model])
+            for model, clause in self._group_by:
+                alias = alias_map[model]
+                for field in clause:
+                    group_by.append(self.combine_field(alias, field))
+        else:
+            group_by = [c[1] for c in self._group_by]
+
+        parsed_query = self.parse_select_query(alias_map)
+        
+        if self._distinct:
+            sel = 'SELECT DISTINCT'
+        else:
+            sel = 'SELECT'
+        
+        select = '%s %s FROM %s' % (sel, parsed_query, table)
+        joins = '\n'.join(joins)
+        where = ' AND '.join(where)
+        group_by = ', '.join(group_by)
+        having = ' AND '.join(self._having)
+        
+        order_by = []
+        for piece in self._order_by:
+            model, field, ordering = piece
+            if self.use_aliases() and field in model._meta.fields:
+                field = '%s.%s' % (alias_map[model], field)
+            order_by.append('%s %s' % (field, ordering))
+        
+        pieces = [select]
+        
+        if joins:
+            pieces.append(joins)
+        if where:
+            pieces.append('WHERE %s' % where)
+            params.extend(self.convert_where_to_params(where_data))
+        
+        if group_by:
+            pieces.append('GROUP BY %s' % group_by)
+        if having:
+            pieces.append('HAVING %s' % having)
+        if order_by:
+            pieces.append('ORDER BY %s' % ', '.join(order_by))
+        if self._pagination:
+            page, paginate_by = self._pagination
+            if page > 0:
+                page -= 1
+            pieces.append('LIMIT %d OFFSET %d' % (paginate_by, page * paginate_by))
+        
+        return ' '.join(pieces), params
+    
+    def execute(self):
+        if self._dirty or not self._qr:
+            try:
+                self._qr = QueryResultWrapper(self.model, self.raw_execute())
+                self._dirty = False
+                return self._qr
+            except EmptyResultException:
+                return iter([])
+        else:
+            # call the __iter__ method directly
+            return iter(self._qr)
+    
+    def __iter__(self):
+        return self.execute()
+
+
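
Because every builder method is wrapped in returns_clone, SelectQuery chains
are non-destructive; each call hands back a modified copy. Continuing the
hypothetical SimulationOutputs model sketched after the
parameter_file_storage.py diff, and assuming it carries the full _field_spec
(including last_seen_time):

    from yt.utilities.peewee import desc

    base = SimulationOutputs.select()
    recent = base.where(last_seen_time__gte=0) \
                 .order_by(desc("last_seen_time")) \
                 .paginate(1, 10)
    # base is unchanged; recent carries the WHERE/ORDER BY/LIMIT state.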
+class UpdateQuery(BaseQuery):
+    def __init__(self, model, **kwargs):
+        self.update_query = kwargs
+        super(UpdateQuery, self).__init__(model)
+    
+    def clone(self):
+        query = UpdateQuery(self.model, **self.update_query)
+        query._where = dict(self._where)
+        query._joins = list(self._joins)
+        return query
+    
+    def parse_update(self):
+        sets = {}
+        for k, v in self.update_query.iteritems():
+            try:
+                field = self.model._meta.get_field_by_name(k)
+            except AttributeError:
+                field = self.model._meta.get_related_field_by_name(k)
+                if field is None:
+                    raise
+            
+            sets[field.name] = field.db_value(v)
+        
+        return sets
+    
+    def sql(self):
+        joins, where, where_data, alias_map = self.compile_where()
+        set_statement = self.parse_update()
+
+        params = []
+        update_params = []
+
+        for k, v in set_statement.iteritems():
+            params.append(v)
+            update_params.append('%s=%s' % (k, self.interpolation))
+        
+        update = 'UPDATE %s SET %s' % (
+            self.model._meta.db_table, ', '.join(update_params))
+        where = ' AND '.join(where)
+        
+        pieces = [update]
+        
+        if where:
+            pieces.append('WHERE %s' % where)
+            params.extend(self.convert_where_to_params(where_data))
+        
+        return ' '.join(pieces), params
+    
+    def join(self, *args, **kwargs):
+        raise AttributeError('Update queries do not support JOINs in sqlite')
+    
+    def execute(self):
+        result = self.raw_execute()
+        return self.database.rows_affected(result)
+
+
+class DeleteQuery(BaseQuery):
+    def clone(self):
+        query = DeleteQuery(self.model)
+        query._where = dict(self._where)
+        query._joins = list(self._joins)
+        return query
+    
+    def sql(self):
+        joins, where, where_data, alias_map = self.compile_where()
+
+        params = []
+        
+        delete = 'DELETE FROM %s' % (self.model._meta.db_table)
+        where = ' AND '.join(where)
+        
+        pieces = [delete]
+        
+        if where:
+            pieces.append('WHERE %s' % where)
+            params.extend(self.convert_where_to_params(where_data))
+        
+        return ' '.join(pieces), params
+    
+    def join(self, *args, **kwargs):
+        raise AttributeError('Delete queries do not support JOINs in sqlite')
+    
+    def execute(self):
+        result = self.raw_execute()
+        return self.database.rows_affected(result)
+
+
+class InsertQuery(BaseQuery):
+    def __init__(self, model, **kwargs):
+        self.insert_query = kwargs
+        super(InsertQuery, self).__init__(model)
+    
+    def parse_insert(self):
+        cols = []
+        vals = []
+        for k, v in self.insert_query.iteritems():
+            field = self.model._meta.get_field_by_name(k)
+            cols.append(k)
+            vals.append(field.db_value(v))
+        
+        return cols, vals
+    
+    def sql(self):
+        cols, vals = self.parse_insert()
+        
+        insert = 'INSERT INTO %s (%s) VALUES (%s)' % (
+            self.model._meta.db_table,
+            ','.join(cols),
+            ','.join(self.interpolation for v in vals)
+        )
+        
+        return insert, vals
+    
+    def where(self, *args, **kwargs):
+        raise AttributeError('Insert queries do not support WHERE clauses')
+    
+    def join(self, *args, **kwargs):
+        raise AttributeError('Insert queries do not support JOINs')
+    
+    def execute(self):
+        result = self.raw_execute()
+        return self.database.last_insert_id(result, self.model)
+
+
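
InsertQuery.sql() above assembles a qmark-parameterized statement from the
model's columns; reproducing its output by hand for the simulation_outputs
table (values are hypothetical):

    cols = ["dset_uuid", "pf_path"]
    vals = ["abc123", "/data/run0001"]
    print "INSERT INTO simulation_outputs (%s) VALUES (%s)" % (
        ",".join(cols), ",".join("?" for v in vals))
    # INSERT INTO simulation_outputs (dset_uuid,pf_path) VALUES (?,?)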
+class Field(object):
+    db_field = ''
+    default = None
+    field_template = "%(column_type)s%(nullable)s"
+
+    def get_attributes(self):
+        return {}
+    
+    def __init__(self, null=False, db_index=False, *args, **kwargs):
+        self.null = null
+        self.db_index = db_index
+        self.attributes = self.get_attributes()
+        self.default = kwargs.get('default', None)
+        
+        kwargs['nullable'] = ternary(self.null, '', ' NOT NULL')
+        self.attributes.update(kwargs)
+    
+    def add_to_class(self, klass, name):
+        self.name = name
+        self.model = klass
+        setattr(klass, name, None)
+    
+    def render_field_template(self):
+        col_type = self.model._meta.database.column_for_field(self.db_field)
+        self.attributes['column_type'] = col_type
+        return self.field_template % self.attributes
+    
+    def to_sql(self):
+        rendered = self.render_field_template()
+        return '%s %s' % (self.name, rendered)
+    
+    def null_wrapper(self, value, default=None):
+        if (self.null and value is None) or default is None:
+            return value
+        return value or default
+    
+    def db_value(self, value):
+        return value
+    
+    def python_value(self, value):
+        return value
+    
+    def lookup_value(self, lookup_type, value):
+        return self.db_value(value)
+
+
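
Field.to_sql() renders column DDL by filling field_template with the
adapter's column type plus per-field attributes; filling CharField's
template by hand shows the shape:

    tmpl = "%(column_type)s(%(max_length)d)%(nullable)s"   # CharField.field_template
    print tmpl % dict(column_type="VARCHAR", max_length=255, nullable=" NOT NULL")
    # VARCHAR(255) NOT NULL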
+class CharField(Field):
+    db_field = 'string'
+    field_template = '%(column_type)s(%(max_length)d)%(nullable)s'
+    
+    def get_attributes(self):
+        return {'max_length': 255}
+    
+    def db_value(self, value):
+        if self.null and value is None:
+            return value
+        value = value or ''
+        return value[:self.attributes['max_length']]
+    
+    def lookup_value(self, lookup_type, value):
+        if lookup_type == 'contains':
+            return '*%s*' % self.db_value(value)
+        elif lookup_type == 'icontains':
+            return '%%%s%%' % self.db_value(value)
+        else:
+            return self.db_value(value)
+    
+
+class TextField(Field):
+    db_field = 'text'
+    
+    def db_value(self, value):
+        return self.null_wrapper(value, '')
+    
+    def lookup_value(self, lookup_type, value):
+        if lookup_type == 'contains':
+            return '*%s*' % self.db_value(value)
+        elif lookup_type == 'icontains':
+            return '%%%s%%' % self.db_value(value)
+        else:
+            return self.db_value(value)
+
+
+class DateTimeField(Field):
+    db_field = 'datetime'
+    
+    def python_value(self, value):
+        if isinstance(value, basestring):
+            value = value.rsplit('.', 1)[0]
+            return datetime(*time.strptime(value, '%Y-%m-%d %H:%M:%S')[:6])
+        return value
+
+
+class IntegerField(Field):
+    db_field = 'integer'
+    
+    def db_value(self, value):
+        return self.null_wrapper(value, 0)
+    
+    def python_value(self, value):
+        if value is not None:
+            return int(value)
+
+
+class BooleanField(IntegerField):
+    db_field = 'boolean'
+    
+    def db_value(self, value):
+        if value:
+            return 1
+        return 0
+    
+    def python_value(self, value):
+        return bool(value)
+
+
+class FloatField(Field):
+    db_field = 'float'
+    
+    def db_value(self, value):
+        return self.null_wrapper(value, 0.0)
+    
+    def python_value(self, value):
+        if value is not None:
+            return float(value)
+
+
+class PrimaryKeyField(IntegerField):
+    db_field = 'primary_key'
+    field_template = "%(column_type)s NOT NULL PRIMARY KEY"
+
+
+class ForeignRelatedObject(object):    
+    def __init__(self, to, name):
+        self.field_name = name
+        self.to = to
+        self.cache_name = '_cache_%s' % name
+    
+    def __get__(self, instance, instance_type=None):
+        if not getattr(instance, self.cache_name, None):
+            id = getattr(instance, self.field_name, 0)
+            qr = self.to.select().where(**{self.to._meta.pk_name: id}).execute()
+            setattr(instance, self.cache_name, qr.next())
+        return getattr(instance, self.cache_name)
+    
+    def __set__(self, instance, obj):
+        assert isinstance(obj, self.to), "Cannot assign %s, invalid type" % obj
+        setattr(instance, self.field_name, obj.get_pk())
+        setattr(instance, self.cache_name, obj)
+
+
+class ReverseForeignRelatedObject(object):
+    def __init__(self, related_model, name):
+        self.field_name = name
+        self.related_model = related_model
+    
+    def __get__(self, instance, instance_type=None):
+        query = {self.field_name: instance.get_pk()}
+        qr = self.related_model.select().where(**query)
+        return qr
+
+
+class ForeignKeyField(IntegerField):
+    db_field = 'foreign_key'
+    field_template = '%(column_type)s%(nullable)s REFERENCES %(to_table)s (%(to_pk)s)'
+    
+    def __init__(self, to, null=False, related_name=None, *args, **kwargs):
+        self.to = to
+        self.related_name = related_name
+        kwargs.update({
+            'to_table': to._meta.db_table,
+            'to_pk': to._meta.pk_name
+        })
+        super(ForeignKeyField, self).__init__(null=null, *args, **kwargs)
+    
+    def add_to_class(self, klass, name):
+        self.descriptor = name
+        self.name = name + '_id'
+        self.model = klass
+        
+        if self.related_name is None:
+            self.related_name = klass._meta.db_table + '_set'
+        
+        klass._meta.rel_fields[name] = self.name
+        setattr(klass, self.descriptor, ForeignRelatedObject(self.to, self.name))
+        setattr(klass, self.name, None)
+        
+        reverse_rel = ReverseForeignRelatedObject(klass, self.name)
+        setattr(self.to, self.related_name, reverse_rel)
+    
+    def lookup_value(self, lookup_type, value):
+        if isinstance(value, Model):
+            return value.get_pk()
+        return value or None
+    
+    def db_value(self, value):
+        if isinstance(value, Model):
+            return value.get_pk()
+        return value
+
+
+# define a default database object in the module scope
+database = SqliteDatabase(DATABASE_NAME)
+
+
+class BaseModelOptions(object):
+    def __init__(self, model_class, options=None):
+        # configurable options
+        options = options or {'database': database}
+        for k, v in options.items():
+            setattr(self, k, v)
+        
+        self.rel_fields = {}
+        self.fields = {}
+        self.model_class = model_class
+    
+    def get_field_names(self):
+        fields = [self.pk_name]
+        fields.extend([f for f in sorted(self.fields.keys()) if f != self.pk_name])
+        return fields
+    
+    def get_field_by_name(self, name):
+        if name in self.fields:
+            return self.fields[name]
+        raise AttributeError('Field named %s not found' % name)
+    
+    def get_related_field_by_name(self, name):
+        if name in self.rel_fields:
+            return self.fields[self.rel_fields[name]]
+    
+    def get_related_field_for_model(self, model, name=None):
+        for field in self.fields.values():
+            if isinstance(field, ForeignKeyField) and field.to == model:
+                if name is None or name == field.name or name == field.descriptor:
+                    return field
+    
+    def get_reverse_related_field_for_model(self, model, name=None):
+        for field in model._meta.fields.values():
+            if isinstance(field, ForeignKeyField) and field.to == self.model_class:
+                if name is None or name == field.name or name == field.descriptor:
+                    return field
+    
+    def rel_exists(self, model):
+        return self.get_related_field_for_model(model) or \
+               self.get_reverse_related_field_for_model(model)
+
+
+class BaseModel(type):
+    inheritable_options = ['database']
+    
+    def __new__(cls, name, bases, attrs):
+        cls = super(BaseModel, cls).__new__(cls, name, bases, attrs)
+
+        attr_dict = {}
+        meta = attrs.pop('Meta', None)
+        if meta:
+            attr_dict = meta.__dict__
+        
+        for b in bases:
+            base_meta = getattr(b, '_meta', None)
+            if not base_meta:
+                continue
+            
+            for (k, v) in base_meta.__dict__.items():
+                if k in cls.inheritable_options and k not in attr_dict:
+                    attr_dict[k] = v
+        
+        _meta = BaseModelOptions(cls, attr_dict)
+        
+        if not hasattr(_meta, 'db_table'):
+            _meta.db_table = re.sub('[^a-z]+', '_', cls.__name__.lower())
+
+        setattr(cls, '_meta', _meta)
+        
+        _meta.pk_name = None
+
+        for name, attr in cls.__dict__.items():
+            if isinstance(attr, Field):
+                attr.add_to_class(cls, name)
+                _meta.fields[attr.name] = attr
+                if isinstance(attr, PrimaryKeyField):
+                    _meta.pk_name = attr.name
+        
+        if _meta.pk_name is None:
+            _meta.pk_name = 'id'
+            pk = PrimaryKeyField()
+            pk.add_to_class(cls, _meta.pk_name)
+            _meta.fields[_meta.pk_name] = pk
+
+        _meta.model_name = cls.__name__
+                
+        if hasattr(cls, '__unicode__'):
+            setattr(cls, '__repr__', lambda self: '<%s: %s>' % (
+                _meta.model_name, self.__unicode__()))
+
+        exception_class = type('%sDoesNotExist' % _meta.model_name, (DoesNotExist,), {})
+        cls.DoesNotExist = exception_class
+        
+        return cls
+
+
+class Model(object):
+    __metaclass__ = BaseModel
+    
+    def __init__(self, *args, **kwargs):
+        for k, v in kwargs.items():
+            setattr(self, k, v)
+    
+    def __eq__(self, other):
+        return other.__class__ == self.__class__ and \
+               self.get_pk() and \
+               other.get_pk() == self.get_pk()
+    
+    def get_field_dict(self):
+        def get_field_val(field):
+            field_value = getattr(self, field.name)
+            if not self.get_pk() and field_value is None and field.default is not None:
+                if callable(field.default):
+                    field_value = field.default()
+                else:
+                    field_value = field.default
+                setattr(self, field.name, field_value)
+            return (field.name, field_value)
+        
+        pairs = map(get_field_val, self._meta.fields.values())
+        return dict(pairs)
+    
+    @classmethod
+    def create_table(cls):
+        cls._meta.database.create_table(cls)
+        
+        for field_name, field_obj in cls._meta.fields.items():
+            if isinstance(field_obj, PrimaryKeyField):
+                cls._meta.database.create_index(cls, field_obj.name, True)
+            elif isinstance(field_obj, ForeignKeyField):
+                cls._meta.database.create_index(cls, field_obj.name)
+            elif field_obj.db_index:
+                cls._meta.database.create_index(cls, field_obj.name)
+    
+    @classmethod
+    def drop_table(cls, fail_silently=False):
+        cls._meta.database.drop_table(cls, fail_silently)
+    
+    @classmethod
+    def select(cls, query=None):
+        return SelectQuery(cls, query)
+    
+    @classmethod
+    def update(cls, **query):
+        return UpdateQuery(cls, **query)
+    
+    @classmethod
+    def insert(cls, **query):
+        return InsertQuery(cls, **query)
+    
+    @classmethod
+    def delete(cls, **query):
+        return DeleteQuery(cls, **query)
+    
+    @classmethod
+    def raw(cls, sql, *params):
+        return RawQuery(cls, sql, *params)
+
+    @classmethod
+    def create(cls, **query):
+        inst = cls(**query)
+        inst.save()
+        return inst
+
+    @classmethod
+    def get_or_create(cls, **query):
+        try:
+            inst = cls.get(**query)
+        except cls.DoesNotExist:
+            inst = cls.create(**query)
+        return inst
+    
+    @classmethod            
+    def get(cls, *args, **kwargs):
+        query = cls.select().where(*args, **kwargs).paginate(1, 1)
+        try:
+            return query.execute().next()
+        except StopIteration:
+            raise cls.DoesNotExist('instance matching query does not exist:\nSQL: %s\nPARAMS: %s' % (
+                query.sql()
+            ))
+    
+    def get_pk(self):
+        return getattr(self, self._meta.pk_name, None)
+    
+    def save(self):
+        field_dict = self.get_field_dict()
+        field_dict.pop(self._meta.pk_name)
+        if self.get_pk():
+            update = self.update(
+                **field_dict
+            ).where(**{self._meta.pk_name: self.get_pk()})
+            update.execute()
+        else:
+            insert = self.insert(**field_dict)
+            new_pk = insert.execute()
+            setattr(self, self._meta.pk_name, new_pk)
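
The peewee-style ORM assembled above wires fields into models through the
BaseModel metaclass: each Field's add_to_class hooks it onto the class, a
PrimaryKeyField named "id" is injected when none is declared, and
ForeignKeyField installs both a forward descriptor and a reverse
"<table>_set" query on the target model.  A minimal usage sketch, assuming
the SqliteDatabase and SelectQuery machinery defined earlier in this diff;
the Blog/Entry models are illustrative, not part of the changeset:

    # Hypothetical models driving the ORM added above.
    class Blog(Model):
        title = CharField()

    class Entry(Model):
        blog = ForeignKeyField(Blog)   # adds Entry.blog_id and Blog.entry_set
        body = TextField()

    Blog.create_table()
    Entry.create_table()

    b = Blog.create(title="yt")                  # INSERT; the new pk lands on b.id
    Entry.create(blog=b, body="first post")      # the FK descriptor accepts a Model

    # The reverse relation returns an unexecuted SelectQuery.
    for e in b.entry_set.execute():
        print e.body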


http://bitbucket.org/yt_analysis/yt/changeset/140b2ced6729/
changeset:   140b2ced6729
branch:      deliberate_fields
user:        MatthewTurk
date:        2011-09-09 17:35:36
summary:     Tests for Enzo datasets now run
affected #:  6 files (-1 bytes)

--- a/yt/data_objects/field_info_container.py	Fri Sep 09 10:50:04 2011 -0400
+++ b/yt/data_objects/field_info_container.py	Fri Sep 09 11:35:36 2011 -0400
@@ -76,6 +76,11 @@
         obj.fallback = fallback
         return obj
 
+    def __contains__(self, key):
+        if dict.__contains__(self, key): return True
+        if self.fallback is None: return False
+        return self.fallback.has_key(key)
+
 def TranslationFunc(field_name):
     def _TranslationFunc(field, data):
         return data[field]
@@ -179,7 +184,7 @@
 
     def __missing__(self, item):
         FI = getattr(self.pf, "field_info", FieldInfo)
-        if FI.has_key(item) and FI[item]._function.func_name != '<lambda>':
+        if FI.has_key(item) and FI[item]._function.func_name != 'NullFunc':
             try:
                 vv = FI[item](self)
             except NeedsGridType as exc:
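
The `__contains__` added here completes the fallback chain: membership
tests, like `has_key` and `__missing__` before it, now defer to the parent
container, and the data-object `__missing__` hook checks for 'NullFunc'
rather than anonymous lambdas to decide whether a field has a real
generator.  The fallback pattern itself, reduced to a standalone sketch
(the toy field/unit values are illustrative):

    class FallbackDict(dict):
        # Minimal sketch of the FieldInfoContainer fallback pattern above:
        # lookups that miss locally are retried on a parent container.
        fallback = None

        @classmethod
        def create_with_fallback(cls, fallback):
            obj = cls()
            obj.fallback = fallback
            return obj

        def __missing__(self, key):
            if self.fallback is None:
                raise KeyError(key)
            return self.fallback[key]

        def __contains__(self, key):
            if dict.__contains__(self, key):
                return True
            return self.fallback is not None and key in self.fallback

    universal = FallbackDict(Density="g/cm**3")
    enzo = FallbackDict.create_with_fallback(universal)
    enzo["Temperature"] = "K"
    assert "Density" in enzo              # resolved through the fallback
    assert enzo["Density"] == "g/cm**3"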


--- a/yt/data_objects/universal_fields.py	Fri Sep 09 10:50:04 2011 -0400
+++ b/yt/data_objects/universal_fields.py	Fri Sep 09 11:35:36 2011 -0400
@@ -139,88 +139,6 @@
 add_field("SoundSpeed", function=_SoundSpeed,
           units=r"\rm{cm}/\rm{s}")
 
-def particle_func(p_field, dtype='float64'):
-    def _Particles(field, data):
-        io = data.hierarchy.io
-        if not data.NumberOfParticles > 0:
-            return na.array([], dtype=dtype)
-        try:
-            return io._read_data_set(data, p_field).astype(dtype)
-        except io._read_exception:
-            pass
-        # This is bad.  But it's the best idea I have right now.
-        return data._read_data(p_field.replace("_"," ")).astype(dtype)
-    return _Particles
-for pf in ["type", "mass"] + \
-          ["position_%s" % ax for ax in 'xyz']:
-    pfunc = particle_func("particle_%s" % (pf))
-    add_field("particle_%s" % pf, function=pfunc,
-              validators = [ValidateSpatial(0)],
-              particle_type=True)
-
-def _convRetainInt(data):
-    return 1
-add_field("particle_index", function=particle_func("particle_index", "int64"),
-          validators = [ValidateSpatial(0)], particle_type=True,
-          convert_function=_convRetainInt)
-
-def _get_vel_convert(ax):
-    def _convert_p_vel(data):
-        return data.convert("%s-velocity" % ax)
-    return _convert_p_vel
-for ax in 'xyz':
-    pf = "particle_velocity_%s" % ax
-    pfunc = particle_func(pf)
-    cfunc = _get_vel_convert(ax)
-    add_field(pf, function=pfunc, convert_function=cfunc,
-              validators = [ValidateSpatial(0)],
-              particle_type=True)
-
-for pf in ["creation_time", "dynamical_time", "metallicity_fraction"]:
-    pfunc = particle_func(pf)
-    add_field(pf, function=pfunc,
-              validators = [ValidateSpatial(0),
-                            ValidateDataField(pf)],
-              particle_type=True)
-add_field("particle_mass", function=particle_func("particle_mass"),
-          validators=[ValidateSpatial(0)], particle_type=True)
-
-def _ParticleAge(field, data):
-    current_time = data.pf.current_time
-    return (current_time - data["creation_time"])
-def _convertParticleAge(data):
-    return data.convert("years")
-add_field("ParticleAge", function=_ParticleAge,
-          validators=[ValidateDataField("creation_time")],
-          particle_type=True, convert_function=_convertParticleAge)
-
-def _ParticleMass(field, data):
-    particles = data["particle_mass"].astype('float64') * \
-                just_one(data["CellVolumeCode"].ravel())
-    # Note that we mandate grid-type here, so this is okay
-    return particles
-
-def _convertParticleMass(data):
-    return data.convert("Density")*(data.convert("cm")**3.0)
-def _IOLevelParticleMass(grid):
-    dd = dict(particle_mass = na.ones(1), CellVolumeCode=grid["CellVolumeCode"])
-    cf = (_ParticleMass(None, dd) * _convertParticleMass(grid))[0]
-    return cf
-def _convertParticleMassMsun(data):
-    return data.convert("Density")*((data.convert("cm")**3.0)/1.989e33)
-def _IOLevelParticleMassMsun(grid):
-    dd = dict(particle_mass = na.ones(1), CellVolumeCode=grid["CellVolumeCode"])
-    cf = (_ParticleMass(None, dd) * _convertParticleMassMsun(grid))[0]
-    return cf
-add_field("ParticleMass",
-          function=_ParticleMass, validators=[ValidateSpatial(0)],
-          particle_type=True, convert_function=_convertParticleMass,
-          particle_convert_function=_IOLevelParticleMass)
-add_field("ParticleMassMsun",
-          function=_ParticleMass, validators=[ValidateSpatial(0)],
-          particle_type=True, convert_function=_convertParticleMassMsun,
-          particle_convert_function=_IOLevelParticleMassMsun)
-
 def _RadialMachNumber(field, data):
     """M{|v|/t_sound}"""
     return na.abs(data["RadialVelocity"]) / data["SoundSpeed"]


--- a/yt/frontends/enzo/fields.py	Fri Sep 09 10:50:04 2011 -0400
+++ b/yt/frontends/enzo/fields.py	Fri Sep 09 11:35:36 2011 -0400
@@ -38,6 +38,8 @@
 import yt.data_objects.universal_fields
 from yt.utilities.physical_constants import \
     mh
+from yt.funcs import *
+
 import yt.utilities.amr_utils as amr_utils
 
 EnzoFieldInfo = FieldInfoContainer.create_with_fallback(FieldInfo)
@@ -399,7 +401,90 @@
 
 add_field("Bmag", function=_Bmag,display_name=r"|B|",units=r"\mathrm{Gau\ss}")
 
+# Particle functions
+
+def particle_func(p_field, dtype='float64'):
+    def _Particles(field, data):
+        io = data.hierarchy.io
+        if not data.NumberOfParticles > 0:
+            return na.array([], dtype=dtype)
+        try:
+            return io._read_data_set(data, p_field).astype(dtype)
+        except io._read_exception:
+            pass
+        # This is bad.  But it's the best idea I have right now.
+        return data._read_data(p_field.replace("_"," ")).astype(dtype)
+    return _Particles
+for pf in ["type", "mass"] + \
+          ["position_%s" % ax for ax in 'xyz']:
+    pfunc = particle_func("particle_%s" % (pf))
+    add_enzo_field("particle_%s" % pf, function=pfunc,
+              validators = [ValidateSpatial(0)],
+              particle_type=True)
     
+def _convRetainInt(data):
+    return 1
+add_enzo_field("particle_index", function=particle_func("particle_index", "int64"),
+          validators = [ValidateSpatial(0)], particle_type=True,
+          convert_function=_convRetainInt)
+
+def _get_vel_convert(ax):
+    def _convert_p_vel(data):
+        return data.convert("%s-velocity" % ax)
+    return _convert_p_vel
+for ax in 'xyz':
+    pf = "particle_velocity_%s" % ax
+    pfunc = particle_func(pf)
+    cfunc = _get_vel_convert(ax)
+    add_enzo_field(pf, function=pfunc, convert_function=cfunc,
+              validators = [ValidateSpatial(0)],
+              particle_type=True)
+
+for pf in ["creation_time", "dynamical_time", "metallicity_fraction"]:
+    pfunc = particle_func(pf)
+    add_enzo_field(pf, function=pfunc,
+              validators = [ValidateSpatial(0),
+                            ValidateDataField(pf)],
+              particle_type=True)
+add_field("particle_mass", function=particle_func("particle_mass"),
+          validators=[ValidateSpatial(0)], particle_type=True)
+
+def _ParticleAge(field, data):
+    current_time = data.pf.current_time
+    return (current_time - data["creation_time"])
+def _convertParticleAge(data):
+    return data.convert("years")
+add_field("ParticleAge", function=_ParticleAge,
+          validators=[ValidateDataField("creation_time")],
+          particle_type=True, convert_function=_convertParticleAge)
+
+def _ParticleMass(field, data):
+    particles = data["particle_mass"].astype('float64') * \
+                just_one(data["CellVolumeCode"].ravel())
+    # Note that we mandate grid-type here, so this is okay
+    return particles
+
+def _convertParticleMass(data):
+    return data.convert("Density")*(data.convert("cm")**3.0)
+def _IOLevelParticleMass(grid):
+    dd = dict(particle_mass = na.ones(1), CellVolumeCode=grid["CellVolumeCode"])
+    cf = (_ParticleMass(None, dd) * _convertParticleMass(grid))[0]
+    return cf
+def _convertParticleMassMsun(data):
+    return data.convert("Density")*((data.convert("cm")**3.0)/1.989e33)
+def _IOLevelParticleMassMsun(grid):
+    dd = dict(particle_mass = na.ones(1), CellVolumeCode=grid["CellVolumeCode"])
+    cf = (_ParticleMass(None, dd) * _convertParticleMassMsun(grid))[0]
+    return cf
+add_field("ParticleMass",
+          function=_ParticleMass, validators=[ValidateSpatial(0)],
+          particle_type=True, convert_function=_convertParticleMass,
+          particle_convert_function=_IOLevelParticleMass)
+add_field("ParticleMassMsun",
+          function=_ParticleMass, validators=[ValidateSpatial(0)],
+          particle_type=True, convert_function=_convertParticleMassMsun,
+          particle_convert_function=_IOLevelParticleMassMsun)
+
 #
 # Now we do overrides for 2D fields
 #
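
This moves the particle-field definitions out of universal_fields.py
(removed in the hunk further up) and registers most of them with
add_enzo_field, so they land in the Enzo-specific container rather than the
universal one.  The `_get_vel_convert` factory is there to defeat Python's
late-binding closures: a converter defined inline in the for loop would see
only the final value of `ax`.  A minimal illustration of the pitfall and
the fix:

    # Late binding: all three closures share the loop variable.
    broken = [lambda: ax for ax in 'xyz']
    print [f() for f in broken]        # ['z', 'z', 'z']

    # The factory captures ax by value, as _get_vel_convert does.
    def make_getter(ax):
        return lambda: ax
    fixed = [make_getter(ax) for ax in 'xyz']
    print [f() for f in fixed]         # ['x', 'y', 'z']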


--- a/yt/frontends/gdf/api.py	Fri Sep 09 10:50:04 2011 -0400
+++ b/yt/frontends/gdf/api.py	Fri Sep 09 11:35:36 2011 -0400
@@ -34,8 +34,8 @@
       GDFStaticOutput
 
 from .fields import \
-      GDFFieldContainer, \
       GDFFieldInfo, \
+      KnownGDFFields, \
       add_gdf_field
 
 from .io import \


--- a/yt/frontends/gdf/data_structures.py	Fri Sep 09 10:50:04 2011 -0400
+++ b/yt/frontends/gdf/data_structures.py	Fri Sep 09 11:35:36 2011 -0400
@@ -32,7 +32,7 @@
 from yt.data_objects.static_output import \
            StaticOutput
 
-from .fields import GDFFieldContainer, KnownGDFFields
+from .fields import GDFFieldInfo, KnownGDFFields
 from yt.data_objects.field_info_container import \
     FieldInfoContainer, NullFunc
 
@@ -139,8 +139,8 @@
 
 class GDFStaticOutput(StaticOutput):
     _hierarchy_class = GDFHierarchy
-    _fieldinfo_fallback = GDFFieldContainer
-    _fieldinfo_known = GDFKnownFields
+    _fieldinfo_fallback = GDFFieldInfo
+    _fieldinfo_known = KnownGDFFields
     
     def __init__(self, filename, data_style='grid_data_format',
                  storage_filename = None):


--- a/yt/utilities/parameter_file_storage.py	Fri Sep 09 10:50:04 2011 -0400
+++ b/yt/utilities/parameter_file_storage.py	Fri Sep 09 11:35:36 2011 -0400
@@ -103,6 +103,7 @@
         """
         This function ensures that the storage database exists and can be used.
         """
+        return
         dbn = self._get_db_name()
         dbdir = os.path.dirname(dbn)
         try:
@@ -130,11 +131,13 @@
 
     def get_pf_hash(self, hash):
         """ This returns a parameter file based on a hash. """
+        return
         output = self.output_model.get(dset_uuid = hash)
         return self._convert_pf(output)
 
     def _convert_pf(self, inst):
         """ This turns a model into a parameter file. """
+        return
         fn = inst.pf_path
         if inst.output_type not in output_type_registry:
             raise UnknownStaticOutputType(inst.output_type)
@@ -155,6 +158,7 @@
         recorded in the storage unit.  In doing so, it will update path
         and "last_seen" information.
         """
+        return
         q = self.output_model.select().where(dset_uuid = pf._hash())
         q.execute()
         if q.count() == 0:


http://bitbucket.org/yt_analysis/yt/changeset/2b620fac156d/
changeset:   2b620fac156d
branch:      yt
user:        MatthewTurk
date:        2011-09-17 01:16:19
summary:     First pass at interpolation-in-batch for ray casting.
affected #:  1 file (-1 bytes)

--- a/yt/utilities/_amr_utils/VolumeIntegrator.pyx	Fri Sep 09 10:35:19 2011 -0400
+++ b/yt/utilities/_amr_utils/VolumeIntegrator.pyx	Fri Sep 16 19:16:19 2011 -0400
@@ -710,15 +710,33 @@
         dt = (exit_t - enter_t) / tf.ns # 4 samples should be dt=0.25
         cdef int offset = ci[0] * (self.dims[1] + 1) * (self.dims[2] + 1) \
                         + ci[1] * (self.dims[2] + 1) + ci[2]
+        # The initial and final values can be linearly interpolated between; so
+        # we just have to calculate our initial and final values.
+        cdef np.float64_t slopes[6]
         for i in range(3):
-            cell_left[i] = ci[i] * self.dds[i] + self.left_edge[i]
-            # this gets us dp as the current first sample position
-            pos[i] = (enter_t + 0.5 * dt) * v_dir[i] + v_pos[i]
-            dp[i] = pos[i] - cell_left[i]
+            dp[i] = (enter_t + 0.5 * dt) * v_dir[i] + v_pos[i]
+            dp[i] -= ci[i] * self.dds[i] + self.left_edge[i]
             dp[i] *= self.idds[i]
             ds[i] = v_dir[i] * self.idds[i] * dt
-            local_dds[i] = v_dir[i] * dt
+        for i in range(self.n_fields):
+            slopes[i] = offset_interpolate(self.dims, dp,
+                            self.data[i] + offset)
+        for i in range(3):
+            dp[i] += ds[i] * tf.ns
+        cdef np.float64_t temp
+        for i in range(self.n_fields):
+            temp = slopes[i]
+            slopes[i] -= offset_interpolate(self.dims, dp,
+                             self.data[i] + offset)
+            slopes[i] *= -1.0/tf.ns
+            self.dvs[i] = temp
         if self.star_list != NULL:
+            for i in range(3):
+                cell_left[i] = ci[i] * self.dds[i] + self.left_edge[i]
+                # this gets us dp as the current first sample position
+                pos[i] = (enter_t + 0.5 * dt) * v_dir[i] + v_pos[i]
+                dp[i] -= tf.ns * ds[i]
+                local_dds[i] = v_dir[i] * dt
             ballq = kdtree_utils.kd_nearest_range3(
                 self.star_list, cell_left[0] + self.dds[0]*0.5,
                                 cell_left[1] + self.dds[1]*0.5,
@@ -726,15 +744,16 @@
                                 self.star_er + 0.9*self.dds[0])
                                             # ~0.866 + a bit
         for dti in range(tf.ns): 
-            for i in range(self.n_fields):
-                self.dvs[i] = offset_interpolate(self.dims, dp, self.data[i] + offset)
             #if (dv < tf.x_bounds[0]) or (dv > tf.x_bounds[1]):
             #    continue
-            if self.star_list != NULL: self.add_stars(ballq, dt, pos, rgba)
+            if self.star_list != NULL:
+                self.add_stars(ballq, dt, pos, rgba)
+                for i in range(3):
+                    dp[i] += ds[i]
+                    pos[i] += local_dds[i]
             tf.eval_transfer(dt, self.dvs, rgba, grad)
-            for i in range(3):
-                dp[i] += ds[i]
-                pos[i] += local_dds[i]
+            for i in range(self.n_fields):
+                self.dvs[i] += slopes[i]
         if ballq != NULL: kdtree_utils.kd_res_free(ballq)
 
     @cython.boundscheck(False)
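
Instead of one trilinear interpolation per sample, the cell's field values
are now interpolated only at the first and one-past-last sample positions,
and intermediate samples advance by a fixed slope; this linearizes the (in
general cubic) trilinear profile along the ray, consistent with the "first
pass" in the summary.  A NumPy sketch of the slopes[]/dvs[] bookkeeping,
with `interp` standing in for offset_interpolate:

    import numpy as np

    def sample_segment(interp, dp0, ds, ns):
        # interp() is called twice per cell instead of ns times; the
        # per-sample slope reproduces the slopes[]/dvs[] updates above.
        v0 = interp(dp0)                  # value at the first sample
        v1 = interp(dp0 + ns * ds)        # value one segment further on
        slope = (v1 - v0) / ns
        dv, out = v0, np.empty(ns)
        for i in range(ns):
            out[i] = dv                   # fed to tf.eval_transfer in yt
            dv += slope
        return out

    # A field that is linear along the ray is reproduced exactly:
    f = lambda p: 2.0 * p.sum()
    print sample_segment(f, np.zeros(3), np.ones(3) * 0.25, 4)   # [0. 1.5 3. 4.5]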


http://bitbucket.org/yt_analysis/yt/changeset/b964d98beb86/
changeset:   b964d98beb86
branch:      yt
user:        MatthewTurk
date:        2011-09-17 01:25:38
summary:     Turning off parameter file storage for now.
affected #:  1 file (-1 bytes)

--- a/yt/utilities/parameter_file_storage.py	Fri Sep 16 19:16:19 2011 -0400
+++ b/yt/utilities/parameter_file_storage.py	Fri Sep 16 19:25:38 2011 -0400
@@ -120,7 +120,15 @@
             _field_spec,
         )
         self.output_model._meta.pk_name = "dset_uuid"
-        self.conn.connect()
+        try:
+            self.conn.connect()
+        except:
+            self.conn = None
+        try:
+            self.output_model.create_table()
+        except:
+            pass
+        self.conn = None
 
     def _get_db_name(self):
         base_file_name = ytcfg.get("yt", "ParameterFileStore")
@@ -129,12 +137,14 @@
         return os.path.expanduser("~/.yt/%s" % base_file_name)
 
     def get_pf_hash(self, hash):
+        if self.conn is None: return
         """ This returns a parameter file based on a hash. """
         output = self.output_model.get(dset_uuid = hash)
         return self._convert_pf(output)
 
     def _convert_pf(self, inst):
         """ This turns a model into a parameter file. """
+        if self.conn is None: return
         fn = inst.pf_path
         if inst.output_type not in output_type_registry:
             raise UnknownStaticOutputType(inst.output_type)
@@ -155,6 +165,7 @@
         recorded in the storage unit.  In doing so, it will update path
         and "last_seen" information.
         """
+        if self.conn is None: return
         q = self.output_model.select().where(dset_uuid = pf._hash())
         q.execute()
         if q.count() == 0:
@@ -170,6 +181,7 @@
 
     def insert_pf(self, pf):
         """ This will insert a new *pf* and flush the database to disk. """
+        if self.conn is None: return
         q = self.output_model.insert(
                     dset_uuid = pf._hash(),
                     output_type = pf.__class__.__name__,
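
The net effect: if connect() fails the connection is nulled, table creation
is attempted best-effort, and then self.conn is unconditionally set to None,
so every guarded method short-circuits and the store becomes a no-op,
matching the "turning off ... for now" summary.  The guard pattern,
condensed into a standalone sketch (class and connect function are
hypothetical):

    class NoOpStoreSketch(object):
        # Condensed sketch of the guards above: a failed (or, as in this
        # changeset, deliberately discarded) connection turns every public
        # method into a silent no-op.
        def __init__(self, connect):
            try:
                self.conn = connect()
            except Exception:
                self.conn = None

        def insert_pf(self, pf):
            if self.conn is None: return
            # ... the real INSERT would run here ...

    def failing_connect():
        raise IOError("no database available")

    store = NoOpStoreSketch(failing_connect)
    store.insert_pf(object())              # silently does nothing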


http://bitbucket.org/yt_analysis/yt/changeset/6bef3b8e16d0/
changeset:   6bef3b8e16d0
branch:      deliberate_fields
user:        xarthisius
date:        2011-09-15 23:13:19
summary:     h5.H5Error was removed in h5py-2.0 in favour of Python native exceptions
affected #:  1 file (-1 bytes)

--- a/yt/data_objects/hierarchy.py	Fri Sep 09 11:35:36 2011 -0400
+++ b/yt/data_objects/hierarchy.py	Thu Sep 15 23:13:19 2011 +0200
@@ -229,10 +229,6 @@
         """
 
         if self._data_mode != 'a': return
-        if "ArgsError" in dir(h5py.h5):
-            exception = (h5py.h5.ArgsError, KeyError)
-        else:
-            exception = (h5py.h5.H5Error, KeyError)
         try:
             node_loc = self._data_file[node]
             if name in node_loc and force:
@@ -240,7 +236,7 @@
                 del self._data_file[node][name]
             elif name in node_loc and passthrough:
                 return
-        except exception:
+        except:
             pass
         myGroup = self._data_file['/']
         for q in node.split('/'):


http://bitbucket.org/yt_analysis/yt/changeset/9f2b01c50a18/
changeset:   9f2b01c50a18
branch:      stable
user:        xarthisius
date:        2011-09-15 23:17:17
summary:     h5.H5Error was removed in h5py-2.0 in favour of Python native exceptions
affected #:  1 file (-1 bytes)

--- a/yt/data_objects/hierarchy.py	Sat Sep 03 08:46:55 2011 -0400
+++ b/yt/data_objects/hierarchy.py	Thu Sep 15 23:17:17 2011 +0200
@@ -203,10 +203,6 @@
         """
 
         if self._data_mode != 'a': return
-        if "ArgsError" in dir(h5py.h5):
-            exception = (h5py.h5.ArgsError, KeyError)
-        else:
-            exception = (h5py.h5.H5Error, KeyError)
         try:
             node_loc = self._data_file[node]
             if name in node_loc and force:
@@ -214,7 +210,7 @@
                 del self._data_file[node][name]
             elif name in node_loc and passthrough:
                 return
-        except exception:
+        except:
             pass
         myGroup = self._data_file['/']
         for q in node.split('/'):
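
h5py 2.0 dropped the h5py.h5 error classes in favour of built-in Python
exceptions, which is why the old dir() probe for ArgsError stopped working.
The bare `except:` above is the expedient fix; a version-agnostic tuple
that stays narrower might look like this sketch (not what the changeset
does):

    import h5py

    # h5py >= 2.0 raises native exceptions such as KeyError, while h5py 1.x
    # additionally exposed ArgsError / H5Error under h5py.h5.
    _h5_errors = (KeyError,)
    for _name in ("ArgsError", "H5Error"):     # h5py 1.x only
        if hasattr(h5py.h5, _name):
            _h5_errors = _h5_errors + (getattr(h5py.h5, _name),)

    def node_exists(data_file, node):
        try:
            return data_file[node] is not None
        except _h5_errors:
            return False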


http://bitbucket.org/yt_analysis/yt/changeset/af86d6cac0c7/
changeset:   af86d6cac0c7
branch:      stable
user:        MatthewTurk
date:        2011-10-05 23:01:08
summary:     Merging stable to one head
affected #:  1 file (-1 bytes)

http://bitbucket.org/yt_analysis/yt/changeset/059eee386c7d/
changeset:   059eee386c7d
branch:      yt
user:        jsoishi
date:        2011-10-14 21:46:08
summary:     fixed domain_dimensions for 2D case
affected #:  1 file (-1 bytes)

--- a/yt/frontends/enzo/data_structures.py	Wed Oct 05 17:00:00 2011 -0400
+++ b/yt/frontends/enzo/data_structures.py	Fri Oct 14 12:46:08 2011 -0700
@@ -794,6 +794,10 @@
         self.dimensionality = self.parameters["TopGridRank"]
         if self.dimensionality > 1:
             self.domain_dimensions = self.parameters["TopGridDimensions"]
+            if len(self.domain_dimensions) < 3:
+                tmp = self.domain_dimensions.tolist()
+                tmp.append(1)
+                self.domain_dimensions = na.array(tmp)
             self.domain_left_edge = na.array(self.parameters["DomainLeftEdge"],
                                              "float64").copy()
             self.domain_right_edge = na.array(self.parameters["DomainRightEdge"],
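
The fix pads a 2D TopGridDimensions out to three components so downstream
code can keep assuming rank-3 domain_dimensions.  The same operation in
isolation (`na` is yt's numpy alias):

    import numpy as na   # yt aliases numpy as `na`

    dims = na.array([64, 64])                       # a 2D TopGridDimensions
    if len(dims) < 3:
        dims = na.array(dims.tolist() + [1] * (3 - len(dims)))
    print dims                                      # [64 64  1]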


http://bitbucket.org/yt_analysis/yt/changeset/cadffe09ce54/
changeset:   cadffe09ce54
branch:      yt
user:        jsoishi
date:        2011-10-14 22:01:17
summary:     merged.
affected #:  12 files (-1 bytes)

--- a/yt/data_objects/data_containers.py	Fri Oct 14 12:46:08 2011 -0700
+++ b/yt/data_objects/data_containers.py	Fri Oct 14 13:01:17 2011 -0700
@@ -40,7 +40,8 @@
 from yt.data_objects.particle_io import particle_handler_registry
 from yt.utilities.amr_utils import find_grids_in_inclined_box, \
     grid_points_in_volume, planar_points_in_volume, VoxelTraversal, \
-    QuadTree, get_box_grids_below_level, ghost_zone_interpolate
+    QuadTree, get_box_grids_below_level, ghost_zone_interpolate, \
+    march_cubes_grid, march_cubes_grid_flux
 from yt.utilities.data_point_utilities import CombineGrids, \
     DataCubeRefine, DataCubeReplace, FillRegion, FillBuffer
 from yt.utilities.definitions import axis_names, x_dict, y_dict
@@ -828,6 +829,25 @@
         for grid in self._grids:
             temp = grid[field]
 
+    def to_frb(self, width, resolution, center = None):
+        if center is None:
+            center = self.get_field_parameter("center")
+            if center is None:
+                center = (self.pf.domain_right_edge
+                        + self.pf.domain_left_edge)/2.0
+        if iterable(width):
+            w, u = width
+            width = w/self.pf[u]
+        if not iterable(resolution):
+            resolution = (resolution, resolution)
+        from yt.visualization.fixed_resolution import FixedResolutionBuffer
+        xax = x_dict[self.axis]
+        yax = y_dict[self.axis]
+        bounds = (center[xax] - width/2.0, center[xax] + width/2.0,
+                  center[yax] - width/2.0, center[yax] + width/2.0)
+        frb = FixedResolutionBuffer(self, bounds, resolution)
+        return frb
+
     def interpolate_discretize(self, LE, RE, field, side, log_spacing=True):
         """
         This returns a uniform grid of points between *LE* and *RE*,
@@ -2411,6 +2431,165 @@
     __quantities = None
     quantities = property(__get_quantities)
 
+    def extract_isocontours(self, field, value, filename = None,
+                            rescale = False, sample_values = None):
+        r"""This identifies isocontours on a cell-by-cell basis, with no
+        consideration of global connectedness, and returns the vertices of the
+        Triangles in that isocontour.
+
+        This function simply returns the vertices of all the triangles
+        calculated by the marching cubes algorithm; for more complex
+        operations, such as identifying connected sets of cells above a given
+        threshold, see the extract_connected_sets function.  This is more
+        useful for calculating, for instance, total isocontour area, or
+        visualizing in an external program (such as `MeshLab
+        <http://meshlab.sf.net>`_.)
+        
+        Parameters
+        ----------
+        field : string
+            Any field that can be obtained in a data object.  This is the field
+            which will be isocontoured.
+        value : float
+            The value at which the isocontour should be calculated.
+        filename : string, optional
+            If supplied, this file will be filled with the vertices in .obj
+            format.  Suitable for loading into meshlab.
+        rescale : bool, optional
+            If true, the vertices will be rescaled within their min/max.
+
+        Returns
+        -------
+        verts : array of floats
+            The array of vertices, x,y,z.  Taken in threes, these are the
+            triangle vertices.
+
+        References
+        ----------
+
+        .. [1] Marching Cubes: http://en.wikipedia.org/wiki/Marching_cubes
+
+        Examples
+        --------
+        This will create a data object, find a nice value in the center, and
+        output the vertices to "triangles.obj" after rescaling them.
+
+        >>> dd = pf.h.all_data()
+        >>> rho = dd.quantities["WeightedAverageQuantity"](
+        ...     "Density", weight="CellMassMsun")
+        >>> verts = dd.extract_isocontours("Density", rho,
+        ...             "triangles.obj", True)
+        """
+        verts = []
+        samples = []
+        for g in self._grids:
+            mask = self._get_cut_mask(g) * g.child_mask
+            vals = g.get_vertex_centered_data(field)
+            if sample_values is not None:
+                svals = g.get_vertex_centered_data(sample_values)
+            else:
+                svals = None
+            my_verts = march_cubes_grid(value, vals, mask, g.LeftEdge, g.dds,
+                                        svals)
+            if sample_values is not None:
+                my_verts, svals = my_verts
+                samples.append(svals)
+            verts.append(my_verts)
+        verts = na.concatenate(verts)
+        if sample_values is not None:
+            samples = na.concatenate(samples)
+        if rescale:
+            mi = na.min(verts, axis=0)
+            ma = na.max(verts, axis=0)
+            verts = (verts - mi) / (ma - mi).max()
+        if filename is not None:
+            f = open(filename, "w")
+            for v1 in verts:
+                f.write("v %0.16e %0.16e %0.16e\n" % (v1[0], v1[1], v1[2]))
+            for i in range(len(verts)/3):
+                f.write("f %s %s %s\n" % (i*3+1, i*3+2, i*3+3))
+        if sample_values is not None:
+            return verts, samples
+        return verts
+
+    def calculate_isocontour_flux(self, field, value,
+                    field_x, field_y, field_z, fluxing_field = None):
+        r"""This identifies isocontours on a cell-by-cell basis, with no
+        consideration of global connectedness, and calculates the flux over
+        those contours.
+
+        This function will conduct marching cubes on all the cells in a given
+        data container (grid-by-grid), and then for each identified triangular
+        segment of an isocontour in a given cell, calculate the gradient (i.e.,
+        normal) in the isocontoured field, interpolate the local value of the
+        "fluxing" field, the area of the triangle, and then return:
+
+        area * local_flux_value * (n dot v)
+
+        where area, local_flux_value, and the vector v are interpolated at the barycenter
+        (weighted by the vertex values) of the triangle.  Note that this
+        specifically allows for the field fluxing across the surface to be
+        *different* from the field being contoured.  If the fluxing_field is
+        not specified, it is assumed to be 1.0 everywhere, and the raw flux
+        with no local-weighting is returned.
+
+        Additionally, the returned flux is defined as flux *into* the surface,
+        not flux *out of* the surface.
+        
+        Parameters
+        ----------
+        field : string
+            Any field that can be obtained in a data object.  This is the field
+            which will be isocontoured and used as the "local_value" in the
+            flux equation.
+        value : float
+            The value at which the isocontour should be calculated.
+        field_x : string
+            The x-component field
+        field_y : string
+            The y-component field
+        field_z : string
+            The z-component field
+        fluxing_field : string, optional
+            The field whose passage over the surface is of interest.  If not
+            specified, assumed to be 1.0 everywhere.
+
+        Returns
+        -------
+        flux : float
+            The summed flux.  Note that it is not currently scaled; this is
+            simply the code-unit area times the fields.
+
+        References
+        ----------
+
+        .. [1] Marching Cubes: http://en.wikipedia.org/wiki/Marching_cubes
+
+        Examples
+        --------
+        This will create a data object, find a nice value in the center, and
+        calculate the metal flux over it.
+
+        >>> dd = pf.h.all_data()
+        >>> rho = dd.quantities["WeightedAverageQuantity"](
+        ...     "Density", weight="CellMassMsun")
+        >>> flux = dd.calculate_isocontour_flux("Density", rho,
+        ...     "x-velocity", "y-velocity", "z-velocity", "Metal_Density")
+        """
+        flux = 0.0
+        for g in self._grids:
+            mask = self._get_cut_mask(g) * g.child_mask
+            vals = g.get_vertex_centered_data(field)
+            if fluxing_field is None:
+                ff = na.ones(vals.shape, dtype="float64")
+            else:
+                ff = g.get_vertex_centered_data(fluxing_field)
+            xv, yv, zv = [g.get_vertex_centered_data(f) for f in 
+                         [field_x, field_y, field_z]]
+            flux += march_cubes_grid_flux(value, vals, xv, yv, zv,
+                        ff, mask, g.LeftEdge, g.dds)
+        return flux
+
     def extract_connected_sets(self, field, num_levels, min_val, max_val,
                                 log_space=True, cumulative=True, cache=False):
         """


--- a/yt/data_objects/hierarchy.py	Fri Oct 14 12:46:08 2011 -0700
+++ b/yt/data_objects/hierarchy.py	Fri Oct 14 13:01:17 2011 -0700
@@ -120,10 +120,16 @@
         # Called by subclass
         self.object_types = []
         self.objects = []
+        self.plots = []
         for name, cls in sorted(data_object_registry.items()):
             cname = cls.__name__
             if cname.endswith("Base"): cname = cname[:-4]
             self._add_object_class(name, cname, cls, dd)
+        if self.pf.refine_by != 2 and hasattr(self, 'proj') and \
+            hasattr(self, 'overlap_proj'):
+            mylog.warning("Refine by something other than two: reverting to"
+                        + " overlap_proj")
+            self.proj = self.overlap_proj
         self.object_types.sort()
 
     # Now all the object related stuff


--- a/yt/data_objects/profiles.py	Fri Oct 14 12:46:08 2011 -0700
+++ b/yt/data_objects/profiles.py	Fri Oct 14 13:01:17 2011 -0700
@@ -271,7 +271,7 @@
         else:
             mi = ((source_data > self._bins.min())
                &  (source_data < self._bins.max()))
-        ds = source_data[mi]
+        sd = source_data[mi]
         if sd.size == 0:
             raise EmptyProfileData()
         # Stick the bins into our fixed bins, set at initialization


--- a/yt/utilities/_amr_utils/FixedInterpolator.c	Fri Oct 14 12:46:08 2011 -0700
+++ b/yt/utilities/_amr_utils/FixedInterpolator.c	Fri Oct 14 13:01:17 2011 -0700
@@ -127,8 +127,8 @@
     return vz[0];
 }
 
-npy_float64 eval_gradient(int *ds, int *ci, npy_float64 *dp,
-				  npy_float64 *data, npy_float64 *grad)
+void eval_gradient(int ds[3], npy_float64 dp[3],
+				  npy_float64 *data, npy_float64 grad[3])
 {
     // We just take some small value
 
@@ -145,9 +145,9 @@
       //fprintf(stderr, "DIM: %d %0.3lf %0.3lf\n", i, plus, minus);
       denom = plus - minus;
       dp[i] = plus;
-      grad[i] += trilinear_interpolate(ds, ci, dp, data) / denom;
+      grad[i] += offset_interpolate(ds, dp, data) / denom;
       dp[i] = minus;
-      grad[i] -= trilinear_interpolate(ds, ci, dp, data) / denom;
+      grad[i] -= offset_interpolate(ds, dp, data) / denom;
       dp[i] = backup;
       normval += grad[i]*grad[i];
     }


--- a/yt/utilities/_amr_utils/FixedInterpolator.h	Fri Oct 14 12:46:08 2011 -0700
+++ b/yt/utilities/_amr_utils/FixedInterpolator.h	Fri Oct 14 13:01:17 2011 -0700
@@ -41,8 +41,7 @@
 npy_float64 trilinear_interpolate(int ds[3], int ci[3], npy_float64 dp[3],
 				  npy_float64 *data);
 
-npy_float64 eval_gradient(int ds[3], int ci[3], npy_float64 dp[3],
-				  npy_float64 *data, npy_float64 *grad);
+void eval_gradient(int ds[3], npy_float64 dp[3], npy_float64 *data, npy_float64 grad[3]);
 
 void vertex_interp(npy_float64 v1, npy_float64 v2, npy_float64 isovalue,
                    npy_float64 vl[3], npy_float64 dds[3],
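
eval_gradient now differentiates offset_interpolate directly: for each axis
it perturbs dp up and down, takes the symmetric difference, and accumulates
the squared components for normalization (the cell-index argument ci is
gone because the data pointer arrives pre-offset).  The same scheme
sketched in Python, with `interp` standing in for offset_interpolate and
the step size h an assumption (the actual epsilon is set outside the hunk
shown):

    import math

    def eval_gradient(interp, dp, h=0.01):
        # Symmetric finite difference along each axis, mirroring the C
        # routine above; the result is normalized as normval is there.
        grad = [0.0, 0.0, 0.0]
        for i in range(3):
            backup = dp[i]
            dp[i] = backup + h
            plus = interp(dp)
            dp[i] = backup - h
            minus = interp(dp)
            dp[i] = backup
            grad[i] = (plus - minus) / (2.0 * h)
        norm = math.sqrt(sum(g * g for g in grad))
        return [g / norm for g in grad] if norm > 0.0 else grad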


--- a/yt/utilities/_amr_utils/VolumeIntegrator.pyx	Fri Oct 14 12:46:08 2011 -0700
+++ b/yt/utilities/_amr_utils/VolumeIntegrator.pyx	Fri Oct 14 13:01:17 2011 -0700
@@ -69,6 +69,12 @@
 cdef struct Triangle:
     Triangle *next
     np.float64_t p[3][3]
+    np.float64_t val
+
+cdef struct TriangleCollection:
+    int count
+    Triangle *first
+    Triangle *current
 
 cdef Triangle *AddTriangle(Triangle *self,
                     np.float64_t p0[3], np.float64_t p1[3], np.float64_t p2[3]):
@@ -93,6 +99,25 @@
         this = this.next
     return count
 
+cdef void FillTriangleValues(np.ndarray[np.float64_t, ndim=1] values,
+                             Triangle *first):
+    cdef Triangle *this = first
+    cdef Triangle *last
+    cdef int i = 0
+    while this != NULL:
+        values[i] = this.val
+        i += 1
+        last = this
+        this = this.next
+
+cdef void WipeTriangles(Triangle *first):
+    cdef Triangle *this = first
+    cdef Triangle *last
+    while this != NULL:
+        last = this
+        this = this.next
+        free(last)
+
 cdef void FillAndWipeTriangles(np.ndarray[np.float64_t, ndim=2] vertices,
                                Triangle *first):
     cdef int count = 0
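
The new TriangleCollection bookkeeping keeps a singly linked list of
triangles plus a count; FillTriangleValues then walks the list copying each
stored sample value into a flat array, and WipeTriangles frees the nodes
behind itself.  The traversal, sketched in Python with a minimal stand-in
node:

    class Tri(object):
        # Stand-in for the cdef Triangle struct: one sample value per node.
        def __init__(self, val, next=None):
            self.val, self.next = val, next

    def fill_triangle_values(first):
        # Same walk as FillTriangleValues above, minus the C memory handling.
        values, this = [], first
        while this is not None:
            values.append(this.val)
            this = this.next
        return values

    tris = Tri(1.0, Tri(2.0, Tri(3.0)))
    print fill_triangle_values(tris)    # [1.0, 2.0, 3.0]
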
@@ -114,8 +139,8 @@
     np.float64_t offset_interpolate(int ds[3], np.float64_t dp[3], np.float64_t *data)
     np.float64_t trilinear_interpolate(int ds[3], int ci[3], np.float64_t dp[3],
                                        np.float64_t *data)
-    np.float64_t eval_gradient(int *ds, int *ci, np.float64_t *dp,
-                                       np.float64_t *data, np.float64_t *grad)
+    void eval_gradient(int ds[3], np.float64_t dp[3], np.float64_t *data,
+                       np.float64_t grad[3])
     void offset_fill(int *ds, np.float64_t *data, np.float64_t *gridval)
     void vertex_interp(np.float64_t v1, np.float64_t v2, np.float64_t isovalue,
                        np.float64_t vl[3], np.float64_t dds[3],
@@ -843,315 +868,25 @@
             for i in range(3):
                 vel[i] /= vel_mag[0]
 
-    #@cython.boundscheck(False)
-    #@cython.wraparound(False)
+    @cython.boundscheck(False)
+    @cython.wraparound(False)
     @cython.cdivision(True)
     def get_isocontour_triangles(self, np.float64_t isovalue, int field_id = 0):
         # Much of this was inspired by code from Paul Bourke's website:
         # http://paulbourke.net/geometry/polygonise/
-        cdef int *edge_table=[
-        0x0  , 0x109, 0x203, 0x30a, 0x406, 0x50f, 0x605, 0x70c,
-        0x80c, 0x905, 0xa0f, 0xb06, 0xc0a, 0xd03, 0xe09, 0xf00,
-        0x190, 0x99 , 0x393, 0x29a, 0x596, 0x49f, 0x795, 0x69c,
-        0x99c, 0x895, 0xb9f, 0xa96, 0xd9a, 0xc93, 0xf99, 0xe90,
-        0x230, 0x339, 0x33 , 0x13a, 0x636, 0x73f, 0x435, 0x53c,
-        0xa3c, 0xb35, 0x83f, 0x936, 0xe3a, 0xf33, 0xc39, 0xd30,
-        0x3a0, 0x2a9, 0x1a3, 0xaa , 0x7a6, 0x6af, 0x5a5, 0x4ac,
-        0xbac, 0xaa5, 0x9af, 0x8a6, 0xfaa, 0xea3, 0xda9, 0xca0,
-        0x460, 0x569, 0x663, 0x76a, 0x66 , 0x16f, 0x265, 0x36c,
-        0xc6c, 0xd65, 0xe6f, 0xf66, 0x86a, 0x963, 0xa69, 0xb60,
-        0x5f0, 0x4f9, 0x7f3, 0x6fa, 0x1f6, 0xff , 0x3f5, 0x2fc,
-        0xdfc, 0xcf5, 0xfff, 0xef6, 0x9fa, 0x8f3, 0xbf9, 0xaf0,
-        0x650, 0x759, 0x453, 0x55a, 0x256, 0x35f, 0x55 , 0x15c,
-        0xe5c, 0xf55, 0xc5f, 0xd56, 0xa5a, 0xb53, 0x859, 0x950,
-        0x7c0, 0x6c9, 0x5c3, 0x4ca, 0x3c6, 0x2cf, 0x1c5, 0xcc ,
-        0xfcc, 0xec5, 0xdcf, 0xcc6, 0xbca, 0xac3, 0x9c9, 0x8c0,
-        0x8c0, 0x9c9, 0xac3, 0xbca, 0xcc6, 0xdcf, 0xec5, 0xfcc,
-        0xcc , 0x1c5, 0x2cf, 0x3c6, 0x4ca, 0x5c3, 0x6c9, 0x7c0,
-        0x950, 0x859, 0xb53, 0xa5a, 0xd56, 0xc5f, 0xf55, 0xe5c,
-        0x15c, 0x55 , 0x35f, 0x256, 0x55a, 0x453, 0x759, 0x650,
-        0xaf0, 0xbf9, 0x8f3, 0x9fa, 0xef6, 0xfff, 0xcf5, 0xdfc,
-        0x2fc, 0x3f5, 0xff , 0x1f6, 0x6fa, 0x7f3, 0x4f9, 0x5f0,
-        0xb60, 0xa69, 0x963, 0x86a, 0xf66, 0xe6f, 0xd65, 0xc6c,
-        0x36c, 0x265, 0x16f, 0x66 , 0x76a, 0x663, 0x569, 0x460,
-        0xca0, 0xda9, 0xea3, 0xfaa, 0x8a6, 0x9af, 0xaa5, 0xbac,
-        0x4ac, 0x5a5, 0x6af, 0x7a6, 0xaa , 0x1a3, 0x2a9, 0x3a0,
-        0xd30, 0xc39, 0xf33, 0xe3a, 0x936, 0x83f, 0xb35, 0xa3c,
-        0x53c, 0x435, 0x73f, 0x636, 0x13a, 0x33 , 0x339, 0x230,
-        0xe90, 0xf99, 0xc93, 0xd9a, 0xa96, 0xb9f, 0x895, 0x99c,
-        0x69c, 0x795, 0x49f, 0x596, 0x29a, 0x393, 0x99 , 0x190,
-        0xf00, 0xe09, 0xd03, 0xc0a, 0xb06, 0xa0f, 0x905, 0x80c,
-        0x70c, 0x605, 0x50f, 0x406, 0x30a, 0x203, 0x109, 0x0   ]
-
-        cdef int **tri_table = \
-        [[-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
-        [0, 8, 3, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
-        [0, 1, 9, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
-        [1, 8, 3, 9, 8, 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
-        [1, 2, 10, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
-        [0, 8, 3, 1, 2, 10, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
-        [9, 2, 10, 0, 2, 9, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
-        [2, 8, 3, 2, 10, 8, 10, 9, 8, -1, -1, -1, -1, -1, -1, -1],
-        [3, 11, 2, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
-        [0, 11, 2, 8, 11, 0, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
-        [1, 9, 0, 2, 3, 11, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
-        [1, 11, 2, 1, 9, 11, 9, 8, 11, -1, -1, -1, -1, -1, -1, -1],
-        [3, 10, 1, 11, 10, 3, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
-        [0, 10, 1, 0, 8, 10, 8, 11, 10, -1, -1, -1, -1, -1, -1, -1],
-        [3, 9, 0, 3, 11, 9, 11, 10, 9, -1, -1, -1, -1, -1, -1, -1],
-        [9, 8, 10, 10, 8, 11, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
-        [4, 7, 8, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
-        [4, 3, 0, 7, 3, 4, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
-        [0, 1, 9, 8, 4, 7, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
-        [4, 1, 9, 4, 7, 1, 7, 3, 1, -1, -1, -1, -1, -1, -1, -1],
-        [1, 2, 10, 8, 4, 7, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
-        [3, 4, 7, 3, 0, 4, 1, 2, 10, -1, -1, -1, -1, -1, -1, -1],
-        [9, 2, 10, 9, 0, 2, 8, 4, 7, -1, -1, -1, -1, -1, -1, -1],
-        [2, 10, 9, 2, 9, 7, 2, 7, 3, 7, 9, 4, -1, -1, -1, -1],
-        [8, 4, 7, 3, 11, 2, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
-        [11, 4, 7, 11, 2, 4, 2, 0, 4, -1, -1, -1, -1, -1, -1, -1],
-        [9, 0, 1, 8, 4, 7, 2, 3, 11, -1, -1, -1, -1, -1, -1, -1],
-        [4, 7, 11, 9, 4, 11, 9, 11, 2, 9, 2, 1, -1, -1, -1, -1],
-        [3, 10, 1, 3, 11, 10, 7, 8, 4, -1, -1, -1, -1, -1, -1, -1],
-        [1, 11, 10, 1, 4, 11, 1, 0, 4, 7, 11, 4, -1, -1, -1, -1],
-        [4, 7, 8, 9, 0, 11, 9, 11, 10, 11, 0, 3, -1, -1, -1, -1],
-        [4, 7, 11, 4, 11, 9, 9, 11, 10, -1, -1, -1, -1, -1, -1, -1],
-        [9, 5, 4, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
-        [9, 5, 4, 0, 8, 3, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
-        [0, 5, 4, 1, 5, 0, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
-        [8, 5, 4, 8, 3, 5, 3, 1, 5, -1, -1, -1, -1, -1, -1, -1],
-        [1, 2, 10, 9, 5, 4, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
-        [3, 0, 8, 1, 2, 10, 4, 9, 5, -1, -1, -1, -1, -1, -1, -1],
-        [5, 2, 10, 5, 4, 2, 4, 0, 2, -1, -1, -1, -1, -1, -1, -1],
-        [2, 10, 5, 3, 2, 5, 3, 5, 4, 3, 4, 8, -1, -1, -1, -1],
-        [9, 5, 4, 2, 3, 11, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
-        [0, 11, 2, 0, 8, 11, 4, 9, 5, -1, -1, -1, -1, -1, -1, -1],
-        [0, 5, 4, 0, 1, 5, 2, 3, 11, -1, -1, -1, -1, -1, -1, -1],
-        [2, 1, 5, 2, 5, 8, 2, 8, 11, 4, 8, 5, -1, -1, -1, -1],
-        [10, 3, 11, 10, 1, 3, 9, 5, 4, -1, -1, -1, -1, -1, -1, -1],
-        [4, 9, 5, 0, 8, 1, 8, 10, 1, 8, 11, 10, -1, -1, -1, -1],
-        [5, 4, 0, 5, 0, 11, 5, 11, 10, 11, 0, 3, -1, -1, -1, -1],
-        [5, 4, 8, 5, 8, 10, 10, 8, 11, -1, -1, -1, -1, -1, -1, -1],
-        [9, 7, 8, 5, 7, 9, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
-        [9, 3, 0, 9, 5, 3, 5, 7, 3, -1, -1, -1, -1, -1, -1, -1],
-        [0, 7, 8, 0, 1, 7, 1, 5, 7, -1, -1, -1, -1, -1, -1, -1],
-        [1, 5, 3, 3, 5, 7, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
-        [9, 7, 8, 9, 5, 7, 10, 1, 2, -1, -1, -1, -1, -1, -1, -1],
-        [10, 1, 2, 9, 5, 0, 5, 3, 0, 5, 7, 3, -1, -1, -1, -1],
-        [8, 0, 2, 8, 2, 5, 8, 5, 7, 10, 5, 2, -1, -1, -1, -1],
-        [2, 10, 5, 2, 5, 3, 3, 5, 7, -1, -1, -1, -1, -1, -1, -1],
-        [7, 9, 5, 7, 8, 9, 3, 11, 2, -1, -1, -1, -1, -1, -1, -1],
-        [9, 5, 7, 9, 7, 2, 9, 2, 0, 2, 7, 11, -1, -1, -1, -1],
-        [2, 3, 11, 0, 1, 8, 1, 7, 8, 1, 5, 7, -1, -1, -1, -1],
-        [11, 2, 1, 11, 1, 7, 7, 1, 5, -1, -1, -1, -1, -1, -1, -1],
-        [9, 5, 8, 8, 5, 7, 10, 1, 3, 10, 3, 11, -1, -1, -1, -1],
-        [5, 7, 0, 5, 0, 9, 7, 11, 0, 1, 0, 10, 11, 10, 0, -1],
-        [11, 10, 0, 11, 0, 3, 10, 5, 0, 8, 0, 7, 5, 7, 0, -1],
-        [11, 10, 5, 7, 11, 5, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
-        [10, 6, 5, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
-        [0, 8, 3, 5, 10, 6, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
-        [9, 0, 1, 5, 10, 6, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
-        [1, 8, 3, 1, 9, 8, 5, 10, 6, -1, -1, -1, -1, -1, -1, -1],
-        [1, 6, 5, 2, 6, 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
-        [1, 6, 5, 1, 2, 6, 3, 0, 8, -1, -1, -1, -1, -1, -1, -1],
-        [9, 6, 5, 9, 0, 6, 0, 2, 6, -1, -1, -1, -1, -1, -1, -1],
-        [5, 9, 8, 5, 8, 2, 5, 2, 6, 3, 2, 8, -1, -1, -1, -1],
-        [2, 3, 11, 10, 6, 5, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
-        [11, 0, 8, 11, 2, 0, 10, 6, 5, -1, -1, -1, -1, -1, -1, -1],
-        [0, 1, 9, 2, 3, 11, 5, 10, 6, -1, -1, -1, -1, -1, -1, -1],
-        [5, 10, 6, 1, 9, 2, 9, 11, 2, 9, 8, 11, -1, -1, -1, -1],
-        [6, 3, 11, 6, 5, 3, 5, 1, 3, -1, -1, -1, -1, -1, -1, -1],
-        [0, 8, 11, 0, 11, 5, 0, 5, 1, 5, 11, 6, -1, -1, -1, -1],
-        [3, 11, 6, 0, 3, 6, 0, 6, 5, 0, 5, 9, -1, -1, -1, -1],
-        [6, 5, 9, 6, 9, 11, 11, 9, 8, -1, -1, -1, -1, -1, -1, -1],
-        [5, 10, 6, 4, 7, 8, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
-        [4, 3, 0, 4, 7, 3, 6, 5, 10, -1, -1, -1, -1, -1, -1, -1],
-        [1, 9, 0, 5, 10, 6, 8, 4, 7, -1, -1, -1, -1, -1, -1, -1],
-        [10, 6, 5, 1, 9, 7, 1, 7, 3, 7, 9, 4, -1, -1, -1, -1],
-        [6, 1, 2, 6, 5, 1, 4, 7, 8, -1, -1, -1, -1, -1, -1, -1],
-        [1, 2, 5, 5, 2, 6, 3, 0, 4, 3, 4, 7, -1, -1, -1, -1],
-        [8, 4, 7, 9, 0, 5, 0, 6, 5, 0, 2, 6, -1, -1, -1, -1],
-        [7, 3, 9, 7, 9, 4, 3, 2, 9, 5, 9, 6, 2, 6, 9, -1],
-        [3, 11, 2, 7, 8, 4, 10, 6, 5, -1, -1, -1, -1, -1, -1, -1],
-        [5, 10, 6, 4, 7, 2, 4, 2, 0, 2, 7, 11, -1, -1, -1, -1],
-        [0, 1, 9, 4, 7, 8, 2, 3, 11, 5, 10, 6, -1, -1, -1, -1],
-        [9, 2, 1, 9, 11, 2, 9, 4, 11, 7, 11, 4, 5, 10, 6, -1],
-        [8, 4, 7, 3, 11, 5, 3, 5, 1, 5, 11, 6, -1, -1, -1, -1],
-        [5, 1, 11, 5, 11, 6, 1, 0, 11, 7, 11, 4, 0, 4, 11, -1],
-        [0, 5, 9, 0, 6, 5, 0, 3, 6, 11, 6, 3, 8, 4, 7, -1],
-        [6, 5, 9, 6, 9, 11, 4, 7, 9, 7, 11, 9, -1, -1, -1, -1],
-        [10, 4, 9, 6, 4, 10, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
-        [4, 10, 6, 4, 9, 10, 0, 8, 3, -1, -1, -1, -1, -1, -1, -1],
-        [10, 0, 1, 10, 6, 0, 6, 4, 0, -1, -1, -1, -1, -1, -1, -1],
-        [8, 3, 1, 8, 1, 6, 8, 6, 4, 6, 1, 10, -1, -1, -1, -1],
-        [1, 4, 9, 1, 2, 4, 2, 6, 4, -1, -1, -1, -1, -1, -1, -1],
-        [3, 0, 8, 1, 2, 9, 2, 4, 9, 2, 6, 4, -1, -1, -1, -1],
-        [0, 2, 4, 4, 2, 6, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
-        [8, 3, 2, 8, 2, 4, 4, 2, 6, -1, -1, -1, -1, -1, -1, -1],
-        [10, 4, 9, 10, 6, 4, 11, 2, 3, -1, -1, -1, -1, -1, -1, -1],
-        [0, 8, 2, 2, 8, 11, 4, 9, 10, 4, 10, 6, -1, -1, -1, -1],
-        [3, 11, 2, 0, 1, 6, 0, 6, 4, 6, 1, 10, -1, -1, -1, -1],
-        [6, 4, 1, 6, 1, 10, 4, 8, 1, 2, 1, 11, 8, 11, 1, -1],
-        [9, 6, 4, 9, 3, 6, 9, 1, 3, 11, 6, 3, -1, -1, -1, -1],
-        [8, 11, 1, 8, 1, 0, 11, 6, 1, 9, 1, 4, 6, 4, 1, -1],
-        [3, 11, 6, 3, 6, 0, 0, 6, 4, -1, -1, -1, -1, -1, -1, -1],
-        [6, 4, 8, 11, 6, 8, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
-        [7, 10, 6, 7, 8, 10, 8, 9, 10, -1, -1, -1, -1, -1, -1, -1],
-        [0, 7, 3, 0, 10, 7, 0, 9, 10, 6, 7, 10, -1, -1, -1, -1],
-        [10, 6, 7, 1, 10, 7, 1, 7, 8, 1, 8, 0, -1, -1, -1, -1],
-        [10, 6, 7, 10, 7, 1, 1, 7, 3, -1, -1, -1, -1, -1, -1, -1],
-        [1, 2, 6, 1, 6, 8, 1, 8, 9, 8, 6, 7, -1, -1, -1, -1],
-        [2, 6, 9, 2, 9, 1, 6, 7, 9, 0, 9, 3, 7, 3, 9, -1],
-        [7, 8, 0, 7, 0, 6, 6, 0, 2, -1, -1, -1, -1, -1, -1, -1],
-        [7, 3, 2, 6, 7, 2, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
-        [2, 3, 11, 10, 6, 8, 10, 8, 9, 8, 6, 7, -1, -1, -1, -1],
-        [2, 0, 7, 2, 7, 11, 0, 9, 7, 6, 7, 10, 9, 10, 7, -1],
-        [1, 8, 0, 1, 7, 8, 1, 10, 7, 6, 7, 10, 2, 3, 11, -1],
-        [11, 2, 1, 11, 1, 7, 10, 6, 1, 6, 7, 1, -1, -1, -1, -1],
-        [8, 9, 6, 8, 6, 7, 9, 1, 6, 11, 6, 3, 1, 3, 6, -1],
-        [0, 9, 1, 11, 6, 7, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
-        [7, 8, 0, 7, 0, 6, 3, 11, 0, 11, 6, 0, -1, -1, -1, -1],
-        [7, 11, 6, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
-        [7, 6, 11, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
-        [3, 0, 8, 11, 7, 6, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
-        [0, 1, 9, 11, 7, 6, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
-        [8, 1, 9, 8, 3, 1, 11, 7, 6, -1, -1, -1, -1, -1, -1, -1],
-        [10, 1, 2, 6, 11, 7, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
-        [1, 2, 10, 3, 0, 8, 6, 11, 7, -1, -1, -1, -1, -1, -1, -1],
-        [2, 9, 0, 2, 10, 9, 6, 11, 7, -1, -1, -1, -1, -1, -1, -1],
-        [6, 11, 7, 2, 10, 3, 10, 8, 3, 10, 9, 8, -1, -1, -1, -1],
-        [7, 2, 3, 6, 2, 7, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
-        [7, 0, 8, 7, 6, 0, 6, 2, 0, -1, -1, -1, -1, -1, -1, -1],
-        [2, 7, 6, 2, 3, 7, 0, 1, 9, -1, -1, -1, -1, -1, -1, -1],
-        [1, 6, 2, 1, 8, 6, 1, 9, 8, 8, 7, 6, -1, -1, -1, -1],
-        [10, 7, 6, 10, 1, 7, 1, 3, 7, -1, -1, -1, -1, -1, -1, -1],
-        [10, 7, 6, 1, 7, 10, 1, 8, 7, 1, 0, 8, -1, -1, -1, -1],
-        [0, 3, 7, 0, 7, 10, 0, 10, 9, 6, 10, 7, -1, -1, -1, -1],
-        [7, 6, 10, 7, 10, 8, 8, 10, 9, -1, -1, -1, -1, -1, -1, -1],
-        [6, 8, 4, 11, 8, 6, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
-        [3, 6, 11, 3, 0, 6, 0, 4, 6, -1, -1, -1, -1, -1, -1, -1],
-        [8, 6, 11, 8, 4, 6, 9, 0, 1, -1, -1, -1, -1, -1, -1, -1],
-        [9, 4, 6, 9, 6, 3, 9, 3, 1, 11, 3, 6, -1, -1, -1, -1],
-        [6, 8, 4, 6, 11, 8, 2, 10, 1, -1, -1, -1, -1, -1, -1, -1],
-        [1, 2, 10, 3, 0, 11, 0, 6, 11, 0, 4, 6, -1, -1, -1, -1],
-        [4, 11, 8, 4, 6, 11, 0, 2, 9, 2, 10, 9, -1, -1, -1, -1],
-        [10, 9, 3, 10, 3, 2, 9, 4, 3, 11, 3, 6, 4, 6, 3, -1],
-        [8, 2, 3, 8, 4, 2, 4, 6, 2, -1, -1, -1, -1, -1, -1, -1],
-        [0, 4, 2, 4, 6, 2, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
-        [1, 9, 0, 2, 3, 4, 2, 4, 6, 4, 3, 8, -1, -1, -1, -1],
-        [1, 9, 4, 1, 4, 2, 2, 4, 6, -1, -1, -1, -1, -1, -1, -1],
-        [8, 1, 3, 8, 6, 1, 8, 4, 6, 6, 10, 1, -1, -1, -1, -1],
-        [10, 1, 0, 10, 0, 6, 6, 0, 4, -1, -1, -1, -1, -1, -1, -1],
-        [4, 6, 3, 4, 3, 8, 6, 10, 3, 0, 3, 9, 10, 9, 3, -1],
-        [10, 9, 4, 6, 10, 4, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
-        [4, 9, 5, 7, 6, 11, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
-        [0, 8, 3, 4, 9, 5, 11, 7, 6, -1, -1, -1, -1, -1, -1, -1],
-        [5, 0, 1, 5, 4, 0, 7, 6, 11, -1, -1, -1, -1, -1, -1, -1],
-        [11, 7, 6, 8, 3, 4, 3, 5, 4, 3, 1, 5, -1, -1, -1, -1],
-        [9, 5, 4, 10, 1, 2, 7, 6, 11, -1, -1, -1, -1, -1, -1, -1],
-        [6, 11, 7, 1, 2, 10, 0, 8, 3, 4, 9, 5, -1, -1, -1, -1],
-        [7, 6, 11, 5, 4, 10, 4, 2, 10, 4, 0, 2, -1, -1, -1, -1],
-        [3, 4, 8, 3, 5, 4, 3, 2, 5, 10, 5, 2, 11, 7, 6, -1],
-        [7, 2, 3, 7, 6, 2, 5, 4, 9, -1, -1, -1, -1, -1, -1, -1],
-        [9, 5, 4, 0, 8, 6, 0, 6, 2, 6, 8, 7, -1, -1, -1, -1],
-        [3, 6, 2, 3, 7, 6, 1, 5, 0, 5, 4, 0, -1, -1, -1, -1],
-        [6, 2, 8, 6, 8, 7, 2, 1, 8, 4, 8, 5, 1, 5, 8, -1],
-        [9, 5, 4, 10, 1, 6, 1, 7, 6, 1, 3, 7, -1, -1, -1, -1],
-        [1, 6, 10, 1, 7, 6, 1, 0, 7, 8, 7, 0, 9, 5, 4, -1],
-        [4, 0, 10, 4, 10, 5, 0, 3, 10, 6, 10, 7, 3, 7, 10, -1],
-        [7, 6, 10, 7, 10, 8, 5, 4, 10, 4, 8, 10, -1, -1, -1, -1],
-        [6, 9, 5, 6, 11, 9, 11, 8, 9, -1, -1, -1, -1, -1, -1, -1],
-        [3, 6, 11, 0, 6, 3, 0, 5, 6, 0, 9, 5, -1, -1, -1, -1],
-        [0, 11, 8, 0, 5, 11, 0, 1, 5, 5, 6, 11, -1, -1, -1, -1],
-        [6, 11, 3, 6, 3, 5, 5, 3, 1, -1, -1, -1, -1, -1, -1, -1],
-        [1, 2, 10, 9, 5, 11, 9, 11, 8, 11, 5, 6, -1, -1, -1, -1],
-        [0, 11, 3, 0, 6, 11, 0, 9, 6, 5, 6, 9, 1, 2, 10, -1],
-        [11, 8, 5, 11, 5, 6, 8, 0, 5, 10, 5, 2, 0, 2, 5, -1],
-        [6, 11, 3, 6, 3, 5, 2, 10, 3, 10, 5, 3, -1, -1, -1, -1],
-        [5, 8, 9, 5, 2, 8, 5, 6, 2, 3, 8, 2, -1, -1, -1, -1],
-        [9, 5, 6, 9, 6, 0, 0, 6, 2, -1, -1, -1, -1, -1, -1, -1],
-        [1, 5, 8, 1, 8, 0, 5, 6, 8, 3, 8, 2, 6, 2, 8, -1],
-        [1, 5, 6, 2, 1, 6, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
-        [1, 3, 6, 1, 6, 10, 3, 8, 6, 5, 6, 9, 8, 9, 6, -1],
-        [10, 1, 0, 10, 0, 6, 9, 5, 0, 5, 6, 0, -1, -1, -1, -1],
-        [0, 3, 8, 5, 6, 10, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
-        [10, 5, 6, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
-        [11, 5, 10, 7, 5, 11, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
-        [11, 5, 10, 11, 7, 5, 8, 3, 0, -1, -1, -1, -1, -1, -1, -1],
-        [5, 11, 7, 5, 10, 11, 1, 9, 0, -1, -1, -1, -1, -1, -1, -1],
-        [10, 7, 5, 10, 11, 7, 9, 8, 1, 8, 3, 1, -1, -1, -1, -1],
-        [11, 1, 2, 11, 7, 1, 7, 5, 1, -1, -1, -1, -1, -1, -1, -1],
-        [0, 8, 3, 1, 2, 7, 1, 7, 5, 7, 2, 11, -1, -1, -1, -1],
-        [9, 7, 5, 9, 2, 7, 9, 0, 2, 2, 11, 7, -1, -1, -1, -1],
-        [7, 5, 2, 7, 2, 11, 5, 9, 2, 3, 2, 8, 9, 8, 2, -1],
-        [2, 5, 10, 2, 3, 5, 3, 7, 5, -1, -1, -1, -1, -1, -1, -1],
-        [8, 2, 0, 8, 5, 2, 8, 7, 5, 10, 2, 5, -1, -1, -1, -1],
-        [9, 0, 1, 5, 10, 3, 5, 3, 7, 3, 10, 2, -1, -1, -1, -1],
-        [9, 8, 2, 9, 2, 1, 8, 7, 2, 10, 2, 5, 7, 5, 2, -1],
-        [1, 3, 5, 3, 7, 5, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
-        [0, 8, 7, 0, 7, 1, 1, 7, 5, -1, -1, -1, -1, -1, -1, -1],
-        [9, 0, 3, 9, 3, 5, 5, 3, 7, -1, -1, -1, -1, -1, -1, -1],
-        [9, 8, 7, 5, 9, 7, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
-        [5, 8, 4, 5, 10, 8, 10, 11, 8, -1, -1, -1, -1, -1, -1, -1],
-        [5, 0, 4, 5, 11, 0, 5, 10, 11, 11, 3, 0, -1, -1, -1, -1],
-        [0, 1, 9, 8, 4, 10, 8, 10, 11, 10, 4, 5, -1, -1, -1, -1],
-        [10, 11, 4, 10, 4, 5, 11, 3, 4, 9, 4, 1, 3, 1, 4, -1],
-        [2, 5, 1, 2, 8, 5, 2, 11, 8, 4, 5, 8, -1, -1, -1, -1],
-        [0, 4, 11, 0, 11, 3, 4, 5, 11, 2, 11, 1, 5, 1, 11, -1],
-        [0, 2, 5, 0, 5, 9, 2, 11, 5, 4, 5, 8, 11, 8, 5, -1],
-        [9, 4, 5, 2, 11, 3, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
-        [2, 5, 10, 3, 5, 2, 3, 4, 5, 3, 8, 4, -1, -1, -1, -1],
-        [5, 10, 2, 5, 2, 4, 4, 2, 0, -1, -1, -1, -1, -1, -1, -1],
-        [3, 10, 2, 3, 5, 10, 3, 8, 5, 4, 5, 8, 0, 1, 9, -1],
-        [5, 10, 2, 5, 2, 4, 1, 9, 2, 9, 4, 2, -1, -1, -1, -1],
-        [8, 4, 5, 8, 5, 3, 3, 5, 1, -1, -1, -1, -1, -1, -1, -1],
-        [0, 4, 5, 1, 0, 5, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
-        [8, 4, 5, 8, 5, 3, 9, 0, 5, 0, 3, 5, -1, -1, -1, -1],
-        [9, 4, 5, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
-        [4, 11, 7, 4, 9, 11, 9, 10, 11, -1, -1, -1, -1, -1, -1, -1],
-        [0, 8, 3, 4, 9, 7, 9, 11, 7, 9, 10, 11, -1, -1, -1, -1],
-        [1, 10, 11, 1, 11, 4, 1, 4, 0, 7, 4, 11, -1, -1, -1, -1],
-        [3, 1, 4, 3, 4, 8, 1, 10, 4, 7, 4, 11, 10, 11, 4, -1],
-        [4, 11, 7, 9, 11, 4, 9, 2, 11, 9, 1, 2, -1, -1, -1, -1],
-        [9, 7, 4, 9, 11, 7, 9, 1, 11, 2, 11, 1, 0, 8, 3, -1],
-        [11, 7, 4, 11, 4, 2, 2, 4, 0, -1, -1, -1, -1, -1, -1, -1],
-        [11, 7, 4, 11, 4, 2, 8, 3, 4, 3, 2, 4, -1, -1, -1, -1],
-        [2, 9, 10, 2, 7, 9, 2, 3, 7, 7, 4, 9, -1, -1, -1, -1],
-        [9, 10, 7, 9, 7, 4, 10, 2, 7, 8, 7, 0, 2, 0, 7, -1],
-        [3, 7, 10, 3, 10, 2, 7, 4, 10, 1, 10, 0, 4, 0, 10, -1],
-        [1, 10, 2, 8, 7, 4, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
-        [4, 9, 1, 4, 1, 7, 7, 1, 3, -1, -1, -1, -1, -1, -1, -1],
-        [4, 9, 1, 4, 1, 7, 0, 8, 1, 8, 7, 1, -1, -1, -1, -1],
-        [4, 0, 3, 7, 4, 3, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
-        [4, 8, 7, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
-        [9, 10, 8, 10, 11, 8, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
-        [3, 0, 9, 3, 9, 11, 11, 9, 10, -1, -1, -1, -1, -1, -1, -1],
-        [0, 1, 10, 0, 10, 8, 8, 10, 11, -1, -1, -1, -1, -1, -1, -1],
-        [3, 1, 10, 11, 3, 10, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
-        [1, 2, 11, 1, 11, 9, 9, 11, 8, -1, -1, -1, -1, -1, -1, -1],
-        [3, 0, 9, 3, 9, 11, 1, 2, 9, 2, 11, 9, -1, -1, -1, -1],
-        [0, 2, 11, 8, 0, 11, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
-        [3, 2, 11, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
-        [2, 3, 8, 2, 8, 10, 10, 8, 9, -1, -1, -1, -1, -1, -1, -1],
-        [9, 10, 2, 0, 9, 2, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
-        [2, 3, 8, 2, 8, 10, 0, 1, 8, 1, 10, 8, -1, -1, -1, -1],
-        [1, 10, 2, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
-        [1, 3, 8, 9, 1, 8, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
-        [0, 9, 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
-        [0, 3, 8, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
-        [-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1]]
+        # Cython makes us toss this in here, which I think will change in a
+        # future release.
 
         cdef int i, j, k, n
         cdef int offset
         cdef np.float64_t gv[8]
         cdef int cubeindex
-        cdef np.float64_t vertlist[12][3]
-        cdef int ntriang = 0
         cdef np.float64_t *intdata = NULL
         cdef np.float64_t x, y, z
         cdef np.float64_t mu
-        cdef Triangle *first = NULL
-        cdef Triangle *current = NULL
+        cdef TriangleCollection triangles
+        triangles.first = triangles.current = NULL
+        triangles.count = 0
         x = self.left_edge[0]
         for i in range(self.dims[0]):
             y = self.left_edge[1]
@@ -1162,68 +897,578 @@
                            + j * (self.dims[2] + 1) + k
                     intdata = self.data[field_id] + offset
                     offset_fill(self.dims, intdata, gv)
-                    cubeindex = 0
-                    for n in range(8):
-                        if gv[n] < isovalue:
-                            cubeindex |= (1 << n)
-                    if edge_table[cubeindex] == 0:
-                        z += self.dds[2]
-                        continue
-                    if (edge_table[cubeindex] & 1): # 0,0,0 with 1,0,0
-                        vertex_interp(gv[0], gv[1], isovalue, vertlist[0],
-                                      self.dds, x, y, z, 0, 1)
-                    if (edge_table[cubeindex] & 2): # 1,0,0 with 1,1,0
-                        vertex_interp(gv[1], gv[2], isovalue, vertlist[1],
-                                      self.dds, x, y, z, 1, 2)
-                    if (edge_table[cubeindex] & 4): # 1,1,0 with 0,1,0
-                        vertex_interp(gv[2], gv[3], isovalue, vertlist[2],
-                                      self.dds, x, y, z, 2, 3)
-                    if (edge_table[cubeindex] & 8): # 0,1,0 with 0,0,0
-                        vertex_interp(gv[3], gv[0], isovalue, vertlist[3],
-                                      self.dds, x, y, z, 3, 0)
-                    if (edge_table[cubeindex] & 16): # 0,0,1 with 1,0,1
-                        vertex_interp(gv[4], gv[5], isovalue, vertlist[4],
-                                      self.dds, x, y, z, 4, 5)
-                    if (edge_table[cubeindex] & 32): # 1,0,1 with 1,1,1
-                        vertex_interp(gv[5], gv[6], isovalue, vertlist[5],
-                                      self.dds, x, y, z, 5, 6)
-                    if (edge_table[cubeindex] & 64): # 1,1,1 with 0,1,1
-                        vertex_interp(gv[6], gv[7], isovalue, vertlist[6],
-                                      self.dds, x, y, z, 6, 7)
-                    if (edge_table[cubeindex] & 128): # 0,1,1 with 0,0,1
-                        vertex_interp(gv[7], gv[4], isovalue, vertlist[7],
-                                      self.dds, x, y, z, 7, 4)
-                    if (edge_table[cubeindex] & 256): # 0,0,0 with 0,0,1
-                        vertex_interp(gv[0], gv[4], isovalue, vertlist[8],
-                                      self.dds, x, y, z, 0, 4)
-                    if (edge_table[cubeindex] & 512): # 1,0,0 with 1,0,1
-                        vertex_interp(gv[1], gv[5], isovalue, vertlist[9],
-                                      self.dds, x, y, z, 1, 5)
-                    if (edge_table[cubeindex] & 1024): # 1,1,0 with 1,1,1
-                        vertex_interp(gv[2], gv[6], isovalue, vertlist[10],
-                                      self.dds, x, y, z, 2, 6)
-                    if (edge_table[cubeindex] & 2048): # 0,1,0 with 0,1,1
-                        vertex_interp(gv[3], gv[7], isovalue, vertlist[11],
-                                      self.dds, x, y, z, 3, 7)
-                    n = 0
-                    while 1:
-                        current = AddTriangle(current, 
-                                    vertlist[tri_table[cubeindex][n  ]],
-                                    vertlist[tri_table[cubeindex][n+1]],
-                                    vertlist[tri_table[cubeindex][n+2]])
-                        ntriang += 1
-                        if first == NULL: first = current
-                        n += 3
-                        if tri_table[cubeindex][n] == -1: break
+                    march_cubes(gv, isovalue, self.dds, x, y, z,
+                                &triangles)
                     z += self.dds[2]
                 y += self.dds[1]
             x += self.dds[0]
         # Hallo, we are all done.
         cdef np.ndarray[np.float64_t, ndim=2] vertices 
-        vertices = np.zeros((ntriang*3,3), dtype='float64')
-        FillAndWipeTriangles(vertices, first)
+        vertices = np.zeros((triangles.count*3,3), dtype='float64')
+        FillAndWipeTriangles(vertices, triangles.first)
         return vertices
 
+@cython.boundscheck(False)
+@cython.wraparound(False)
+@cython.cdivision(True)
+def march_cubes_grid(np.float64_t isovalue,
+                     np.ndarray[np.float64_t, ndim=3] values,
+                     np.ndarray[np.int32_t, ndim=3] mask,
+                     np.ndarray[np.float64_t, ndim=1] left_edge,
+                     np.ndarray[np.float64_t, ndim=1] dxs,
+                     obj_sample = None):
+    cdef int dims[3]
+    cdef int i, j, k, n, m, nt
+    cdef int offset
+    cdef np.float64_t gv[8], pos[3], point[3], idds[3]
+    cdef np.float64_t *intdata = NULL
+    cdef np.float64_t *sdata = NULL
+    cdef np.float64_t x, y, z, do_sample
+    cdef np.ndarray[np.float64_t, ndim=3] sample
+    cdef np.ndarray[np.float64_t, ndim=1] sampled
+    cdef TriangleCollection triangles
+    cdef Triangle *last, *current
+    if obj_sample is not None:
+        sample = obj_sample
+        sdata = <np.float64_t *> sample.data
+        do_sample = 1
+    else:
+        do_sample = 0
+    for i in range(3):
+        dims[i] = values.shape[i] - 1
+        idds[i] = 1.0 / dxs[i]
+    triangles.first = triangles.current = NULL
+    last = current = NULL
+    triangles.count = 0
+    cdef np.float64_t *data = <np.float64_t *> values.data
+    cdef np.float64_t *dds = <np.float64_t *> dxs.data
+    pos[0] = left_edge[0]
+    for i in range(dims[0]):
+        pos[1] = left_edge[1]
+        for j in range(dims[1]):
+            pos[2] = left_edge[2]
+            for k in range(dims[2]):
+                if mask[i,j,k] == 1:
+                    offset = i * (dims[1] + 1) * (dims[2] + 1) \
+                           + j * (dims[2] + 1) + k
+                    intdata = data + offset
+                    offset_fill(dims, intdata, gv)
+                    nt = march_cubes(gv, isovalue, dds, pos[0], pos[1], pos[2],
+                                &triangles)
+                    if do_sample == 1 and nt > 0:
+                        # At each triangle's center, sample our secondary field
+                        if last == NULL and triangles.first != NULL:
+                            current = triangles.first
+                            last = NULL
+                        elif last != NULL:
+                            current = last.next
+                        while current != NULL:
+                            for n in range(3):
+                                point[n] = 0.0
+                            for n in range(3):
+                                for m in range(3):
+                                    point[m] += (current.p[n][m]-pos[m])*idds[m]
+                            for n in range(3):
+                                point[n] /= 3.0
+                            current.val = offset_interpolate(dims, point,
+                                                             sdata + offset)
+                            last = current
+                            if current.next == NULL: break
+                            current = current.next
+                pos[2] += dds[2]
+            pos[1] += dds[1]
+        pos[0] += dds[0]
+    # Hallo, we are all done.
+    cdef np.ndarray[np.float64_t, ndim=2] vertices 
+    vertices = np.zeros((triangles.count*3,3), dtype='float64')
+    if do_sample == 1:
+        sampled = np.zeros(triangles.count, dtype='float64')
+        FillTriangleValues(sampled, triangles.first)
+        FillAndWipeTriangles(vertices, triangles.first)
+        return vertices, sampled
+    FillAndWipeTriangles(vertices, triangles.first)
+    return vertices
+
+@cython.boundscheck(False)
+@cython.wraparound(False)
+@cython.cdivision(True)
+def march_cubes_grid_flux(
+                     np.float64_t isovalue,
+                     np.ndarray[np.float64_t, ndim=3] values,
+                     np.ndarray[np.float64_t, ndim=3] v1,
+                     np.ndarray[np.float64_t, ndim=3] v2,
+                     np.ndarray[np.float64_t, ndim=3] v3,
+                     np.ndarray[np.float64_t, ndim=3] flux_field,
+                     np.ndarray[np.int32_t, ndim=3] mask,
+                     np.ndarray[np.float64_t, ndim=1] left_edge,
+                     np.ndarray[np.float64_t, ndim=1] dxs):
+    cdef int dims[3]
+    cdef int i, j, k, n, m
+    cdef int offset
+    cdef np.float64_t gv[8]
+    cdef np.float64_t *intdata = NULL
+    cdef TriangleCollection triangles
+    cdef Triangle *current = NULL
+    cdef Triangle *last = NULL
+    cdef np.float64_t *data = <np.float64_t *> values.data
+    cdef np.float64_t *v1data = <np.float64_t *> v1.data
+    cdef np.float64_t *v2data = <np.float64_t *> v2.data
+    cdef np.float64_t *v3data = <np.float64_t *> v3.data
+    cdef np.float64_t *fdata = <np.float64_t *> flux_field.data
+    cdef np.float64_t *dds = <np.float64_t *> dxs.data
+    cdef np.float64_t flux = 0.0
+    cdef np.float64_t center[3], point[3], wval, temp, area, s
+    cdef np.float64_t cell_pos[3], fv[3], idds[3], normal[3]
+    for i in range(3):
+        dims[i] = values.shape[i] - 1
+        idds[i] = 1.0 / dds[i]
+    triangles.first = triangles.current = NULL
+    triangles.count = 0
+    cell_pos[0] = left_edge[0]
+    for i in range(dims[0]):
+        cell_pos[1] = left_edge[1]
+        for j in range(dims[1]):
+            cell_pos[2] = left_edge[2]
+            for k in range(dims[2]):
+                if mask[i,j,k] == 1:
+                    offset = i * (dims[1] + 1) * (dims[2] + 1) \
+                           + j * (dims[2] + 1) + k
+                    intdata = data + offset
+                    offset_fill(dims, intdata, gv)
+                    march_cubes(gv, isovalue, dds,
+                                cell_pos[0], cell_pos[1], cell_pos[2],
+                                &triangles)
+                    # Now our triangles collection has a bunch.  We now
+                    # calculate fluxes for each.
+                    if last == NULL and triangles.first != NULL:
+                        current = triangles.first
+                        last = NULL
+                    elif last != NULL:
+                        current = last.next
+                    while current != NULL:
+                        # Calculate the center of the triangle
+                        wval = 0.0
+                        for n in range(3):
+                            center[n] = 0.0
+                        for n in range(3):
+                            for m in range(3):
+                                point[m] = (current.p[n][m]-cell_pos[m])*idds[m]
+                            # Now we calculate the value at this point
+                            temp = offset_interpolate(dims, point, intdata)
+                            #print "something", temp, point[0], point[1], point[2]
+                            wval += temp
+                            for m in range(3):
+                                center[m] += temp * point[m]
+                        # Now we divide by our normalizing factor
+                        for n in range(3):
+                            center[n] /= wval
+                        # We have our center point of the triangle, in 0..1
+                        # coordinates.  So now we interpolate our three
+                        # fields.
+                        fv[0] = offset_interpolate(dims, center, v1data + offset)
+                        fv[1] = offset_interpolate(dims, center, v2data + offset)
+                        fv[2] = offset_interpolate(dims, center, v3data + offset)
+                        # We interpolate again the actual value data
+                        wval = offset_interpolate(dims, center, fdata + offset)
+                        # Now we have our flux vector and our field value!
+                        # We just need a normal vector with which we can
+                        # dot it.  The normal should be equal to the gradient
+                        # in the center of the triangle, or thereabouts.
+                        eval_gradient(dims, center, intdata, normal)
+                        temp = 0.0
+                        for n in range(3):
+                            temp += normal[n]*normal[n]
+                        # Take the negative, to ensure it points inwardly
+                        temp = -(temp**0.5)
+                        # Dump this somewhere for now
+                        temp = wval * (fv[0] * normal[0] +
+                                       fv[1] * normal[1] +
+                                       fv[2] * normal[2])/temp
+                        # Now we need the area of the triangle.  This will take
+                        # a lot of time to calculate compared to the rest.
+                        # We use Heron's formula.
+                        for n in range(3):
+                            fv[n] = 0.0
+                        for n in range(3):
+                            fv[0] += (current.p[0][n] - current.p[2][n])**2.0
+                            fv[1] += (current.p[1][n] - current.p[0][n])**2.0
+                            fv[2] += (current.p[2][n] - current.p[1][n])**2.0
+                        s = 0.0
+                        for n in range(3):
+                            fv[n] = fv[n]**0.5
+                            s += 0.5 * fv[n]
+                        area = (s*(s-fv[0])*(s-fv[1])*(s-fv[2]))
+                        area = area**0.5
+                        flux += temp*area
+                        last = current
+                        if current.next == NULL: break
+                        current = current.next
+                cell_pos[2] += dds[2]
+            cell_pos[1] += dds[1]
+        cell_pos[0] += dds[0]
+    # Hallo, we are all done.
+    WipeTriangles(triangles.first)
+    return flux
+
+@cython.boundscheck(False)
+@cython.wraparound(False)
+@cython.cdivision(True)
+cdef int march_cubes(
+                 np.float64_t gv[8], np.float64_t isovalue,
+                 np.float64_t dds[3],
+                 np.float64_t x, np.float64_t y, np.float64_t z,
+                 TriangleCollection *triangles):
+    cdef int *edge_table=[
+    0x0  , 0x109, 0x203, 0x30a, 0x406, 0x50f, 0x605, 0x70c,
+    0x80c, 0x905, 0xa0f, 0xb06, 0xc0a, 0xd03, 0xe09, 0xf00,
+    0x190, 0x99 , 0x393, 0x29a, 0x596, 0x49f, 0x795, 0x69c,
+    0x99c, 0x895, 0xb9f, 0xa96, 0xd9a, 0xc93, 0xf99, 0xe90,
+    0x230, 0x339, 0x33 , 0x13a, 0x636, 0x73f, 0x435, 0x53c,
+    0xa3c, 0xb35, 0x83f, 0x936, 0xe3a, 0xf33, 0xc39, 0xd30,
+    0x3a0, 0x2a9, 0x1a3, 0xaa , 0x7a6, 0x6af, 0x5a5, 0x4ac,
+    0xbac, 0xaa5, 0x9af, 0x8a6, 0xfaa, 0xea3, 0xda9, 0xca0,
+    0x460, 0x569, 0x663, 0x76a, 0x66 , 0x16f, 0x265, 0x36c,
+    0xc6c, 0xd65, 0xe6f, 0xf66, 0x86a, 0x963, 0xa69, 0xb60,
+    0x5f0, 0x4f9, 0x7f3, 0x6fa, 0x1f6, 0xff , 0x3f5, 0x2fc,
+    0xdfc, 0xcf5, 0xfff, 0xef6, 0x9fa, 0x8f3, 0xbf9, 0xaf0,
+    0x650, 0x759, 0x453, 0x55a, 0x256, 0x35f, 0x55 , 0x15c,
+    0xe5c, 0xf55, 0xc5f, 0xd56, 0xa5a, 0xb53, 0x859, 0x950,
+    0x7c0, 0x6c9, 0x5c3, 0x4ca, 0x3c6, 0x2cf, 0x1c5, 0xcc ,
+    0xfcc, 0xec5, 0xdcf, 0xcc6, 0xbca, 0xac3, 0x9c9, 0x8c0,
+    0x8c0, 0x9c9, 0xac3, 0xbca, 0xcc6, 0xdcf, 0xec5, 0xfcc,
+    0xcc , 0x1c5, 0x2cf, 0x3c6, 0x4ca, 0x5c3, 0x6c9, 0x7c0,
+    0x950, 0x859, 0xb53, 0xa5a, 0xd56, 0xc5f, 0xf55, 0xe5c,
+    0x15c, 0x55 , 0x35f, 0x256, 0x55a, 0x453, 0x759, 0x650,
+    0xaf0, 0xbf9, 0x8f3, 0x9fa, 0xef6, 0xfff, 0xcf5, 0xdfc,
+    0x2fc, 0x3f5, 0xff , 0x1f6, 0x6fa, 0x7f3, 0x4f9, 0x5f0,
+    0xb60, 0xa69, 0x963, 0x86a, 0xf66, 0xe6f, 0xd65, 0xc6c,
+    0x36c, 0x265, 0x16f, 0x66 , 0x76a, 0x663, 0x569, 0x460,
+    0xca0, 0xda9, 0xea3, 0xfaa, 0x8a6, 0x9af, 0xaa5, 0xbac,
+    0x4ac, 0x5a5, 0x6af, 0x7a6, 0xaa , 0x1a3, 0x2a9, 0x3a0,
+    0xd30, 0xc39, 0xf33, 0xe3a, 0x936, 0x83f, 0xb35, 0xa3c,
+    0x53c, 0x435, 0x73f, 0x636, 0x13a, 0x33 , 0x339, 0x230,
+    0xe90, 0xf99, 0xc93, 0xd9a, 0xa96, 0xb9f, 0x895, 0x99c,
+    0x69c, 0x795, 0x49f, 0x596, 0x29a, 0x393, 0x99 , 0x190,
+    0xf00, 0xe09, 0xd03, 0xc0a, 0xb06, 0xa0f, 0x905, 0x80c,
+    0x70c, 0x605, 0x50f, 0x406, 0x30a, 0x203, 0x109, 0x0   ]
+
+    cdef int **tri_table = \
+    [[-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
+    [0, 8, 3, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
+    [0, 1, 9, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
+    [1, 8, 3, 9, 8, 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
+    [1, 2, 10, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
+    [0, 8, 3, 1, 2, 10, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
+    [9, 2, 10, 0, 2, 9, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
+    [2, 8, 3, 2, 10, 8, 10, 9, 8, -1, -1, -1, -1, -1, -1, -1],
+    [3, 11, 2, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
+    [0, 11, 2, 8, 11, 0, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
+    [1, 9, 0, 2, 3, 11, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
+    [1, 11, 2, 1, 9, 11, 9, 8, 11, -1, -1, -1, -1, -1, -1, -1],
+    [3, 10, 1, 11, 10, 3, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
+    [0, 10, 1, 0, 8, 10, 8, 11, 10, -1, -1, -1, -1, -1, -1, -1],
+    [3, 9, 0, 3, 11, 9, 11, 10, 9, -1, -1, -1, -1, -1, -1, -1],
+    [9, 8, 10, 10, 8, 11, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
+    [4, 7, 8, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
+    [4, 3, 0, 7, 3, 4, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
+    [0, 1, 9, 8, 4, 7, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
+    [4, 1, 9, 4, 7, 1, 7, 3, 1, -1, -1, -1, -1, -1, -1, -1],
+    [1, 2, 10, 8, 4, 7, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
+    [3, 4, 7, 3, 0, 4, 1, 2, 10, -1, -1, -1, -1, -1, -1, -1],
+    [9, 2, 10, 9, 0, 2, 8, 4, 7, -1, -1, -1, -1, -1, -1, -1],
+    [2, 10, 9, 2, 9, 7, 2, 7, 3, 7, 9, 4, -1, -1, -1, -1],
+    [8, 4, 7, 3, 11, 2, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
+    [11, 4, 7, 11, 2, 4, 2, 0, 4, -1, -1, -1, -1, -1, -1, -1],
+    [9, 0, 1, 8, 4, 7, 2, 3, 11, -1, -1, -1, -1, -1, -1, -1],
+    [4, 7, 11, 9, 4, 11, 9, 11, 2, 9, 2, 1, -1, -1, -1, -1],
+    [3, 10, 1, 3, 11, 10, 7, 8, 4, -1, -1, -1, -1, -1, -1, -1],
+    [1, 11, 10, 1, 4, 11, 1, 0, 4, 7, 11, 4, -1, -1, -1, -1],
+    [4, 7, 8, 9, 0, 11, 9, 11, 10, 11, 0, 3, -1, -1, -1, -1],
+    [4, 7, 11, 4, 11, 9, 9, 11, 10, -1, -1, -1, -1, -1, -1, -1],
+    [9, 5, 4, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
+    [9, 5, 4, 0, 8, 3, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
+    [0, 5, 4, 1, 5, 0, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
+    [8, 5, 4, 8, 3, 5, 3, 1, 5, -1, -1, -1, -1, -1, -1, -1],
+    [1, 2, 10, 9, 5, 4, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
+    [3, 0, 8, 1, 2, 10, 4, 9, 5, -1, -1, -1, -1, -1, -1, -1],
+    [5, 2, 10, 5, 4, 2, 4, 0, 2, -1, -1, -1, -1, -1, -1, -1],
+    [2, 10, 5, 3, 2, 5, 3, 5, 4, 3, 4, 8, -1, -1, -1, -1],
+    [9, 5, 4, 2, 3, 11, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
+    [0, 11, 2, 0, 8, 11, 4, 9, 5, -1, -1, -1, -1, -1, -1, -1],
+    [0, 5, 4, 0, 1, 5, 2, 3, 11, -1, -1, -1, -1, -1, -1, -1],
+    [2, 1, 5, 2, 5, 8, 2, 8, 11, 4, 8, 5, -1, -1, -1, -1],
+    [10, 3, 11, 10, 1, 3, 9, 5, 4, -1, -1, -1, -1, -1, -1, -1],
+    [4, 9, 5, 0, 8, 1, 8, 10, 1, 8, 11, 10, -1, -1, -1, -1],
+    [5, 4, 0, 5, 0, 11, 5, 11, 10, 11, 0, 3, -1, -1, -1, -1],
+    [5, 4, 8, 5, 8, 10, 10, 8, 11, -1, -1, -1, -1, -1, -1, -1],
+    [9, 7, 8, 5, 7, 9, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
+    [9, 3, 0, 9, 5, 3, 5, 7, 3, -1, -1, -1, -1, -1, -1, -1],
+    [0, 7, 8, 0, 1, 7, 1, 5, 7, -1, -1, -1, -1, -1, -1, -1],
+    [1, 5, 3, 3, 5, 7, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
+    [9, 7, 8, 9, 5, 7, 10, 1, 2, -1, -1, -1, -1, -1, -1, -1],
+    [10, 1, 2, 9, 5, 0, 5, 3, 0, 5, 7, 3, -1, -1, -1, -1],
+    [8, 0, 2, 8, 2, 5, 8, 5, 7, 10, 5, 2, -1, -1, -1, -1],
+    [2, 10, 5, 2, 5, 3, 3, 5, 7, -1, -1, -1, -1, -1, -1, -1],
+    [7, 9, 5, 7, 8, 9, 3, 11, 2, -1, -1, -1, -1, -1, -1, -1],
+    [9, 5, 7, 9, 7, 2, 9, 2, 0, 2, 7, 11, -1, -1, -1, -1],
+    [2, 3, 11, 0, 1, 8, 1, 7, 8, 1, 5, 7, -1, -1, -1, -1],
+    [11, 2, 1, 11, 1, 7, 7, 1, 5, -1, -1, -1, -1, -1, -1, -1],
+    [9, 5, 8, 8, 5, 7, 10, 1, 3, 10, 3, 11, -1, -1, -1, -1],
+    [5, 7, 0, 5, 0, 9, 7, 11, 0, 1, 0, 10, 11, 10, 0, -1],
+    [11, 10, 0, 11, 0, 3, 10, 5, 0, 8, 0, 7, 5, 7, 0, -1],
+    [11, 10, 5, 7, 11, 5, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
+    [10, 6, 5, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
+    [0, 8, 3, 5, 10, 6, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
+    [9, 0, 1, 5, 10, 6, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
+    [1, 8, 3, 1, 9, 8, 5, 10, 6, -1, -1, -1, -1, -1, -1, -1],
+    [1, 6, 5, 2, 6, 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
+    [1, 6, 5, 1, 2, 6, 3, 0, 8, -1, -1, -1, -1, -1, -1, -1],
+    [9, 6, 5, 9, 0, 6, 0, 2, 6, -1, -1, -1, -1, -1, -1, -1],
+    [5, 9, 8, 5, 8, 2, 5, 2, 6, 3, 2, 8, -1, -1, -1, -1],
+    [2, 3, 11, 10, 6, 5, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
+    [11, 0, 8, 11, 2, 0, 10, 6, 5, -1, -1, -1, -1, -1, -1, -1],
+    [0, 1, 9, 2, 3, 11, 5, 10, 6, -1, -1, -1, -1, -1, -1, -1],
+    [5, 10, 6, 1, 9, 2, 9, 11, 2, 9, 8, 11, -1, -1, -1, -1],
+    [6, 3, 11, 6, 5, 3, 5, 1, 3, -1, -1, -1, -1, -1, -1, -1],
+    [0, 8, 11, 0, 11, 5, 0, 5, 1, 5, 11, 6, -1, -1, -1, -1],
+    [3, 11, 6, 0, 3, 6, 0, 6, 5, 0, 5, 9, -1, -1, -1, -1],
+    [6, 5, 9, 6, 9, 11, 11, 9, 8, -1, -1, -1, -1, -1, -1, -1],
+    [5, 10, 6, 4, 7, 8, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
+    [4, 3, 0, 4, 7, 3, 6, 5, 10, -1, -1, -1, -1, -1, -1, -1],
+    [1, 9, 0, 5, 10, 6, 8, 4, 7, -1, -1, -1, -1, -1, -1, -1],
+    [10, 6, 5, 1, 9, 7, 1, 7, 3, 7, 9, 4, -1, -1, -1, -1],
+    [6, 1, 2, 6, 5, 1, 4, 7, 8, -1, -1, -1, -1, -1, -1, -1],
+    [1, 2, 5, 5, 2, 6, 3, 0, 4, 3, 4, 7, -1, -1, -1, -1],
+    [8, 4, 7, 9, 0, 5, 0, 6, 5, 0, 2, 6, -1, -1, -1, -1],
+    [7, 3, 9, 7, 9, 4, 3, 2, 9, 5, 9, 6, 2, 6, 9, -1],
+    [3, 11, 2, 7, 8, 4, 10, 6, 5, -1, -1, -1, -1, -1, -1, -1],
+    [5, 10, 6, 4, 7, 2, 4, 2, 0, 2, 7, 11, -1, -1, -1, -1],
+    [0, 1, 9, 4, 7, 8, 2, 3, 11, 5, 10, 6, -1, -1, -1, -1],
+    [9, 2, 1, 9, 11, 2, 9, 4, 11, 7, 11, 4, 5, 10, 6, -1],
+    [8, 4, 7, 3, 11, 5, 3, 5, 1, 5, 11, 6, -1, -1, -1, -1],
+    [5, 1, 11, 5, 11, 6, 1, 0, 11, 7, 11, 4, 0, 4, 11, -1],
+    [0, 5, 9, 0, 6, 5, 0, 3, 6, 11, 6, 3, 8, 4, 7, -1],
+    [6, 5, 9, 6, 9, 11, 4, 7, 9, 7, 11, 9, -1, -1, -1, -1],
+    [10, 4, 9, 6, 4, 10, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
+    [4, 10, 6, 4, 9, 10, 0, 8, 3, -1, -1, -1, -1, -1, -1, -1],
+    [10, 0, 1, 10, 6, 0, 6, 4, 0, -1, -1, -1, -1, -1, -1, -1],
+    [8, 3, 1, 8, 1, 6, 8, 6, 4, 6, 1, 10, -1, -1, -1, -1],
+    [1, 4, 9, 1, 2, 4, 2, 6, 4, -1, -1, -1, -1, -1, -1, -1],
+    [3, 0, 8, 1, 2, 9, 2, 4, 9, 2, 6, 4, -1, -1, -1, -1],
+    [0, 2, 4, 4, 2, 6, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
+    [8, 3, 2, 8, 2, 4, 4, 2, 6, -1, -1, -1, -1, -1, -1, -1],
+    [10, 4, 9, 10, 6, 4, 11, 2, 3, -1, -1, -1, -1, -1, -1, -1],
+    [0, 8, 2, 2, 8, 11, 4, 9, 10, 4, 10, 6, -1, -1, -1, -1],
+    [3, 11, 2, 0, 1, 6, 0, 6, 4, 6, 1, 10, -1, -1, -1, -1],
+    [6, 4, 1, 6, 1, 10, 4, 8, 1, 2, 1, 11, 8, 11, 1, -1],
+    [9, 6, 4, 9, 3, 6, 9, 1, 3, 11, 6, 3, -1, -1, -1, -1],
+    [8, 11, 1, 8, 1, 0, 11, 6, 1, 9, 1, 4, 6, 4, 1, -1],
+    [3, 11, 6, 3, 6, 0, 0, 6, 4, -1, -1, -1, -1, -1, -1, -1],
+    [6, 4, 8, 11, 6, 8, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
+    [7, 10, 6, 7, 8, 10, 8, 9, 10, -1, -1, -1, -1, -1, -1, -1],
+    [0, 7, 3, 0, 10, 7, 0, 9, 10, 6, 7, 10, -1, -1, -1, -1],
+    [10, 6, 7, 1, 10, 7, 1, 7, 8, 1, 8, 0, -1, -1, -1, -1],
+    [10, 6, 7, 10, 7, 1, 1, 7, 3, -1, -1, -1, -1, -1, -1, -1],
+    [1, 2, 6, 1, 6, 8, 1, 8, 9, 8, 6, 7, -1, -1, -1, -1],
+    [2, 6, 9, 2, 9, 1, 6, 7, 9, 0, 9, 3, 7, 3, 9, -1],
+    [7, 8, 0, 7, 0, 6, 6, 0, 2, -1, -1, -1, -1, -1, -1, -1],
+    [7, 3, 2, 6, 7, 2, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
+    [2, 3, 11, 10, 6, 8, 10, 8, 9, 8, 6, 7, -1, -1, -1, -1],
+    [2, 0, 7, 2, 7, 11, 0, 9, 7, 6, 7, 10, 9, 10, 7, -1],
+    [1, 8, 0, 1, 7, 8, 1, 10, 7, 6, 7, 10, 2, 3, 11, -1],
+    [11, 2, 1, 11, 1, 7, 10, 6, 1, 6, 7, 1, -1, -1, -1, -1],
+    [8, 9, 6, 8, 6, 7, 9, 1, 6, 11, 6, 3, 1, 3, 6, -1],
+    [0, 9, 1, 11, 6, 7, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
+    [7, 8, 0, 7, 0, 6, 3, 11, 0, 11, 6, 0, -1, -1, -1, -1],
+    [7, 11, 6, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
+    [7, 6, 11, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
+    [3, 0, 8, 11, 7, 6, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
+    [0, 1, 9, 11, 7, 6, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
+    [8, 1, 9, 8, 3, 1, 11, 7, 6, -1, -1, -1, -1, -1, -1, -1],
+    [10, 1, 2, 6, 11, 7, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
+    [1, 2, 10, 3, 0, 8, 6, 11, 7, -1, -1, -1, -1, -1, -1, -1],
+    [2, 9, 0, 2, 10, 9, 6, 11, 7, -1, -1, -1, -1, -1, -1, -1],
+    [6, 11, 7, 2, 10, 3, 10, 8, 3, 10, 9, 8, -1, -1, -1, -1],
+    [7, 2, 3, 6, 2, 7, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
+    [7, 0, 8, 7, 6, 0, 6, 2, 0, -1, -1, -1, -1, -1, -1, -1],
+    [2, 7, 6, 2, 3, 7, 0, 1, 9, -1, -1, -1, -1, -1, -1, -1],
+    [1, 6, 2, 1, 8, 6, 1, 9, 8, 8, 7, 6, -1, -1, -1, -1],
+    [10, 7, 6, 10, 1, 7, 1, 3, 7, -1, -1, -1, -1, -1, -1, -1],
+    [10, 7, 6, 1, 7, 10, 1, 8, 7, 1, 0, 8, -1, -1, -1, -1],
+    [0, 3, 7, 0, 7, 10, 0, 10, 9, 6, 10, 7, -1, -1, -1, -1],
+    [7, 6, 10, 7, 10, 8, 8, 10, 9, -1, -1, -1, -1, -1, -1, -1],
+    [6, 8, 4, 11, 8, 6, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
+    [3, 6, 11, 3, 0, 6, 0, 4, 6, -1, -1, -1, -1, -1, -1, -1],
+    [8, 6, 11, 8, 4, 6, 9, 0, 1, -1, -1, -1, -1, -1, -1, -1],
+    [9, 4, 6, 9, 6, 3, 9, 3, 1, 11, 3, 6, -1, -1, -1, -1],
+    [6, 8, 4, 6, 11, 8, 2, 10, 1, -1, -1, -1, -1, -1, -1, -1],
+    [1, 2, 10, 3, 0, 11, 0, 6, 11, 0, 4, 6, -1, -1, -1, -1],
+    [4, 11, 8, 4, 6, 11, 0, 2, 9, 2, 10, 9, -1, -1, -1, -1],
+    [10, 9, 3, 10, 3, 2, 9, 4, 3, 11, 3, 6, 4, 6, 3, -1],
+    [8, 2, 3, 8, 4, 2, 4, 6, 2, -1, -1, -1, -1, -1, -1, -1],
+    [0, 4, 2, 4, 6, 2, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
+    [1, 9, 0, 2, 3, 4, 2, 4, 6, 4, 3, 8, -1, -1, -1, -1],
+    [1, 9, 4, 1, 4, 2, 2, 4, 6, -1, -1, -1, -1, -1, -1, -1],
+    [8, 1, 3, 8, 6, 1, 8, 4, 6, 6, 10, 1, -1, -1, -1, -1],
+    [10, 1, 0, 10, 0, 6, 6, 0, 4, -1, -1, -1, -1, -1, -1, -1],
+    [4, 6, 3, 4, 3, 8, 6, 10, 3, 0, 3, 9, 10, 9, 3, -1],
+    [10, 9, 4, 6, 10, 4, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
+    [4, 9, 5, 7, 6, 11, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
+    [0, 8, 3, 4, 9, 5, 11, 7, 6, -1, -1, -1, -1, -1, -1, -1],
+    [5, 0, 1, 5, 4, 0, 7, 6, 11, -1, -1, -1, -1, -1, -1, -1],
+    [11, 7, 6, 8, 3, 4, 3, 5, 4, 3, 1, 5, -1, -1, -1, -1],
+    [9, 5, 4, 10, 1, 2, 7, 6, 11, -1, -1, -1, -1, -1, -1, -1],
+    [6, 11, 7, 1, 2, 10, 0, 8, 3, 4, 9, 5, -1, -1, -1, -1],
+    [7, 6, 11, 5, 4, 10, 4, 2, 10, 4, 0, 2, -1, -1, -1, -1],
+    [3, 4, 8, 3, 5, 4, 3, 2, 5, 10, 5, 2, 11, 7, 6, -1],
+    [7, 2, 3, 7, 6, 2, 5, 4, 9, -1, -1, -1, -1, -1, -1, -1],
+    [9, 5, 4, 0, 8, 6, 0, 6, 2, 6, 8, 7, -1, -1, -1, -1],
+    [3, 6, 2, 3, 7, 6, 1, 5, 0, 5, 4, 0, -1, -1, -1, -1],
+    [6, 2, 8, 6, 8, 7, 2, 1, 8, 4, 8, 5, 1, 5, 8, -1],
+    [9, 5, 4, 10, 1, 6, 1, 7, 6, 1, 3, 7, -1, -1, -1, -1],
+    [1, 6, 10, 1, 7, 6, 1, 0, 7, 8, 7, 0, 9, 5, 4, -1],
+    [4, 0, 10, 4, 10, 5, 0, 3, 10, 6, 10, 7, 3, 7, 10, -1],
+    [7, 6, 10, 7, 10, 8, 5, 4, 10, 4, 8, 10, -1, -1, -1, -1],
+    [6, 9, 5, 6, 11, 9, 11, 8, 9, -1, -1, -1, -1, -1, -1, -1],
+    [3, 6, 11, 0, 6, 3, 0, 5, 6, 0, 9, 5, -1, -1, -1, -1],
+    [0, 11, 8, 0, 5, 11, 0, 1, 5, 5, 6, 11, -1, -1, -1, -1],
+    [6, 11, 3, 6, 3, 5, 5, 3, 1, -1, -1, -1, -1, -1, -1, -1],
+    [1, 2, 10, 9, 5, 11, 9, 11, 8, 11, 5, 6, -1, -1, -1, -1],
+    [0, 11, 3, 0, 6, 11, 0, 9, 6, 5, 6, 9, 1, 2, 10, -1],
+    [11, 8, 5, 11, 5, 6, 8, 0, 5, 10, 5, 2, 0, 2, 5, -1],
+    [6, 11, 3, 6, 3, 5, 2, 10, 3, 10, 5, 3, -1, -1, -1, -1],
+    [5, 8, 9, 5, 2, 8, 5, 6, 2, 3, 8, 2, -1, -1, -1, -1],
+    [9, 5, 6, 9, 6, 0, 0, 6, 2, -1, -1, -1, -1, -1, -1, -1],
+    [1, 5, 8, 1, 8, 0, 5, 6, 8, 3, 8, 2, 6, 2, 8, -1],
+    [1, 5, 6, 2, 1, 6, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
+    [1, 3, 6, 1, 6, 10, 3, 8, 6, 5, 6, 9, 8, 9, 6, -1],
+    [10, 1, 0, 10, 0, 6, 9, 5, 0, 5, 6, 0, -1, -1, -1, -1],
+    [0, 3, 8, 5, 6, 10, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
+    [10, 5, 6, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
+    [11, 5, 10, 7, 5, 11, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
+    [11, 5, 10, 11, 7, 5, 8, 3, 0, -1, -1, -1, -1, -1, -1, -1],
+    [5, 11, 7, 5, 10, 11, 1, 9, 0, -1, -1, -1, -1, -1, -1, -1],
+    [10, 7, 5, 10, 11, 7, 9, 8, 1, 8, 3, 1, -1, -1, -1, -1],
+    [11, 1, 2, 11, 7, 1, 7, 5, 1, -1, -1, -1, -1, -1, -1, -1],
+    [0, 8, 3, 1, 2, 7, 1, 7, 5, 7, 2, 11, -1, -1, -1, -1],
+    [9, 7, 5, 9, 2, 7, 9, 0, 2, 2, 11, 7, -1, -1, -1, -1],
+    [7, 5, 2, 7, 2, 11, 5, 9, 2, 3, 2, 8, 9, 8, 2, -1],
+    [2, 5, 10, 2, 3, 5, 3, 7, 5, -1, -1, -1, -1, -1, -1, -1],
+    [8, 2, 0, 8, 5, 2, 8, 7, 5, 10, 2, 5, -1, -1, -1, -1],
+    [9, 0, 1, 5, 10, 3, 5, 3, 7, 3, 10, 2, -1, -1, -1, -1],
+    [9, 8, 2, 9, 2, 1, 8, 7, 2, 10, 2, 5, 7, 5, 2, -1],
+    [1, 3, 5, 3, 7, 5, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
+    [0, 8, 7, 0, 7, 1, 1, 7, 5, -1, -1, -1, -1, -1, -1, -1],
+    [9, 0, 3, 9, 3, 5, 5, 3, 7, -1, -1, -1, -1, -1, -1, -1],
+    [9, 8, 7, 5, 9, 7, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
+    [5, 8, 4, 5, 10, 8, 10, 11, 8, -1, -1, -1, -1, -1, -1, -1],
+    [5, 0, 4, 5, 11, 0, 5, 10, 11, 11, 3, 0, -1, -1, -1, -1],
+    [0, 1, 9, 8, 4, 10, 8, 10, 11, 10, 4, 5, -1, -1, -1, -1],
+    [10, 11, 4, 10, 4, 5, 11, 3, 4, 9, 4, 1, 3, 1, 4, -1],
+    [2, 5, 1, 2, 8, 5, 2, 11, 8, 4, 5, 8, -1, -1, -1, -1],
+    [0, 4, 11, 0, 11, 3, 4, 5, 11, 2, 11, 1, 5, 1, 11, -1],
+    [0, 2, 5, 0, 5, 9, 2, 11, 5, 4, 5, 8, 11, 8, 5, -1],
+    [9, 4, 5, 2, 11, 3, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
+    [2, 5, 10, 3, 5, 2, 3, 4, 5, 3, 8, 4, -1, -1, -1, -1],
+    [5, 10, 2, 5, 2, 4, 4, 2, 0, -1, -1, -1, -1, -1, -1, -1],
+    [3, 10, 2, 3, 5, 10, 3, 8, 5, 4, 5, 8, 0, 1, 9, -1],
+    [5, 10, 2, 5, 2, 4, 1, 9, 2, 9, 4, 2, -1, -1, -1, -1],
+    [8, 4, 5, 8, 5, 3, 3, 5, 1, -1, -1, -1, -1, -1, -1, -1],
+    [0, 4, 5, 1, 0, 5, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
+    [8, 4, 5, 8, 5, 3, 9, 0, 5, 0, 3, 5, -1, -1, -1, -1],
+    [9, 4, 5, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
+    [4, 11, 7, 4, 9, 11, 9, 10, 11, -1, -1, -1, -1, -1, -1, -1],
+    [0, 8, 3, 4, 9, 7, 9, 11, 7, 9, 10, 11, -1, -1, -1, -1],
+    [1, 10, 11, 1, 11, 4, 1, 4, 0, 7, 4, 11, -1, -1, -1, -1],
+    [3, 1, 4, 3, 4, 8, 1, 10, 4, 7, 4, 11, 10, 11, 4, -1],
+    [4, 11, 7, 9, 11, 4, 9, 2, 11, 9, 1, 2, -1, -1, -1, -1],
+    [9, 7, 4, 9, 11, 7, 9, 1, 11, 2, 11, 1, 0, 8, 3, -1],
+    [11, 7, 4, 11, 4, 2, 2, 4, 0, -1, -1, -1, -1, -1, -1, -1],
+    [11, 7, 4, 11, 4, 2, 8, 3, 4, 3, 2, 4, -1, -1, -1, -1],
+    [2, 9, 10, 2, 7, 9, 2, 3, 7, 7, 4, 9, -1, -1, -1, -1],
+    [9, 10, 7, 9, 7, 4, 10, 2, 7, 8, 7, 0, 2, 0, 7, -1],
+    [3, 7, 10, 3, 10, 2, 7, 4, 10, 1, 10, 0, 4, 0, 10, -1],
+    [1, 10, 2, 8, 7, 4, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
+    [4, 9, 1, 4, 1, 7, 7, 1, 3, -1, -1, -1, -1, -1, -1, -1],
+    [4, 9, 1, 4, 1, 7, 0, 8, 1, 8, 7, 1, -1, -1, -1, -1],
+    [4, 0, 3, 7, 4, 3, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
+    [4, 8, 7, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
+    [9, 10, 8, 10, 11, 8, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
+    [3, 0, 9, 3, 9, 11, 11, 9, 10, -1, -1, -1, -1, -1, -1, -1],
+    [0, 1, 10, 0, 10, 8, 8, 10, 11, -1, -1, -1, -1, -1, -1, -1],
+    [3, 1, 10, 11, 3, 10, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
+    [1, 2, 11, 1, 11, 9, 9, 11, 8, -1, -1, -1, -1, -1, -1, -1],
+    [3, 0, 9, 3, 9, 11, 1, 2, 9, 2, 11, 9, -1, -1, -1, -1],
+    [0, 2, 11, 8, 0, 11, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
+    [3, 2, 11, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
+    [2, 3, 8, 2, 8, 10, 10, 8, 9, -1, -1, -1, -1, -1, -1, -1],
+    [9, 10, 2, 0, 9, 2, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
+    [2, 3, 8, 2, 8, 10, 0, 1, 8, 1, 10, 8, -1, -1, -1, -1],
+    [1, 10, 2, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
+    [1, 3, 8, 9, 1, 8, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
+    [0, 9, 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
+    [0, 3, 8, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
+    [-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1]]
+    cdef np.float64_t vertlist[12][3]
+    cdef int cubeindex = 0
+    cdef int n
+    cdef int nt = 0
+    for n in range(8):
+        if gv[n] < isovalue:
+            cubeindex |= (1 << n)
+    if edge_table[cubeindex] == 0:
+        return 0
+    if (edge_table[cubeindex] & 1): # 0,0,0 with 1,0,0
+        vertex_interp(gv[0], gv[1], isovalue, vertlist[0],
+                      dds, x, y, z, 0, 1)
+    if (edge_table[cubeindex] & 2): # 1,0,0 with 1,1,0
+        vertex_interp(gv[1], gv[2], isovalue, vertlist[1],
+                      dds, x, y, z, 1, 2)
+    if (edge_table[cubeindex] & 4): # 1,1,0 with 0,1,0
+        vertex_interp(gv[2], gv[3], isovalue, vertlist[2],
+                      dds, x, y, z, 2, 3)
+    if (edge_table[cubeindex] & 8): # 0,1,0 with 0,0,0
+        vertex_interp(gv[3], gv[0], isovalue, vertlist[3],
+                      dds, x, y, z, 3, 0)
+    if (edge_table[cubeindex] & 16): # 0,0,1 with 1,0,1
+        vertex_interp(gv[4], gv[5], isovalue, vertlist[4],
+                      dds, x, y, z, 4, 5)
+    if (edge_table[cubeindex] & 32): # 1,0,1 with 1,1,1
+        vertex_interp(gv[5], gv[6], isovalue, vertlist[5],
+                      dds, x, y, z, 5, 6)
+    if (edge_table[cubeindex] & 64): # 1,1,1 with 0,1,1
+        vertex_interp(gv[6], gv[7], isovalue, vertlist[6],
+                      dds, x, y, z, 6, 7)
+    if (edge_table[cubeindex] & 128): # 0,1,1 with 0,0,1
+        vertex_interp(gv[7], gv[4], isovalue, vertlist[7],
+                      dds, x, y, z, 7, 4)
+    if (edge_table[cubeindex] & 256): # 0,0,0 with 0,0,1
+        vertex_interp(gv[0], gv[4], isovalue, vertlist[8],
+                      dds, x, y, z, 0, 4)
+    if (edge_table[cubeindex] & 512): # 1,0,0 with 1,0,1
+        vertex_interp(gv[1], gv[5], isovalue, vertlist[9],
+                      dds, x, y, z, 1, 5)
+    if (edge_table[cubeindex] & 1024): # 1,1,0 with 1,1,1
+        vertex_interp(gv[2], gv[6], isovalue, vertlist[10],
+                      dds, x, y, z, 2, 6)
+    if (edge_table[cubeindex] & 2048): # 0,1,0 with 0,1,1
+        vertex_interp(gv[3], gv[7], isovalue, vertlist[11],
+                      dds, x, y, z, 3, 7)
+    n = 0
+    while 1:
+        triangles.current = AddTriangle(triangles.current,
+                    vertlist[tri_table[cubeindex][n  ]],
+                    vertlist[tri_table[cubeindex][n+1]],
+                    vertlist[tri_table[cubeindex][n+2]])
+        triangles.count += 1
+        nt += 1
+        if triangles.first == NULL:
+            triangles.first = triangles.current
+        n += 3
+        if tri_table[cubeindex][n] == -1: break
+    return nt
+    
+
 cdef class GridFace:
     cdef int direction
     cdef public np.float64_t coord
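
For reference, the area term in the flux loop above is Heron's
formula, applied to the side lengths accumulated in fv.  A minimal
NumPy sketch of the same computation (the function name and vertex
arguments are illustrative only, not part of the committed code):

    import numpy as np

    def triangle_area(p0, p1, p2):
        # Side lengths, matching the fv[] accumulation in the flux loop.
        a = np.linalg.norm(p0 - p2)
        b = np.linalg.norm(p1 - p0)
        c = np.linalg.norm(p2 - p1)
        s = 0.5 * (a + b + c)  # semi-perimeter
        return np.sqrt(s * (s - a) * (s - b) * (s - c))

Each triangle then contributes the interpolated field value, times the
flux vector dotted with the (inward-pointing) unit normal, weighted by
this area, to the running flux total.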


--- a/yt/utilities/_amr_utils/misc_utilities.pyx	Fri Oct 14 12:46:08 2011 -0700
+++ b/yt/utilities/_amr_utils/misc_utilities.pyx	Fri Oct 14 13:01:17 2011 -0700
@@ -27,6 +27,10 @@
 cimport numpy as np
 cimport cython
 
+cdef extern from "stdlib.h":
+    # NOTE that size_t might not be int
+    void *alloca(int)
+
 @cython.boundscheck(False)
 @cython.wraparound(False)
 @cython.cdivision(True)
@@ -140,3 +144,63 @@
             rv[fi] = field[ind[0], ind[1], ind[2]]
         return rv
     raise KeyError
+
+@cython.boundscheck(False)
+@cython.wraparound(False)
+@cython.cdivision(True)
+def kdtree_get_choices(np.ndarray[np.float64_t, ndim=3] data,
+                       np.ndarray[np.float64_t, ndim=1] l_corner,
+                       np.ndarray[np.float64_t, ndim=1] r_corner):
+    cdef int i, j, k, dim, n_unique, best_dim, n_best, n_grids, addit, my_split
+    n_grids = data.shape[0]
+    cdef np.float64_t **uniquedims, *uniques, split
+    uniquedims = <np.float64_t **> alloca(3 * sizeof(np.float64_t*))
+    for i in range(3):
+        uniquedims[i] = <np.float64_t *> \
+                alloca(2*n_grids * sizeof(np.float64_t))
+    my_max = 0
+    for dim in range(3):
+        n_unique = 0
+        uniques = uniquedims[dim]
+        for i in range(n_grids):
+            # Check for disqualification
+            for j in range(2):
+                #print "Checking against", i,j,dim,data[i,j,dim]
+                if not (l_corner[dim] < data[i, j, dim] and
+                        data[i, j, dim] < r_corner[dim]):
+                    #print "Skipping ", data[i,j,dim]
+                    continue
+                skipit = 0
+                # Add our left ...
+                for k in range(n_unique):
+                    if uniques[k] == data[i, j, dim]:
+                        skipit = 1
+                        #print "Identified", uniques[k], data[i,j,dim], n_unique
+                        break
+                if skipit == 0:
+                    uniques[n_unique] = data[i, j, dim]
+                    n_unique += 1
+        if n_unique > my_max:
+            best_dim = dim
+            my_max = n_unique
+            my_split = (n_unique-1)/2
+    # I recognize how lame this is.
+    cdef np.ndarray[np.float64_t, ndim=1] tarr = np.empty(my_max, dtype='float64')
+    for i in range(my_max):
+        #print "Setting tarr: ", i, uniquedims[best_dim][i]
+        tarr[i] = uniquedims[best_dim][i]
+    tarr.sort()
+    split = tarr[my_split]
+    cdef np.ndarray[np.uint8_t, ndim=1] less_ids = np.empty(n_grids, dtype='uint8')
+    cdef np.ndarray[np.uint8_t, ndim=1] greater_ids = np.empty(n_grids, dtype='uint8')
+    for i in range(n_grids):
+        if data[i, 0, best_dim] < split:
+            less_ids[i] = 1
+        else:
+            less_ids[i] = 0
+        if data[i, 1, best_dim] > split:
+            greater_ids[i] = 1
+        else:
+            greater_ids[i] = 0
+    # Return out unique values
+    return best_dim, split, less_ids.view("bool"), greater_ids.view("bool")
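
kdtree_get_choices is a C-level rewrite of the unique-boundary search
that amr_kdtree.py previously did with na.unique (the removed Python
version is visible in the next diff).  A rough NumPy equivalent of
what the loops compute, as a sketch only:

    import numpy as np

    def kdtree_get_choices_py(data, l_corner, r_corner):
        # data has shape (n_grids, 2, 3): left/right grid edges per axis.
        best_dim, best_choices = 0, np.empty(0)
        for d in range(3):
            vals = data[:, :, d]
            inside = (l_corner[d] < vals) & (vals < r_corner[d])
            choices = np.unique(vals[inside])  # sorted unique edges
            if choices.size > best_choices.size:
                best_dim, best_choices = d, choices
        split = best_choices[(best_choices.size - 1) // 2]
        less_ids = data[:, 0, best_dim] < split
        greater_ids = data[:, 1, best_dim] > split
        return best_dim, split, less_ids, greater_ids

The alloca scratch space avoids a Python-level allocation per call;
as the NOTE above says, the int argument type is a simplification.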


--- a/yt/utilities/amr_kdtree/amr_kdtree.py	Fri Oct 14 12:46:08 2011 -0700
+++ b/yt/utilities/amr_kdtree/amr_kdtree.py	Fri Oct 14 13:01:17 2011 -0700
@@ -28,7 +28,7 @@
 import numpy as na
 from yt.funcs import *
 from yt.visualization.volume_rendering.grid_partitioner import HomogenizedVolume
-from yt.utilities.amr_utils import PartitionedGrid
+from yt.utilities.amr_utils import PartitionedGrid, kdtree_get_choices
 from yt.utilities.performance_counters import yt_counters, time_function
 import yt.utilities.parallel_tools.parallel_analysis_interface as PT
 from copy import deepcopy
@@ -48,11 +48,11 @@
     chosen by specifying the `current_left` or `current_right`.
     """
     if(current_left is not None):
-        new_left = na.array([current_left[0],current_left[1],current_left[2]])
+        new_left = current_left.copy()
         new_left[split_dim] = split
         return new_left
     elif(current_right is not None):
-        new_right = na.array([current_right[0],current_right[1],current_right[2]])
+        new_right = current_right.copy()
         new_right[split_dim] = split
         return new_right
 
@@ -736,7 +736,8 @@
         thisnode.ri = na.rint((thisnode.r_corner-gle)/dds).astype('int32')
         thisnode.dims = (thisnode.ri - thisnode.li).astype('int32')
         # Here the cost is actually inversely proportional to 4**Level (empirical)
-        thisnode.cost = (na.prod(thisnode.dims)/4.**thisnode.grid.Level).astype('int64')
+        #thisnode.cost = (na.prod(thisnode.dims)/4.**thisnode.grid.Level).astype('int64')
+        thisnode.cost = 1.0
         # Here is the old way
         # thisnode.cost = na.prod(thisnode.dims).astype('int64')
 
@@ -1060,19 +1061,9 @@
         # For some reason doing dim 0 separately is slightly faster.
         # This could be rewritten to all be in the loop below.
 
-        best_dim = 0
-        best_choices = na.unique(data[:,:,0][(current_node.l_corner[0] < data[:,:,0]) &
-                                             (data[:,:,0] < current_node.r_corner[0])])
-        
-        for d in range(1,3):
-            choices = na.unique(data[:,:,d][(current_node.l_corner[d] < data[:,:,d]) &
-                                            (data[:,:,d] < current_node.r_corner[d])])
-
-            if choices.size > best_choices.size:
-                best_choices, best_dim = choices, d
-
-        split = best_choices[(len(best_choices)-1)/2]
-        return data[:,:,best_dim], best_dim, split
+        best_dim, split, less_ids, greater_ids = \
+            kdtree_get_choices(data, current_node.l_corner, current_node.r_corner)
+        return data[:,:,best_dim], best_dim, split, less_ids, greater_ids
 
     def _build_dividing_node(self, current_node):
         '''
@@ -1080,12 +1071,14 @@
         left and right children.
         '''
         
-        data,best_dim,split = self._get_choices(current_node)
+        data,best_dim,split,less_ids,greater_ids = self._get_choices(current_node)
 
         current_node.split_ax = best_dim
         current_node.split_pos = split
-        less_ids = na.nonzero(data[:,0] < split)[0]
-        greater_ids = na.nonzero(split < data[:,1])[0]
+        #less_ids0 = (data[:,0] < split)
+        #greater_ids0 = (split < data[:,1])
+        #assert(na.all(less_ids0 == less_ids))
+        #assert(na.all(greater_ids0 == greater_ids))
         
         current_node.left_child = MasterNode(my_id=_lchild_id(current_node.id),
                                              parent=current_node,
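
One subtlety of the switch: the old na.nonzero(...)[0] calls produced
integer index arrays, while kdtree_get_choices returns boolean masks
(hence the .view("bool") in the Cython helper); the commented-out
assertions above sketch exactly this equivalence check.  For indexing
purposes the two are interchangeable, e.g.:

    import numpy as na
    data0 = na.array([0.1, 0.5, 0.9])
    less_mask = data0 < 0.5                # boolean mask (new style)
    less_idx = na.nonzero(data0 < 0.5)[0]  # index array (old style)
    assert na.array_equal(data0[less_mask], data0[less_idx])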


--- a/yt/utilities/answer_testing/output_tests.py	Fri Oct 14 12:46:08 2011 -0700
+++ b/yt/utilities/answer_testing/output_tests.py	Fri Oct 14 13:01:17 2011 -0700
@@ -49,12 +49,12 @@
         self.acceptable = acceptable
 
     def __repr__(self):
-        return "ValueDelta: Delta %0.5e, max of %0.5e" % (
+        return "ValueDelta: Delta %s, max of %s" % (
             self.delta, self.acceptable)
 
 class ArrayDelta(ValueDelta):
     def __repr__(self):
-        return "ArrayDelta: Delta %0.5e, max of %0.5e" % (
+        return "ArrayDelta: Delta %s, max of %s" % (
             self.delta, self.acceptable)
 
 class ShapeMismatch(RegressionTestException):
@@ -63,7 +63,7 @@
         self.current_shape = current_shape
 
     def __repr__(self):
-        return "Shape Mismatch: old_buffer %s, current_buffer %0.5e" % (
+        return "Shape Mismatch: old_buffer %s, current_buffer %s" % (
             self.old_shape, self.current_shape)
 
 class RegressionTest(object):
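
The %0.5e to %s change in these __repr__ methods matters because
delta and acceptable may be arrays rather than scalars, and the
scientific-notation format code only accepts a single float.  A quick
illustration:

    import numpy as na
    delta = na.array([1.2e-03, 4.5e-02])
    # "%0.5e" % delta raises TypeError for a multi-element array;
    # "%s" handles scalars and arrays alike:
    print("ValueDelta: Delta %s, max of %s" % (delta, 1.0e-02))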


--- a/yt/utilities/parallel_tools/parallel_analysis_interface.py	Fri Oct 14 12:46:08 2011 -0700
+++ b/yt/utilities/parallel_tools/parallel_analysis_interface.py	Fri Oct 14 13:01:17 2011 -0700
@@ -211,13 +211,11 @@
     """
     @wraps(f1)
     def in_order(*args, **kwargs):
-        MPI.COMM_WORLD.Barrier()
         if MPI.COMM_WORLD.rank == 0:
             f1(*args, **kwargs)
         MPI.COMM_WORLD.Barrier()
         if MPI.COMM_WORLD.rank != 0:
             f2(*args, **kwargs)
-        MPI.COMM_WORLD.Barrier()
     if not parallel_capable: return f1
     return in_order
 
@@ -1255,9 +1253,8 @@
         if not obj._distributed: return True
         return (obj._owner == MPI.COMM_WORLD.rank)
 
-    def _send_quadtree(self, target, qt, tgd, args):
+    def _send_quadtree(self, target, buf, tgd, args):
         sizebuf = na.zeros(1, 'int64')
-        buf = qt.tobuffer()
         sizebuf[0] = buf[0].size
         MPI.COMM_WORLD.Send([sizebuf, MPI.LONG], dest=target)
         MPI.COMM_WORLD.Send([buf[0], MPI.INT], dest=target)
@@ -1273,9 +1270,7 @@
         MPI.COMM_WORLD.Recv([buf[0], MPI.INT], source=target)
         MPI.COMM_WORLD.Recv([buf[1], MPI.DOUBLE], source=target)
         MPI.COMM_WORLD.Recv([buf[2], MPI.DOUBLE], source=target)
-        qt = QuadTree(tgd, args[2])
-        qt.frombuffer(*buf)
-        return qt
+        return buf
 
     @parallel_passthrough
     def merge_quadtree_buffers(self, qt):
@@ -1294,13 +1289,16 @@
             if (mask & rank) != 0:
                 target = (rank & ~mask) % size
                 #print "SENDING FROM %02i to %02i" % (rank, target)
-                self._send_quadtree(target, qt, tgd, args)
+                buf = qt.tobuffer()
+                self._send_quadtree(target, buf, tgd, args)
                 #qt = self._recv_quadtree(target, tgd, args)
             else:
                 target = (rank | mask)
                 if target < size:
                     #print "RECEIVING FROM %02i on %02i" % (target, rank)
-                    qto = self._recv_quadtree(target, tgd, args)
+                    buf = self._recv_quadtree(target, tgd, args)
+                    qto = QuadTree(tgd, args[2])
+                    qto.frombuffer(*buf)
                     merge_quadtrees(qt, qto)
                     del qto
                     #self._send_quadtree(target, qt, tgd, args)
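
The surrounding loop (only partly visible in this hunk) implements a
pairwise tree reduction over MPI ranks: at each mask value, ranks with
that bit set send their quadtree buffer to the partner rank with the
bit cleared, so the merged tree funnels down to rank 0.  A plain-Python
sketch of the pairing pattern, purely illustrative:

    size, mask = 8, 1
    active = list(range(size))
    while mask < size:
        for rank in active:
            if rank & mask:  # sender; drops out after this round
                print("%02i sends to %02i" % (rank, rank & ~mask))
            elif (rank | mask) < size:
                print("%02i receives from %02i" % (rank, rank | mask))
        active = [r for r in active if not (r & mask)]
        mask *= 2

The refactor itself just moves the tobuffer()/frombuffer() calls out
of _send_quadtree and _recv_quadtree, so those helpers now deal only
in raw buffers.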


--- a/yt/visualization/fixed_resolution.py	Fri Oct 14 12:46:08 2011 -0700
+++ b/yt/visualization/fixed_resolution.py	Fri Oct 14 13:01:17 2011 -0700
@@ -30,6 +30,7 @@
     axis_names
 import _MPL
 import numpy as na
+import weakref
 
 class FixedResolutionBuffer(object):
     def __init__(self, data_source, bounds, buff_size, antialias = True,
@@ -96,6 +97,10 @@
         self.axis = data_source.axis
         self.periodic = periodic
 
+        h = getattr(data_source, "hierarchy", None)
+        if h is not None:
+            h.plots.append(weakref.proxy(self))
+
         # Handle periodicity, just in case
         if self.data_source.axis < 3:
             DLE = self.pf.domain_left_edge
@@ -273,6 +278,17 @@
         else: data=self[field]
         numdisplay.display(data)    
 
+    @property
+    def limits(self):
+        rv = dict(x = None, y = None, z = None)
+        xax = x_dict[self.axis]
+        yax = y_dict[self.axis]
+        xn = axis_names[xax]
+        yn = axis_names[yax]
+        rv[xn] = (self.bounds[0], self.bounds[1])
+        rv[yn] = (self.bounds[2], self.bounds[3])
+        return rv
+
 class ObliqueFixedResolutionBuffer(FixedResolutionBuffer):
     """
     This object is a subclass of :class:`yt.visualization.fixed_resolution.FixedResolutionBuffer`
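
The new limits property maps the buffer bounds back onto the two
in-plane axis names, leaving the slice axis as None.  A hypothetical
usage sketch (sl stands in for any slice data source, and the axis
names shown assume an axis=2, i.e. z-axis, slice):

    frb = FixedResolutionBuffer(sl, (0.3, 0.7, 0.2, 0.8), (512, 512))
    frb.limits  # -> {'x': (0.3, 0.7), 'y': (0.2, 0.8), 'z': None}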


--- a/yt/visualization/plot_modifications.py	Fri Oct 14 12:46:08 2011 -0700
+++ b/yt/visualization/plot_modifications.py	Fri Oct 14 13:01:17 2011 -0700
@@ -50,8 +50,14 @@
         pass
 
     def convert_to_pixels(self, plot, coord, offset = True):
-        x0, x1 = plot.xlim
-        y0, y1 = plot.ylim
+        if plot.xlim is not None:
+            x0, x1 = plot.xlim
+        else:
+            x0, x1 = plot._axes.get_xlim()
+        if plot.ylim is not None:
+            y0, y1 = plot.ylim
+        else:
+            y0, y1 = plot._axes.get_ylim()
         l, b, width, height = mpl_get_bounds(plot._axes.bbox)
         dx = width / (x1-x0)
         dy = height / (y1-y0)
@@ -473,7 +479,7 @@
 class ImageLineCallback(LinePlotCallback):
     _type_name = "image_line"
 
-    def __init__(self, p1, p2, plot_args = None):
+    def __init__(self, p1, p2, data_coords=False, plot_args = None):
         """
         Plot from *p1* to *p2* (image plane coordinates)
         with *plot_args* fed into the plot.
@@ -484,19 +490,27 @@
         if plot_args is None: plot_args = {}
         self.plot_args = plot_args
         self._ids = []
+        self.data_coords = data_coords
 
     def __call__(self, plot):
         # We manually clear out any previous calls to this callback:
         plot._axes.lines = [l for l in plot._axes.lines if id(l) not in self._ids]
-        p1 = self.convert_to_pixels(plot, self.p1)
-        p2 = self.convert_to_pixels(plot, self.p2)
+        kwargs = self.plot_args.copy()
+        if self.data_coords and len(plot.image._A.shape) == 2:
+            p1 = self.convert_to_pixels(plot, self.p1)
+            p2 = self.convert_to_pixels(plot, self.p2)
+        else:
+            p1, p2 = self.p1, self.p2
+            if not self.data_coords:
+                kwargs["transform"] = plot._axes.transAxes
+
         px, py = (p1[0], p2[0]), (p1[1], p2[1])
 
         # Save state
         xx0, xx1 = plot._axes.get_xlim()
         yy0, yy1 = plot._axes.get_ylim()
         plot._axes.hold(True)
-        ii = plot._axes.plot(px, py, **self.plot_args)
+        ii = plot._axes.plot(px, py, **kwargs)
         self._ids.append(id(ii[0]))
         # Reset state
         plot._axes.set_xlim(xx0,xx1)
@@ -905,7 +919,7 @@
 
 class TextLabelCallback(PlotCallback):
     _type_name = "text"
-    def __init__(self, pos, text, data_coords=False,text_args = None):
+    def __init__(self, pos, text, data_coords=False, text_args = None):
         """
         Accepts a position in (0..1, 0..1) of the image, some text and
         optionally some text arguments. If data_coords is True,

Repository URL: https://bitbucket.org/yt_analysis/yt/

--

This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.


