[Yt-svn] commit/yt: 17 new changesets

Bitbucket commits-noreply at bitbucket.org
Mon Mar 28 12:45:02 PDT 2011


17 new changesets in yt:

http://bitbucket.org/yt_analysis/yt/changeset/250535db11f1/
changeset:   r3888:250535db11f1
branch:      yt
user:        sskory
date:        2011-03-15 19:38:25
summary:     In the middle of the treecode potential calculation addition.

The periodic clumps stuff is finished, but not confirmed to work yet.

The treecode stuff is not finished and therefore not functional.
affected #:  2 files (5.1 KB)

--- a/yt/data_objects/derived_quantities.py	Sun Mar 13 20:11:53 2011 -0600
+++ b/yt/data_objects/derived_quantities.py	Tue Mar 15 12:38:25 2011 -0600
@@ -35,6 +35,7 @@
 from yt.utilities.data_point_utilities import FindBindingEnergy
 from yt.utilities.parallel_tools.parallel_analysis_interface import \
     ParallelAnalysisInterface
+from yt.utilities.amr_utils import Octree
 
 __CUDA_BLOCK_SIZE = 256
 
@@ -272,7 +273,8 @@
 add_quantity("ParticleSpinParameter", function=_ParticleSpinParameter,
              combine_function=_combBaryonSpinParameter, n_ret=4)
     
-def _IsBound(data, truncate = True, include_thermal_energy = False):
+def _IsBound(data, truncate = True, include_thermal_energy = False,
+    treecode = False):
     """
     This returns whether or not the object is gravitationally bound
     
@@ -298,15 +300,74 @@
     # We only divide once here because we have velocity in cgs, but radius is
     # in code.
     G = 6.67e-8 / data.convert("cm") # cm^3 g^-1 s^-2
+    # Check for periodicity of the clump.
+    two_root = 2. / na.array(data.pf.domain_dimensions)
+    periodic = na.array([0., 0., 0.])
+    for i,dim in enumerate(["x", "y", "z"]):
+        sorted = data[dim][data[dim].argsort()]
+        # If two adjacent values are different by (more than) two root grid
+        # cells, I think it's reasonable to assume that the clump wraps around.
+        diff = sorted[1:] - sorted[0:-1]
+        if (diff >= two_root[i]).any():
+            # Record the maximum of the minimum range of values, i.e. the last
+            # value just before the gap in values in the sorted array.
+            sel = (diff >= two_root[i])
+            index = na.min(na.nonzero(sel))
+            periodic[i] = sorted[index]
+    # This dict won't make a copy of the data, but we will make a copy to 
+    # change if needed in the periodic section.
+    local_data = {}
+    domain_period = data.pf.domain_right_edge - data.pf.domain_left_edge
+    for label in ["x", "y", "z", "CellMass"]:
+        local_data[label] = data[label]
+    if periodic.any():
+        # Adjust local_data to re-center the clump to remove the periodicity
+        # by the gap calculated above.
+        for i,dim in enumerate(["x", "y", "z"]):
+            if not periodic[i]: continue
+            local_data[dim] = data[dim].copy()
+            local_data[dim] += periodic[i]
+            local_data[dim] %= domain_period[i]
     import time
     t1 = time.time()
-    try:
-        pot = G*_cudaIsBound(data, truncate, kinetic/G)
-    except (ImportError, AssertionError):
-        pot = G*FindBindingEnergy(data["CellMass"],
-                                  data['x'],data['y'],data['z'],
-                                  truncate, kinetic/G)
-        mylog.info("Boundedness check took %0.3e seconds", time.time()-t1)
+    if treecode:
+        # Calculate the binding energy using the treecode method.
+        # Faster but less accurate.
+        # Make an octree for one value (mass) with incremental=True.
+        octree = Octree(na.array(data.pf.domain_dimensions), 1, True)
+        # Now discover what levels this data comes from.
+        root_dx = 1./data.pf.domain_dimensions[0]
+        dxes = na.unique(data['dx']) # unique returns a sorted array,
+        dyes = na.unique(data['dy']) # so these will all have the same
+        dzes = na.unique(data['dx']) # order.
+        # We only need one dim to figure out levels, we'll use x.
+        levels = na.floor(root_dx / dxes / data.pf.refine_by).astype('int')
+        lsort = levels.argsort()
+        levels = levels[lsort]
+        dxes = dxes[lsort]
+        dyes = dyes[lsort]
+        dzes = dzes[lsort]
+        # Now we pick out the values from each level and add them to the octree.
+        for L, dx, dy, dz in zip(levels, dxes, dyes, dzes):
+            print "adding to octree", L
+            sel = (dxes == dx)
+            thisx = (local_data["x"][sel] / dx).astype('int64')
+            thisy = (local_data["y"][sel] / dy).astype('int64')
+            thisz = (local_data["z"][sel] / dz).astype('int64')
+            vals = na.array([local_data["CellMass"][sel]], order='F')
+            octree.add_array_to_tree(L, thisx, thisy, thisz, vals,
+                na.ones_like(thisx).astype('float64'))
+        # Now we calculate the binding energy using a treecode.
+        pot = 0
+    else:
+        try:
+            pot = G*_cudaIsBound(local_data, truncate, kinetic/G)
+        except (ImportError, AssertionError):
+            pot = G*FindBindingEnergy(local_data["CellMass"],
+                                local_data['x'],local_data['y'],local_data['z'],
+                                truncate, kinetic/G)
+    mylog.info("Boundedness check took %0.3e seconds", time.time()-t1)
+    del local_data
     return [(pot / kinetic)]
 def _combIsBound(data, bound):
     return bound
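
The gap test above can be exercised on its own. Below is a minimal standalone sketch of the same gap-detection and re-centering idea, assuming a unit [0, 1) domain, a 16^3 root grid, and a made-up clump that wraps through x = 1.0; the shift uses the distance-from-the-right-edge form that r3889 settles on (na is the numpy alias yt used at the time):

    import numpy as na

    domain_dimensions = na.array([16, 16, 16])
    two_root = 2. / domain_dimensions
    x = na.array([0.95, 0.97, 0.99, 0.01, 0.03])  # wraps through 1.0

    s = x[x.argsort()]
    diff = s[1:] - s[:-1]
    if (diff >= two_root[0]).any():
        # The first gap wider than two root cells marks the wrap point.
        index = na.min(na.nonzero(diff >= two_root[0]))
        shift = 1.0 - s[index + 1]
        x = (x + shift) % 1.0
    print x  # the clump is now contiguous near x = 0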


--- a/yt/utilities/_amr_utils/Octree.pyx	Sun Mar 13 20:11:53 2011 -0600
+++ b/yt/utilities/_amr_utils/Octree.pyx	Tue Mar 15 12:38:25 2011 -0600
@@ -275,6 +275,53 @@
                             level, curpos + added, pdata, vdata, wdata)
         return added
 
+    cdef np.float64_t node_separation(self, OctreeNode *node1, OctreeNode *node2):
+        # Find the distance between the two nodes. To match FindBindingEnergy
+        # in data_point_utilities.c, we'll do this in code [0,1) units.
+        # We'll also assume 
+        cdef np.float64_t fn1, fn2, p1, p2, dist
+        cdef np.int64_t n1, n2
+        cdef int i
+        dist = 0.0
+        for i in range(3):
+            n1 = self.po2[node1.level] * self.top_grid_dims[i]
+            n2 = self.po2[node2.level] * self.top_grid_dims[i]
+            fn1 = <np.float64_t> n1
+            fn2 = <np.float64_t> n2
+            p1 = (<np.float64_t> node1.pos[i]) / fn1
+            p2 = (<np.float64_t> node2.pos[i]) / fn2
+            dist += np.sqrt((p1 - p2) * (p1 - p2))
+        return dist
+    
+    cdef np.float64_t opening_angle(self, OctreeNode *node1, OctreeNode *node2):
+        # Calculate the opening angle of node2 upon the center of node1.
+        # In order to keep things simple, we will not assume symmetry in all
+        # three directions of the octree, and we'll use the largest dimension
+        # if the tree is not symmetric. This is not strictly the opening angle
+        # in the purest sense, but it's slightly more accurate, so it's OK.
+        # This is done in code units to match the distance calculation.
+        cdef np.float64_t d2, dx2, dist
+        cdef np.int64_t n2
+        cdef int i
+        d2 = 0
+        if self.top_grid_dims[1] == self.top_grid_dims[0] and \
+                self.top_grid_dims[2] == self.top_grid_dims[0]:
+            # Symmetric
+            n2 = self.po2[node2.level] * self.top_grid_dims[0]
+            d2 = 1. / (<np.float64_t> n2)
+        else:
+            # Not symmetric
+            for i in range(3):
+                n2 = self.po2[node2.level] * self.top_grid_dims[i]
+                dx2 = 1. / (<np.float64_t> n2)
+                d2 = np.maximum(d2, dx2)
+        # Now calculate the opening angle using the small angle approximation.
+        # This is OK because for large angles, i.e. d2 ~ dist, 
+        dist = self.node_separation(node1, node2)
+        return d2 / dist
+
+    
+
     def __dealloc__(self):
         cdef int i, j, k
         for i in range(self.top_grid_dims[0]):
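
The opening-angle test is the standard treecode criterion: a remote node can stand in for all of its children when its angular size, approximated as (cell width) / (separation), is small enough as seen from the target node. A toy rendering of the criterion, with made-up positions and a hypothetical level-2 cell of width 1/4 (the later changesets default the acceptance threshold to 1.0):

    import math

    def opening_angle(size2, pos1, pos2):
        # Small-angle approximation: angle ~ width of node2 / separation.
        dist = math.sqrt(sum((a - b) ** 2 for a, b in zip(pos1, pos2)))
        return size2 / dist

    theta = opening_angle(0.25, (0.1, 0.1, 0.1), (0.7, 0.6, 0.5))
    print theta, theta < 1.0  # ~0.285, accepted at threshold 1.0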


http://bitbucket.org/yt_analysis/yt/changeset/6736a071c1c1/
changeset:   r3889:6736a071c1c1
branch:      yt
user:        sskory
date:        2011-03-16 04:40:43
summary:     More work on treecode, definitely not functional yet.
affected #:  2 files (1.1 KB)

--- a/yt/data_objects/derived_quantities.py	Tue Mar 15 12:38:25 2011 -0600
+++ b/yt/data_objects/derived_quantities.py	Tue Mar 15 21:40:43 2011 -0600
@@ -302,6 +302,7 @@
     G = 6.67e-8 / data.convert("cm") # cm^3 g^-1 s^-2
     # Check for periodicity of the clump.
     two_root = 2. / na.array(data.pf.domain_dimensions)
+    domain_period = data.pf.domain_right_edge - data.pf.domain_left_edge
     periodic = na.array([0., 0., 0.])
     for i,dim in enumerate(["x", "y", "z"]):
         sorted = data[dim][data[dim].argsort()]
@@ -309,15 +310,15 @@
         # cells, I think it's reasonable to assume that the clump wraps around.
         diff = sorted[1:] - sorted[0:-1]
         if (diff >= two_root[i]).any():
-            # Record the maximum of the minimum range of values, i.e. the last
-            # value just before the gap in values in the sorted array.
+            # We will record the distance of the larger of the two values that
+            # define the gap from the right boundary, which we'll use for the
+            # periodic adjustment later.
             sel = (diff >= two_root[i])
             index = na.min(na.nonzero(sel))
-            periodic[i] = sorted[index]
+            periodic[i] = data.pf.domain_right_edge[i] - sorted[index + 1]
     # This dict won't make a copy of the data, but we will make a copy to 
     # change if needed in the periodic section.
     local_data = {}
-    domain_period = data.pf.domain_right_edge - data.pf.domain_left_edge
     for label in ["x", "y", "z", "CellMass"]:
         local_data[label] = data[label]
     if periodic.any():


--- a/yt/utilities/_amr_utils/Octree.pyx	Tue Mar 15 12:38:25 2011 -0600
+++ b/yt/utilities/_amr_utils/Octree.pyx	Tue Mar 15 21:40:43 2011 -0600
@@ -315,12 +315,37 @@
                 n2 = self.po2[node2.level] * self.top_grid_dims[i]
                 dx2 = 1. / (<np.float64_t> n2)
                 d2 = np.maximum(d2, dx2)
-        # Now calculate the opening angle using the small angle approximation.
-        # This is OK because for large angles, i.e. d2 ~ dist, 
+        # Now calculate the opening angle.
         dist = self.node_separation(node1, node2)
         return d2 / dist
 
-    
+    cdef np.float64_t iterate_remote_node_fbe(self, OctreeNode *node1,
+            OctreeNode *node2):
+        # node1 never changes.
+        # node2 is the iterated-upon remote node.
+        
+
+    cdef np.float64_t iterate_child_node_fbe(self, OctreeNode *node):
+        # Recursively iterate over child nodes until we get a childless node.
+        cdef int i, j, k
+        cdef float64_t potential
+        potential = 0
+        if node.children[0][0][0] is NULL:
+            # We have a childless node. Time to iterate over every other
+            # node using the treecode method.
+            
+        for i in range(2):
+            for j in range(2):
+                for k in range(2):
+                    potential += self.iterate_child_node_fbe(node.children[i][j][k])
+
+    def find_binding_energy(self, truncate, kinetic):
+        cdef int i, j, k
+        # The first part of the loop goes over all of the root level cells.
+        for i in range(self.top_grid_dims[0]):
+            for j in range(self.top_grid_dims[1]):
+                for k in range(self.top_grid_dims[2]):
+                    
 
     def __dealloc__(self):
         cdef int i, j, k
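
The stubs above sketch the eventual control flow: descend to each childless node, then accumulate its potential against every other node exactly once. The pair accounting it has to reproduce is the direct O(N^2) sum in FindBindingEnergy, which avoids double counting by only ever looking "forward" from each point. A plain-Python equivalent of that direct sum, with hypothetical point data:

    def direct_binding_energy(mass, x, y, z):
        # Each (i, j) pair is visited once: j always runs ahead of i,
        # mirroring the forward-only inner loop in data_point_utilities.c.
        total = 0.0
        for i in range(len(mass) - 1):
            for j in range(i + 1, len(mass)):
                r = ((x[i] - x[j]) ** 2 + (y[i] - y[j]) ** 2 +
                     (z[i] - z[j]) ** 2) ** 0.5
                total += mass[i] * mass[j] / r
        return total

    print direct_binding_energy([1., 1.], [0., 0.5], [0., 0.], [0., 0.])  # 2.0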


http://bitbucket.org/yt_analysis/yt/changeset/8ce8c2ea9813/
changeset:   r3890:8ce8c2ea9813
branch:      yt
user:        sskory
date:        2011-03-16 20:42:22
summary:     All the pieces of the treecode are done, and it compiles, but I have no idea if it works yet. Likely not.
affected #:  1 file (5.0 KB)

--- a/yt/utilities/_amr_utils/Octree.pyx	Tue Mar 15 21:40:43 2011 -0600
+++ b/yt/utilities/_amr_utils/Octree.pyx	Wed Mar 16 13:42:22 2011 -0600
@@ -43,6 +43,7 @@
     int level
     int nvals
     OctreeNode *children[2][2][2]
+    OctreeNode *parent
 
 cdef void OTN_add_value(OctreeNode *self,
         np.float64_t *val, np.float64_t weight_val):
@@ -65,14 +66,38 @@
                 self.children[i][j][k] = OTN_initialize(
                             npos,
                             self.nvals, self.val, self.weight_val,
-                            self.level + 1)
+                            self.level + 1, self)
     if incremental: return
     for i in range(self.nvals): self.val[i] = 0.0
     self.weight_val = 0.0
 
+cdef int OTN_same(OctreeNode *node1, OctreeNode *node2):
+    # Returns 1 if node1 == node2; 0 otherwise.
+    # If all of these conditions are met, they are the same node.
+    if node1.pos[0] == node2.pos[0] and \
+       node1.pos[1] == node2.pos[1] and \
+       node1.pos[2] == node2.pos[2] and \
+       node1.level == node2.level: return 1
+    return 0
+
+cdef int OTN_contained(OctreeNode *node1, OctreeNode *node2):
+    # Returns 1 if node2 contains node1; and 0 otherwise.
+    # node1.level > node2.level.
+    cdef OctreeNode *parent_node
+    parent_node = node1.parent
+    while parent_node is not NULL:
+        # If all of these conditions are met, node2 is a parent of node1.
+        if parent_node.pos[0] == node2.pos[0] and \
+           parent_node.pos[1] == node2.pos[0] and \
+           parent_node.pos[2] == node2.pos[2] and \
+           parent_node.level == node2.level: return 1
+        parent_node = parent_node.parent
+    # If we've gotten this far, the two nodes are not related.
+    return 0
+
 cdef OctreeNode *OTN_initialize(np.int64_t pos[3], int nvals,
                         np.float64_t *val, np.float64_t weight_val,
-                        int level):
+                        int level, OctreeNode *parent):
     cdef OctreeNode *node
     cdef int i, j, k
     node = <OctreeNode *> malloc(sizeof(OctreeNode))
@@ -80,6 +105,7 @@
     node.pos[1] = pos[1]
     node.pos[2] = pos[2]
     node.nvals = nvals
+    node.parent = parent
     node.val = <np.float64_t *> malloc(
                 nvals * sizeof(np.float64_t))
     for i in range(nvals):
@@ -108,6 +134,7 @@
     cdef OctreeNode ****root_nodes
     cdef np.int64_t top_grid_dims[3]
     cdef int incremental
+    cdef np.float64_t opening_angle
 
     def __cinit__(self, np.ndarray[np.int64_t, ndim=1] top_grid_dims,
                   int nvals, int incremental = False):
@@ -144,7 +171,7 @@
                 for k in range(top_grid_dims[2]):
                     pos[2] = k
                     self.root_nodes[i][j][k] = OTN_initialize(
-                        pos, nvals, vals, weight_val, 0)
+                        pos, nvals, vals, weight_val, 0, NULL)
 
     cdef void add_to_position(self,
                  int level, np.int64_t pos[3],
@@ -275,7 +302,7 @@
                             level, curpos + added, pdata, vdata, wdata)
         return added
 
-    cdef np.float64_t node_separation(self, OctreeNode *node1, OctreeNode *node2):
+    cdef np.float64_t fbe_node_separation(self, OctreeNode *node1, OctreeNode *node2):
         # Find the distance between the two nodes. To match FindBindingEnergy
         # in data_point_utilities.c, we'll do this in code [0,1) units.
         # We'll also assume 
@@ -293,7 +320,8 @@
             dist += np.sqrt((p1 - p2) * (p1 - p2))
         return dist
     
-    cdef np.float64_t opening_angle(self, OctreeNode *node1, OctreeNode *node2):
+    cdef np.float64_t fbe_opening_angle(self, OctreeNode *node1,
+            OctreeNode *node2):
         # Calculate the opening angle of node2 upon the center of node1.
         # In order to keep things simple, we will not assume symmetry in all
         # three directions of the octree, and we'll use the largest dimension
@@ -303,7 +331,7 @@
         cdef np.float64_t d2, dx2, dist
         cdef np.int64_t n2
         cdef int i
-        d2 = 0
+        d2 = 0.0
         if self.top_grid_dims[1] == self.top_grid_dims[0] and \
                 self.top_grid_dims[2] == self.top_grid_dims[0]:
             # Symmetric
@@ -316,36 +344,113 @@
                 dx2 = 1. / (<np.float64_t> n2)
                 d2 = np.maximum(d2, dx2)
         # Now calculate the opening angle.
-        dist = self.node_separation(node1, node2)
+        dist = self.fbe_node_separation(node1, node2)
         return d2 / dist
 
-    cdef np.float64_t iterate_remote_node_fbe(self, OctreeNode *node1,
+    cdef np.float64_t fbe_potential_of_remote_nodes(self, OctreeNode *node1,
+            int top_i, int top_j, int top_k):
+        # Given a childless node "node1", calculate the potential for it from
+        # all the other nodes using the treecode method.
+        cdef int i, j, k
+        cdef np.float64_t potential
+        potential = 0.0
+        # We *do* want to walk over the root_node that node1 is in to look for
+        # level>0 nodes close to node1, but none of
+        # the previously-walked root_nodes.
+        for i in range(top_i, self.top_grid_dims[0]):
+            for j in range(top_j, self.top_grid_dims[1]):
+                for k in range(top_k, self.top_grid_dims[2]):
+                    potential += self.fbe_iterate_remote_nodes(node1,
+                        self.root_nodes[i][j][k])
+        return potential
+
+    cdef np.float64_t fbe_iterate_remote_nodes(self, OctreeNode *node1,
             OctreeNode *node2):
         # node1 never changes.
         # node2 is the iterated-upon remote node.
-        
-
-    cdef np.float64_t iterate_child_node_fbe(self, OctreeNode *node):
-        # Recursively iterate over child nodes until we get a childless node.
-        cdef int i, j, k
-        cdef float64_t potential
-        potential = 0
-        if node.children[0][0][0] is NULL:
-            # We have a childless node. Time to iterate over every other
-            # node using the treecode method.
-            
+        cdef int i, j, k, contained
+        cdef np.float64_t potential, dist, angle
+        potential = 0.0
+        # Do nothing with node2 when it is the same as node1. No potential
+        # calculation, and no digging deeper.
+        if OTN_same(node1, node2): return 0.0
+        # If we have a childless node2 we can skip everything below and
+        # calculate the potential.
+        if node2.children[0][0][0] == NULL:
+            dist = self.fbe_node_separation(node1, node2)
+            return node1.val[0] * node2.val[0] / dist
+        # We don't want to calculate the potential of the node to a parent node.
+        # Find out if there is a relationship.
+        contained = OTN_contained(node1, node2)
+        # Now we apply the opening angle test. If the opening angle is small
+        # enough, we use this node for the potential and dig no deeper.
+        angle = self.fbe_opening_angle(node1, node2)
+        if angle < self.opening_angle and not contained:
+            dist = self.fbe_node_separation(node1, node2)
+            return node1.val[0] * node2.val[0] / dist
+        # If the above is not satisfied, we must dig deeper!
         for i in range(2):
             for j in range(2):
                 for k in range(2):
-                    potential += self.iterate_child_node_fbe(node.children[i][j][k])
+                    potential += self.fbe_iterate_remote_nodes(node1,
+                        node2.children[i][j][k])
+        return potential
 
-    def find_binding_energy(self, truncate, kinetic):
+    cdef np.float64_t fbe_iterate_children(self, OctreeNode *node, int top_i,
+            int top_j, int top_k):
+        # Recursively iterate over child nodes until we get a childless node.
         cdef int i, j, k
-        # The first part of the loop goes over all of the root level cells.
+        cdef np.float64_t potential
+        potential = 0.0
+        # We have a childless node. Time to iterate over every other
+        # node using the treecode method.
+        if node.children[0][0][0] is NULL:
+            potential = self.fbe_potential_of_remote_nodes(node, top_i, top_j,
+                top_k)
+            return potential
+        # If the node has children, we need to walk all of them returning
+        # the potential for each.
+        for i in range(2):
+            for j in range(2):
+                for k in range(2):
+                    potential += self.fbe_iterate_children(node.children[i][j][k],
+                        top_i, top_j, top_k)
+        return potential
+
+    def find_binding_energy(self, truncate, kinetic, opening_angle = 1.0):
+        r"""Find the binding energy of an ensemble of data points using the
+        treecode method.
+        
+        Note: The first entry of the vals array MUST be Mass.
+        """
+        # Here are the order of events:
+        # 1. We loop over all of the root_nodes, below.
+        # 2. In fbe_iterate_children, each of these nodes is iterated until
+        #    we reach a node without any children.
+        # 3. Next, starting in fbe_potential_of_remote_nodes we again loop over
+        #    root_nodes, not double-counting, calling fbe_iterate_remote_nodes.
+        # 4. In fbe_iterate_remote_nodes, if we have a childless node, we
+        #    calculate the potential between the two nodes and return. Or, if
+        #    we have a node with a small enough opening angle, we return the
+        #    potential. If neither of these are done, we call
+        #    fbe_iterate_remote_nodes on the children nodes.
+        # 5. All of this returns a total, non-double-counted potential.
+        cdef int i, j, k
+        cdef np.float64_t potential
+        potential = 0.0
+        self.opening_angle = opening_angle
+        # The first part of the loop goes over all of the root level nodes.
         for i in range(self.top_grid_dims[0]):
             for j in range(self.top_grid_dims[1]):
                 for k in range(self.top_grid_dims[2]):
-                    
+                    potential += self.fbe_iterate_children(self.root_nodes[i][j][k],
+                        i, j, k)
+                    if truncate and potential > kinetic: break
+                if truncate and potential > kinetic: break
+            if truncate and potential > kinetic:
+                print "Truncating!"
+                break
+        return potential
 
     def __dealloc__(self):
         cdef int i, j, k
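
The five-step plan in find_binding_energy's comments boils down to one decision per remote node: same node, skip; childless node, take the direct term; opening angle small enough and not an ancestor, treat the node as a single mass; otherwise open it up and recurse. A condensed, self-contained Python rendering of that decision (the Node class, width-based angle, and center-based separation are stand-ins for the Cython structures; the containment guard on the leaf branch, which the next two changesets tighten, is folded in here):

    import math

    class Node(object):
        def __init__(self, mass, center, width, children=(), parent=None):
            self.mass, self.center, self.width = mass, center, width
            self.children, self.parent = list(children), parent

        def is_ancestor_of(self, other):
            p = other.parent
            while p is not None:
                if p is self: return True
                p = p.parent
            return False

    def separation(n1, n2):
        return math.sqrt(sum((a - b) ** 2
                             for a, b in zip(n1.center, n2.center)))

    def iterate_remote(node1, node2, theta_max):
        # Mirrors fbe_iterate_remote_nodes: can node2 stand in as a
        # single mass for node1, or must its children be walked?
        if node2 is node1:
            return 0.0
        contained = node2.is_ancestor_of(node1)
        if not node2.children and not contained:
            return node1.mass * node2.mass / separation(node1, node2)
        if node2.width / separation(node1, node2) < theta_max and not contained:
            return node1.mass * node2.mass / separation(node1, node2)
        # Too close (or an ancestor): dig deeper into node2.
        return sum(iterate_remote(node1, c, theta_max)
                   for c in node2.children)

    a = Node(1.0, (0.1, 0.1, 0.1), 0.25)
    c1 = Node(2.0, (0.75, 0.8, 0.8), 0.25)
    c2 = Node(2.0, (0.85, 0.8, 0.8), 0.25)
    far = Node(4.0, (0.8, 0.8, 0.8), 0.5, children=[c1, c2])
    print iterate_remote(a, far, 1.0)  # ~3.3: far is taken as one mass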


http://bitbucket.org/yt_analysis/yt/changeset/bcfd0ac99961/
changeset:   r3891:bcfd0ac99961
branch:      yt
user:        sskory
date:        2011-03-16 23:30:34
summary:     Fixing the buffer size in construct_boundary_relationships, and a small
change in Octree for max.
affected #:  2 files (105 bytes)

--- a/yt/utilities/_amr_utils/ContourFinding.pyx	Wed Mar 16 13:42:22 2011 -0600
+++ b/yt/utilities/_amr_utils/ContourFinding.pyx	Wed Mar 16 16:30:34 2011 -0600
@@ -49,7 +49,7 @@
     ny = contour_ids.shape[1]
     nz = contour_ids.shape[2]
     # We allocate an array of fixed (maximum) size
-    cdef int s = (ny*nx + nx*nz + nx*nz - 4) * 9
+    cdef int s = (ny*nx + nx*nz + ny*nz - 2) * 18
     cdef np.ndarray[np.int64_t, ndim=2] tree = np.zeros((s, 2), dtype="int64")
     cdef int ti = 0
     # First x-pass
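
One plausible reading of the new bound: each of the three sweep directions visits one plane of boundary cells (nx*ny, nx*nz, and ny*nz of them; the old expression counted nx*nz twice and ny*nz never), each boundary cell can join up to 9 cells in the 3x3 patch across the face, and recording both orientations of each join doubles that, hence the factor 18. As arithmetic, assuming a 4x4x4 block:

    nx = ny = nz = 4
    s = (ny * nx + nx * nz + ny * nz - 2) * 18
    print s  # 828 rows reserved in the (s, 2) join array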


--- a/yt/utilities/_amr_utils/Octree.pyx	Wed Mar 16 13:42:22 2011 -0600
+++ b/yt/utilities/_amr_utils/Octree.pyx	Wed Mar 16 16:30:34 2011 -0600
@@ -36,6 +36,10 @@
     # NOTE that size_t might not be int
     void *alloca(int)
 
+cdef inline np.float64_t f64max(np.float64_t f0, np.float64_t f1):
+    if f0 > f1: return f0
+    return f1
+
 cdef struct OctreeNode:
     np.float64_t *val
     np.float64_t weight_val
@@ -342,7 +346,7 @@
             for i in range(3):
                 n2 = self.po2[node2.level] * self.top_grid_dims[i]
                 dx2 = 1. / (<np.float64_t> n2)
-                d2 = np.maximum(d2, dx2)
+                d2 = f64max(d2, dx2)
         # Now calculate the opening angle.
         dist = self.fbe_node_separation(node1, node2)
         return d2 / dist


http://bitbucket.org/yt_analysis/yt/changeset/af0a7b98504d/
changeset:   r3892:af0a7b98504d
branch:      yt
user:        sskory
date:        2011-03-18 18:27:47
summary:     Intermediate commit. treecode/octree not functional.

I'm about to make a major change to how I populate the octree, so I'm saving this in case what I do is complete junk.
affected #:  3 files (2.8 KB)

--- a/yt/data_objects/derived_quantities.py	Wed Mar 16 16:30:34 2011 -0600
+++ b/yt/data_objects/derived_quantities.py	Fri Mar 18 11:27:47 2011 -0600
@@ -315,7 +315,10 @@
             # periodic adjustment later.
             sel = (diff >= two_root[i])
             index = na.min(na.nonzero(sel))
-            periodic[i] = data.pf.domain_right_edge[i] - sorted[index + 1]
+            # The last addition term below ensures that the data makes a full
+            # wrap-around.
+            periodic[i] = data.pf.domain_right_edge[i] - sorted[index + 1] + \
+                two_root[i] / 2.
     # This dict won't make a copy of the data, but we will make a copy to 
     # change if needed in the periodic section.
     local_data = {}
@@ -336,12 +339,13 @@
         # Faster but less accurate.
         # Make an octree for one value (mass) with incremental=True.
         octree = Octree(na.array(data.pf.domain_dimensions), 1, True)
-        # Now discover what levels this data comes from.
-        root_dx = 1./data.pf.domain_dimensions[0]
+        # Now discover what levels this data comes from, not assuming
+        # symmetry.
         dxes = na.unique(data['dx']) # unique returns a sorted array,
         dyes = na.unique(data['dy']) # so these will all have the same
         dzes = na.unique(data['dx']) # order.
         # We only need one dim to figure out levels, we'll use x.
+        root_dx = 1./data.pf.domain_dimensions[0]
         levels = na.floor(root_dx / dxes / data.pf.refine_by).astype('int')
         lsort = levels.argsort()
         levels = levels[lsort]
@@ -349,17 +353,43 @@
         dyes = dyes[lsort]
         dzes = dzes[lsort]
         # Now we pick out the values from each level and add them to the octree.
+        cover_min = na.array([na.amin(local_data['x']), na.amin(local_data['y']),
+            na.amin(local_data['z'])])
+        cover_max = na.array([na.amax(local_data['x']), na.amax(local_data['y']),
+            na.amax(local_data['z'])])
+        # Fix to root grid cell centers
+        cover_min = cover_min - cover_min % root_dx
+        cover_max = cover_max - cover_max % root_dx
+        # This step adds filler levels, e.g. if this object has data at only
+        # some deep level(s) we need to introduce a zero field for the higher
+        # levels.
+        for L in range(max(data.pf.h.max_level+1, na.amax(levels))):
+            imin = (cover_min * na.array(data.pf.domain_dimensions) * 2**L).astype('int64')
+            imax = (cover_max * na.array(data.pf.domain_dimensions) * 2**L + 2**L - 1).astype('int64')
+            ActiveDimensions = imax - imin
+            i, j, k = na.indices(ActiveDimensions)
+            i = i.flatten() + imin[0]
+            j = j.flatten() + imin[1]
+            k = k.flatten() + imin[2]
+            octree.add_array_to_tree(L, i, j, k,
+                na.array([na.zeros_like(i)], order='F', dtype='float64'),
+                na.ones_like(i).astype('float64'))
+            print L, imin, imax
         for L, dx, dy, dz in zip(levels, dxes, dyes, dzes):
             print "adding to octree", L
-            sel = (dxes == dx)
+            sel = (data["dx"] == dx)
             thisx = (local_data["x"][sel] / dx).astype('int64')
             thisy = (local_data["y"][sel] / dy).astype('int64')
             thisz = (local_data["z"][sel] / dz).astype('int64')
             vals = na.array([local_data["CellMass"][sel]], order='F')
+            #vals = na.array([na.ones_like(local_data["CellMass"][sel])], order='F')
             octree.add_array_to_tree(L, thisx, thisy, thisz, vals,
-                na.ones_like(thisx).astype('float64'))
+               na.ones_like(thisx).astype('float64'))
+            print na.min(thisx), na.min(thisy), na.min(thisz)
+            print na.max(thisx), na.max(thisy), na.max(thisz)
         # Now we calculate the binding energy using a treecode.
-        pot = 0
+        print 'calculating'
+        pot = G*octree.find_binding_energy(truncate, kinetic/G, 0.0)
     else:
         try:
             pot = G*_cudaIsBound(local_data, truncate, kinetic/G)
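
Two details of the population loop are easy to check in isolation: na.indices generates the (i, j, k) cell indices for a filler level, and each real level is selected by its dx. (Note that the level-discovery lines above build dzes from data['dx'], which looks like a typo for data['dz'].) A small sketch of the filler-index generation, assuming a hypothetical 2-cell-per-axis cover refined to level 1:

    import numpy as na

    L = 1
    cover_dims = na.array([2, 2, 2])         # root cells covering the clump
    ActiveDimensions = cover_dims * 2 ** L   # 4 cells per axis at level 1
    i, j, k = na.indices(ActiveDimensions)
    i, j, k = i.flatten(), j.flatten(), k.flatten()
    # Zero-mass filler values, Fortran-ordered as add_array_to_tree expects.
    vals = na.array([na.zeros_like(i)], order='F', dtype='float64')
    print len(i), vals.shape  # 64 cells, (1, 64)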


--- a/yt/utilities/_amr_utils/Octree.pyx	Wed Mar 16 16:30:34 2011 -0600
+++ b/yt/utilities/_amr_utils/Octree.pyx	Fri Mar 18 11:27:47 2011 -0600
@@ -90,11 +90,7 @@
     cdef OctreeNode *parent_node
     parent_node = node1.parent
     while parent_node is not NULL:
-        # If all of these conditions are met, node2 is a parent of node1.
-        if parent_node.pos[0] == node2.pos[0] and \
-           parent_node.pos[1] == node2.pos[0] and \
-           parent_node.pos[2] == node2.pos[2] and \
-           parent_node.level == node2.level: return 1
+        if parent_node is node2: return 1
         parent_node = parent_node.parent
     # If we've gotten this far, the two nodes are not related.
     return 0
@@ -139,6 +135,7 @@
     cdef np.int64_t top_grid_dims[3]
     cdef int incremental
     cdef np.float64_t opening_angle
+    cdef int count1, count2
 
     def __cinit__(self, np.ndarray[np.int64_t, ndim=1] top_grid_dims,
                   int nvals, int incremental = False):
@@ -150,6 +147,8 @@
                 sizeof(np.float64_t)*nvals)
         cdef np.float64_t weight_val = 0.0
         self.nvals = nvals
+        count1 = 0
+        count2 = 0
         for i in range(nvals): vals[i] = 0.0
 
         self.top_grid_dims[0] = top_grid_dims[0]
@@ -319,9 +318,11 @@
             n2 = self.po2[node2.level] * self.top_grid_dims[i]
             fn1 = <np.float64_t> n1
             fn2 = <np.float64_t> n2
-            p1 = (<np.float64_t> node1.pos[i]) / fn1
-            p2 = (<np.float64_t> node2.pos[i]) / fn2
-            dist += np.sqrt((p1 - p2) * (p1 - p2))
+            # The added term is to re-cell center the data.
+            p1 = (<np.float64_t> node1.pos[i]) / fn1 + 1. / fn1 / 2.
+            p2 = (<np.float64_t> node2.pos[i]) / fn2 + 1. / fn2 / 2.
+            dist += (p1 - p2) * (p1 - p2)
+        dist = np.sqrt(dist)
         return dist
     
     cdef np.float64_t fbe_opening_angle(self, OctreeNode *node1,
@@ -336,6 +337,7 @@
         cdef np.int64_t n2
         cdef int i
         d2 = 0.0
+        if OTN_same(node1, node2): return 100000.0 # Just some large number.
         if self.top_grid_dims[1] == self.top_grid_dims[0] and \
                 self.top_grid_dims[2] == self.top_grid_dims[0]:
             # Symmetric
@@ -364,6 +366,7 @@
         for i in range(top_i, self.top_grid_dims[0]):
             for j in range(top_j, self.top_grid_dims[1]):
                 for k in range(top_k, self.top_grid_dims[2]):
+                    if self.root_nodes[i][j][k].val[0] == 0: continue
                     potential += self.fbe_iterate_remote_nodes(node1,
                         self.root_nodes[i][j][k])
         return potential
@@ -378,14 +381,16 @@
         # Do nothing with node2 when it is the same as node1. No potential
         # calculation, and no digging deeper.
         if OTN_same(node1, node2): return 0.0
+        # Is node1 contained inside node2?
+        contained = OTN_contained(node1, node2)
         # If we have a childless node2 we can skip everything below and
-        # calculate the potential.
-        if node2.children[0][0][0] == NULL:
+        # calculate the potential, as long as node2 does not contain node1.
+        if node2.children[0][0][0] == NULL and not contained:
             dist = self.fbe_node_separation(node1, node2)
+            self.count2 += 1
+            #print '%1.10e %1.10e %1.10e %1.10e xx' % (node1.val[0], node2.val[0],
+            #dist, node1.val[0] * node2.val[0] / dist)
             return node1.val[0] * node2.val[0] / dist
-        # We don't want to calculate the potential of the node to a parent node.
-        # Find out if there is a relationship.
-        contained = OTN_contained(node1, node2)
         # Now we apply the opening angle test. If the opening angle is small
         # enough, we use this node for the potential and dig no deeper.
         angle = self.fbe_opening_angle(node1, node2)
@@ -396,6 +401,7 @@
         for i in range(2):
             for j in range(2):
                 for k in range(2):
+                    if node2.children[i][j][k].val[0] == 0.0: continue
                     potential += self.fbe_iterate_remote_nodes(node1,
                         node2.children[i][j][k])
         return potential
@@ -409,14 +415,16 @@
         # We have a childless node. Time to iterate over every other
         # node using the treecode method.
         if node.children[0][0][0] is NULL:
-            potential = self.fbe_potential_of_remote_nodes(node, top_i, top_j,
-                top_k)
+            self.count1 += 1
+            potential = self.fbe_potential_of_remote_nodes(node,
+                top_i, top_j, top_k)
             return potential
         # If the node has children, we need to walk all of them returning
         # the potential for each.
         for i in range(2):
             for j in range(2):
                 for k in range(2):
+                    if node.children[i][j][k].val[0] == 0.0: continue
                     potential += self.fbe_iterate_children(node.children[i][j][k],
                         top_i, top_j, top_k)
         return potential
@@ -447,13 +455,18 @@
         for i in range(self.top_grid_dims[0]):
             for j in range(self.top_grid_dims[1]):
                 for k in range(self.top_grid_dims[2]):
+                    if self.root_nodes[i][j][k].val[0] == 0.0: continue
+                    #print i,j,k
                     potential += self.fbe_iterate_children(self.root_nodes[i][j][k],
                         i, j, k)
+                    #potential = kinetic * 2
                     if truncate and potential > kinetic: break
                 if truncate and potential > kinetic: break
             if truncate and potential > kinetic:
                 print "Truncating!"
                 break
+        print 'count1', self.count1
+        print 'count2', self.count2
         return potential
 
     def __dealloc__(self):
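
Two numerical fixes land in fbe_node_separation here: positions are shifted by half a cell so the separation is measured between cell centers, and the sqrt moves outside the loop, turning the old per-axis sum of |p1 - p2| into a true Euclidean distance. Checked in plain Python for two level-1 nodes, assuming a 1^3 top grid so fn = 2 cells per axis:

    fn = 2.0  # po2[level=1] * top_grid_dims[i] cells along this axis
    center = lambda pos: pos / fn + 1. / fn / 2.
    print center(0.), center(1.)  # 0.25 0.75: the two cell centers
    d = center(1.) - center(0.)
    print (3 * d * d) ** 0.5      # ~0.866, distance along the diagonal
    print 3 * abs(d)              # 1.5, what the old per-axis sum gave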


--- a/yt/utilities/data_point_utilities.c	Wed Mar 16 16:30:34 2011 -0600
+++ b/yt/utilities/data_point_utilities.c	Fri Mar 18 11:27:47 2011 -0600
@@ -1567,6 +1567,7 @@
     float workDone = 0;
     int every_cells = floor(n_q / 100);
     int until_output = 1;
+    double temp1, temp2;
     for (q_outer = 0; q_outer < n_q - 1; q_outer++) {
         this_potential = 0;
         mass_o = *(npy_float64*) PyArray_GETPTR1(mass, q_outer);
@@ -1582,6 +1583,15 @@
                             sqrtl( (x_i-x_o)*(x_i-x_o)
                                  + (y_i-y_o)*(y_i-y_o)
                                  + (z_i-z_o)*(z_i-z_o) );
+            temp1 = sqrtl( (x_i-x_o)*(x_i-x_o)
+                                 + (y_i-y_o)*(y_i-y_o)
+                                 + (z_i-z_o)*(z_i-z_o) );
+            temp2 = mass_o * mass_i / 
+                            sqrtl( (x_i-x_o)*(x_i-x_o)
+                                 + (y_i-y_o)*(y_i-y_o)
+                                 + (z_i-z_o)*(z_i-z_o) );
+            //fprintf(stdout, "%1.10e %1.10e %1.10e %1.10e xx\n",
+            //mass_o, mass_i, temp1, temp2);
         }
         total_potential += this_potential;
 	workDone += n_q - q_outer - 1;
@@ -1604,7 +1614,6 @@
     Py_DECREF(x);
     Py_DECREF(y);
     Py_DECREF(z);
-
     PyObject *status = PyFloat_FromDouble(total_potential);
     return status;
 


http://bitbucket.org/yt_analysis/yt/changeset/73f029e61e3d/
changeset:   r3893:73f029e61e3d
branch:      yt
user:        sskory
date:        2011-03-19 20:14:34
summary:     More progress. I can get the same potential with angle=0 as the full
calculation, but only for a region with a single level of data. It segfaults
otherwise. More to do.
affected #:  3 files (3.4 KB)

--- a/yt/data_objects/derived_quantities.py	Fri Mar 18 11:27:47 2011 -0600
+++ b/yt/data_objects/derived_quantities.py	Sat Mar 19 13:14:34 2011 -0600
@@ -274,7 +274,7 @@
              combine_function=_combBaryonSpinParameter, n_ret=4)
     
 def _IsBound(data, truncate = True, include_thermal_energy = False,
-    treecode = False):
+    treecode = False, opening_angle = 1.0):
     """
     This returns whether or not the object is gravitationally bound
     
@@ -338,58 +338,67 @@
         # Calculate the binding energy using the treecode method.
         # Faster but less accurate.
         # Make an octree for one value (mass) with incremental=True.
-        octree = Octree(na.array(data.pf.domain_dimensions), 1, True)
+#         octree = Octree(na.array(data.pf.domain_dimensions), 1, True)
+        # First we find the min/max coverage of this data object.
+        root_dx = 1./na.array(data.pf.domain_dimensions).astype('float64')
+        cover_min = na.array([na.amin(local_data['x']), na.amin(local_data['y']),
+            na.amin(local_data['z'])])
+        cover_max = na.array([na.amax(local_data['x']), na.amax(local_data['y']),
+            na.amax(local_data['z'])])
+        # Fix the coverage to match to root grid cell left 
+        # edges for making indexes.
+        cover_min = cover_min - cover_min % root_dx
+        cover_max = cover_max - cover_max % root_dx
+        cover_imin = (cover_min * na.array(data.pf.domain_dimensions)).astype('int64')
+        cover_imax = (cover_max * na.array(data.pf.domain_dimensions) + 1).astype('int64')
+        cover_ActiveDimensions = cover_imax - cover_imin
+        # Create the octree with these dimensions.
+        # One value (mass) with incremental=True.
+        octree = Octree(cover_ActiveDimensions, 1, True)
+        print 'here', cover_ActiveDimensions
         # Now discover what levels this data comes from, not assuming
         # symmetry.
         dxes = na.unique(data['dx']) # unique returns a sorted array,
         dyes = na.unique(data['dy']) # so these will all have the same
         dzes = na.unique(data['dx']) # order.
         # We only need one dim to figure out levels, we'll use x.
-        root_dx = 1./data.pf.domain_dimensions[0]
-        levels = na.floor(root_dx / dxes / data.pf.refine_by).astype('int')
+        dx = 1./data.pf.domain_dimensions[0]
+        levels = na.floor(dx / dxes / data.pf.refine_by).astype('int')
         lsort = levels.argsort()
         levels = levels[lsort]
         dxes = dxes[lsort]
         dyes = dyes[lsort]
         dzes = dzes[lsort]
-        # Now we pick out the values from each level and add them to the octree.
-        cover_min = na.array([na.amin(local_data['x']), na.amin(local_data['y']),
-            na.amin(local_data['z'])])
-        cover_max = na.array([na.amax(local_data['x']), na.amax(local_data['y']),
-            na.amax(local_data['z'])])
-        # Fix to root grid cell centers
-        cover_min = cover_min - cover_min % root_dx
-        cover_max = cover_max - cover_max % root_dx
-        # This step adds filler levels, e.g. if this object has data at only
-        # some deep level(s) we need to introduce a zero field for the higher
-        # levels.
-        for L in range(max(data.pf.h.max_level+1, na.amax(levels))):
-            imin = (cover_min * na.array(data.pf.domain_dimensions) * 2**L).astype('int64')
-            imax = (cover_max * na.array(data.pf.domain_dimensions) * 2**L + 2**L - 1).astype('int64')
-            ActiveDimensions = imax - imin
+        # This step adds massless cells for all the levels we need in order
+        # to fully populate all the parent-child cells needed.
+        for L in range(min(data.pf.h.max_level+1, na.amax(levels)+1)):
+            ActiveDimensions = cover_ActiveDimensions * 2**L
             i, j, k = na.indices(ActiveDimensions)
-            i = i.flatten() + imin[0]
-            j = j.flatten() + imin[1]
-            k = k.flatten() + imin[2]
+            i = i.flatten()
+            j = j.flatten()
+            k = k.flatten()
             octree.add_array_to_tree(L, i, j, k,
                 na.array([na.zeros_like(i)], order='F', dtype='float64'),
-                na.ones_like(i).astype('float64'))
-            print L, imin, imax
+                na.zeros_like(i).astype('float64'))
+            print L, ActiveDimensions
+            print i[0], i[-1], j[0], j[-1], k[0],k[-1]
+        # Now we add actual data to the octree.
         for L, dx, dy, dz in zip(levels, dxes, dyes, dzes):
             print "adding to octree", L
             sel = (data["dx"] == dx)
-            thisx = (local_data["x"][sel] / dx).astype('int64')
-            thisy = (local_data["y"][sel] / dy).astype('int64')
-            thisz = (local_data["z"][sel] / dz).astype('int64')
+            thisx = (local_data["x"][sel] / dx).astype('int64') - cover_imin[0] * 2**L
+            thisy = (local_data["y"][sel] / dy).astype('int64') - cover_imin[1] * 2**L
+            thisz = (local_data["z"][sel] / dz).astype('int64') - cover_imin[2] * 2**L
             vals = na.array([local_data["CellMass"][sel]], order='F')
-            #vals = na.array([na.ones_like(local_data["CellMass"][sel])], order='F')
+            print na.min(thisx), na.min(thisy), na.min(thisz)
+            print na.max(thisx), na.max(thisy), na.max(thisz)
             octree.add_array_to_tree(L, thisx, thisy, thisz, vals,
                na.ones_like(thisx).astype('float64'))
-            print na.min(thisx), na.min(thisy), na.min(thisz)
-            print na.max(thisx), na.max(thisy), na.max(thisz)
         # Now we calculate the binding energy using a treecode.
         print 'calculating'
-        pot = G*octree.find_binding_energy(truncate, kinetic/G, 0.0)
+        pot = G*octree.find_binding_energy(truncate, kinetic/G, root_dx,
+            opening_angle)
+        #octree.print_all_nodes()
     else:
         try:
             pot = G*_cudaIsBound(local_data, truncate, kinetic/G)
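
The coverage computation snaps the object's extent down to root-grid cell left edges and then converts it to integer dimensions for the octree, with the +1 in index space covering the partial cell on the right. In isolation, assuming a 16^3 root grid and an object spanning [0.30, 0.55] on each axis:

    import numpy as na

    domain_dimensions = na.array([16, 16, 16])
    root_dx = 1. / domain_dimensions.astype('float64')
    cover_min = na.array([0.30, 0.30, 0.30])
    cover_max = na.array([0.55, 0.55, 0.55])
    # Snap both edges down to root-grid cell left edges...
    cover_min = cover_min - cover_min % root_dx
    cover_max = cover_max - cover_max % root_dx
    # ...then convert to indices; the +1 covers the partial right cell.
    cover_imin = (cover_min * domain_dimensions).astype('int64')
    cover_imax = (cover_max * domain_dimensions + 1).astype('int64')
    print cover_imin, cover_imax - cover_imin  # [4 4 4], [5 5 5]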


--- a/yt/utilities/_amr_utils/Octree.pyx	Fri Mar 18 11:27:47 2011 -0600
+++ b/yt/utilities/_amr_utils/Octree.pyx	Sat Mar 19 13:14:34 2011 -0600
@@ -77,11 +77,7 @@
 
 cdef int OTN_same(OctreeNode *node1, OctreeNode *node2):
     # Returns 1 if node1 == node2; 0 otherwise.
-    # If all of these conditions are met, they are the same node.
-    if node1.pos[0] == node2.pos[0] and \
-       node1.pos[1] == node2.pos[1] and \
-       node1.pos[2] == node2.pos[2] and \
-       node1.level == node2.level: return 1
+    if node1 is node2: return 1
     return 0
 
 cdef int OTN_contained(OctreeNode *node1, OctreeNode *node2):
@@ -134,8 +130,10 @@
     cdef OctreeNode ****root_nodes
     cdef np.int64_t top_grid_dims[3]
     cdef int incremental
+    # Below is for the treecode.
     cdef np.float64_t opening_angle
-    cdef int count1, count2
+    cdef np.float64_t root_dx[3]
+    cdef int switch
 
     def __cinit__(self, np.ndarray[np.int64_t, ndim=1] top_grid_dims,
                   int nvals, int incremental = False):
@@ -147,8 +145,6 @@
                 sizeof(np.float64_t)*nvals)
         cdef np.float64_t weight_val = 0.0
         self.nvals = nvals
-        count1 = 0
-        count2 = 0
         for i in range(nvals): vals[i] = 0.0
 
         self.top_grid_dims[0] = top_grid_dims[0]
@@ -307,20 +303,17 @@
 
     cdef np.float64_t fbe_node_separation(self, OctreeNode *node1, OctreeNode *node2):
         # Find the distance between the two nodes. To match FindBindingEnergy
-        # in data_point_utilities.c, we'll do this in code [0,1) units.
-        # We'll also assume 
-        cdef np.float64_t fn1, fn2, p1, p2, dist
-        cdef np.int64_t n1, n2
+        # in data_point_utilities.c, we'll do this in code units.
+        cdef np.float64_t dx1, dx2, p1, p2, dist
         cdef int i
         dist = 0.0
         for i in range(3):
-            n1 = self.po2[node1.level] * self.top_grid_dims[i]
-            n2 = self.po2[node2.level] * self.top_grid_dims[i]
-            fn1 = <np.float64_t> n1
-            fn2 = <np.float64_t> n2
+            # Discover the appropriate dx for each node/dim.
+            dx1 = self.root_dx[i] / (<np.float64_t> self.po2[node1.level])
+            dx2 = self.root_dx[i] / (<np.float64_t> self.po2[node2.level])
             # The added term is to re-cell center the data.
-            p1 = (<np.float64_t> node1.pos[i]) / fn1 + 1. / fn1 / 2.
-            p2 = (<np.float64_t> node2.pos[i]) / fn2 + 1. / fn2 / 2.
+            p1 = (<np.float64_t> node1.pos[i]) * dx1 + dx1/2.
+            p2 = (<np.float64_t> node2.pos[i]) * dx2 + dx2/2.
             dist += (p1 - p2) * (p1 - p2)
         dist = np.sqrt(dist)
         return dist
@@ -363,6 +356,7 @@
         # We *do* want to walk over the root_node that node1 is in to look for
         # level>0 nodes close to node1, but none of
         # the previously-walked root_nodes.
+        self.switch = 0
         for i in range(top_i, self.top_grid_dims[0]):
             for j in range(top_j, self.top_grid_dims[1]):
                 for k in range(top_k, self.top_grid_dims[2]):
@@ -375,28 +369,39 @@
             OctreeNode *node2):
         # node1 never changes.
         # node2 is the iterated-upon remote node.
+        # self.switch - In order to prevent double counting, we only want
+        # to call this function between node1 and node2 where node2
+        # comes *after* node1 in the iteration order.
+        # switch=0: do not calculate any potential, but keep on iterating.
+        # switch=1: do potentials and keep on iterating if needed.
         cdef int i, j, k, contained
         cdef np.float64_t potential, dist, angle
         potential = 0.0
         # Do nothing with node2 when it is the same as node1. No potential
-        # calculation, and no digging deeper.
-        if OTN_same(node1, node2): return 0.0
+        # calculation, and no digging deeper. But we flip the switch.
+        if OTN_same(node1, node2):
+            self.switch = 1
+            return 0.0
         # Is node1 contained inside node2?
         contained = OTN_contained(node1, node2)
         # If we have a childless node2 we can skip everything below and
         # calculate the potential, as long as node2 does not contain node1.
-        if node2.children[0][0][0] == NULL and not contained:
+        # Errr...  contained may not be needed here.
+        if node2.children[0][0][0] == NULL and not contained and \
+                not OTN_same(node1, node2) and self.switch:
             dist = self.fbe_node_separation(node1, node2)
-            self.count2 += 1
-            #print '%1.10e %1.10e %1.10e %1.10e xx' % (node1.val[0], node2.val[0],
-            #dist, node1.val[0] * node2.val[0] / dist)
             return node1.val[0] * node2.val[0] / dist
         # Now we apply the opening angle test. If the opening angle is small
         # enough, we use this node for the potential and dig no deeper.
         angle = self.fbe_opening_angle(node1, node2)
-        if angle < self.opening_angle and not contained:
+        if angle < self.opening_angle and not contained and \
+                not OTN_same(node1, node2) and self.switch:
             dist = self.fbe_node_separation(node1, node2)
             return node1.val[0] * node2.val[0] / dist
+        # If we've gotten this far with a childless node, it means we've
+        # already accounted for it.
+        if node2.children[0][0][0] == NULL:
+            return 0.0
         # If the above is not satisfied, we must dig deeper!
         for i in range(2):
             for j in range(2):
@@ -415,7 +420,6 @@
         # We have a childless node. Time to iterate over every other
         # node using the treecode method.
         if node.children[0][0][0] is NULL:
-            self.count1 += 1
             potential = self.fbe_potential_of_remote_nodes(node,
                 top_i, top_j, top_k)
             return potential
@@ -429,7 +433,8 @@
                         top_i, top_j, top_k)
         return potential
 
-    def find_binding_energy(self, truncate, kinetic, opening_angle = 1.0):
+    def find_binding_energy(self, int truncate, float kinetic,
+        np.ndarray[np.float64_t, ndim=1] root_dx, float opening_angle = 1.0):
         r"""Find the binding energy of an ensemble of data points using the
         treecode method.
         
@@ -451,24 +456,88 @@
         cdef np.float64_t potential
         potential = 0.0
         self.opening_angle = opening_angle
+        for i in range(3):
+            self.root_dx[i] = root_dx[i]
         # The first part of the loop goes over all of the root level nodes.
         for i in range(self.top_grid_dims[0]):
             for j in range(self.top_grid_dims[1]):
                 for k in range(self.top_grid_dims[2]):
                     if self.root_nodes[i][j][k].val[0] == 0.0: continue
-                    #print i,j,k
                     potential += self.fbe_iterate_children(self.root_nodes[i][j][k],
                         i, j, k)
-                    #potential = kinetic * 2
                     if truncate and potential > kinetic: break
                 if truncate and potential > kinetic: break
             if truncate and potential > kinetic:
                 print "Truncating!"
                 break
-        print 'count1', self.count1
-        print 'count2', self.count2
         return potential
 
+    cdef int node_ID(self, OctreeNode *node):
+        # Returns an unique ID for this node based on its position and level.
+        cdef int ID, i, offset, root
+        cdef np.int64_t this_grid_dims[3]
+        offset = 0
+        root = 1
+        for i in range(3):
+            root *= self.top_grid_dims[i]
+            this_grid_dims[i] = self.top_grid_dims[i] * 2**node.level
+        for i in range(node.level):
+            offset += root * 2**(3 * i)
+        ID = offset + (node.pos[0] + this_grid_dims[0] * (node.pos[1] + \
+            this_grid_dims[1] * node.pos[2]))
+        return ID
+
+    cdef int node_ID_on_level(self, OctreeNode *node):
+        # Returns the node ID on node.level for this node.
+        cdef int ID, i
+        cdef np.int64_t this_grid_dims[3]
+        for i in range(3):
+            this_grid_dims[i] = self.top_grid_dims[i] * 2**node.level
+        ID = node.pos[0] + this_grid_dims[0] * (node.pos[1] + \
+            this_grid_dims[1] * node.pos[2])
+        return ID
+
+    cdef void print_node_info(self, OctreeNode *node):
+        cdef int i, j, k
+        line = "%d\t" % self.node_ID(node)
+        line += "%d\t%d\t%d\t%d\t" % (node.level,node.pos[0],node.pos[1],node.pos[2])
+        for i in range(node.nvals):
+            line += "%1.5e\t" % node.val[i]
+        line += "%f\t" % node.weight_val
+        line += "%s\t%s\t" % (node.children[0][0][0] is not NULL, node.parent is not NULL)
+        if node.children[0][0][0] is not NULL:
+            nline = ""
+            for i in range(2):
+                for j in range(2):
+                    for k in range(2):
+                        nline += "%d," % self.node_ID(node.children[i][j][k])
+            line += nline
+        print line
+
+    cdef void iterate_print_nodes(self, OctreeNode *node):
+        cdef int i, j, k
+        self.print_node_info(node)
+        if node.children[0][0][0] is NULL:
+            return
+        for i in range(2):
+            for j in range(2):
+                for k in range(2):
+                    self.iterate_print_nodes(node.children[i][j][k])
+        return
+
+    def print_all_nodes(self):
+        cdef int i, j, k
+        line = "ID\tlevel\tx\ty\tz\t"
+        for i in range(self.nvals):
+            line += "val%d\t\t" % i
+        line += "weight\t\tchild?\tparent?\tchildren"
+        print line
+        for i in range(self.top_grid_dims[0]):
+            for j in range(self.top_grid_dims[1]):
+                for k in range(self.top_grid_dims[2]):
+                    self.iterate_print_nodes(self.root_nodes[i][j][k])
+        return
+
     def __dealloc__(self):
         cdef int i, j, k
         for i in range(self.top_grid_dims[0]):
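
node_ID linearizes (level, position) into a single integer: every coarser level l contributes a block of root * 8**l IDs as an offset, and the position is then flattened x-fastest on the node's own level. The same computation in plain Python, assuming a 2x2x2 top grid:

    def node_ID(top_grid_dims, level, pos):
        # Offset past every ID used by coarser levels, then flatten pos.
        root = top_grid_dims[0] * top_grid_dims[1] * top_grid_dims[2]
        offset = sum(root * 2 ** (3 * i) for i in range(level))
        dims = [d * 2 ** level for d in top_grid_dims]
        return offset + pos[0] + dims[0] * (pos[1] + dims[1] * pos[2])

    print node_ID((2, 2, 2), 0, (1, 1, 1))  # 7, the last root node
    print node_ID((2, 2, 2), 1, (0, 0, 0))  # 8, the first level-1 node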


--- a/yt/utilities/data_point_utilities.c	Fri Mar 18 11:27:47 2011 -0600
+++ b/yt/utilities/data_point_utilities.c	Sat Mar 19 13:14:34 2011 -0600
@@ -1590,8 +1590,6 @@
                             sqrtl( (x_i-x_o)*(x_i-x_o)
                                  + (y_i-y_o)*(y_i-y_o)
                                  + (z_i-z_o)*(z_i-z_o) );
-            //fprintf(stdout, "%1.10e %1.10e %1.10e %1.10e xx\n",
-            //mass_o, mass_i, temp1, temp2);
         }
         total_potential += this_potential;
 	workDone += n_q - q_outer - 1;


http://bitbucket.org/yt_analysis/yt/changeset/d173960477a7/
changeset:   r3894:d173960477a7
branch:      yt
user:        sskory
date:        2011-03-20 19:44:58
summary:     More progress...
affected #:  2 files (979 bytes)

--- a/yt/data_objects/derived_quantities.py	Sat Mar 19 13:14:34 2011 -0600
+++ b/yt/data_objects/derived_quantities.py	Sun Mar 20 12:44:58 2011 -0600
@@ -340,11 +340,18 @@
         # Make an octree for one value (mass) with incremental=True.
 #         octree = Octree(na.array(data.pf.domain_dimensions), 1, True)
         # First we find the min/max coverage of this data object.
+        # The octree doesn't like uneven root grids, so we will make it cubical.
         root_dx = 1./na.array(data.pf.domain_dimensions).astype('float64')
-        cover_min = na.array([na.amin(local_data['x']), na.amin(local_data['y']),
+        left = min([na.amin(local_data['x']), na.amin(local_data['y']),
             na.amin(local_data['z'])])
-        cover_max = na.array([na.amax(local_data['x']), na.amax(local_data['y']),
+        right = max([na.amax(local_data['x']), na.amax(local_data['y']),
             na.amax(local_data['z'])])
+        #cover_min = na.array([na.amin(local_data['x']), na.amin(local_data['y']),
+        #    na.amin(local_data['z'])])
+        #cover_max = na.array([na.amax(local_data['x']), na.amax(local_data['y']),
+        #    na.amax(local_data['z'])])
+        cover_min = na.array([left, left, left])
+        cover_max = na.array([right, right, right])
         # Fix the coverage to match to root grid cell left 
         # edges for making indexes.
         cover_min = cover_min - cover_min % root_dx
@@ -399,6 +406,7 @@
         pot = G*octree.find_binding_energy(truncate, kinetic/G, root_dx,
             opening_angle)
         #octree.print_all_nodes()
+        #pot = 0.
     else:
         try:
             pot = G*_cudaIsBound(local_data, truncate, kinetic/G)
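
Making the coverage cubical just replaces the per-axis extents with the global min and max over all three axes, so the root grid handed to the octree is the same size on every side. In isolation, with made-up extents:

    import numpy as na

    x = na.array([0.30, 0.42])
    y = na.array([0.35, 0.60])
    z = na.array([0.33, 0.50])
    left = min([na.amin(x), na.amin(y), na.amin(z)])
    right = max([na.amax(x), na.amax(y), na.amax(z)])
    cover_min = na.array([left, left, left])     # [0.30, 0.30, 0.30]
    cover_max = na.array([right, right, right])  # [0.60, 0.60, 0.60]
    print cover_min, cover_max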


--- a/yt/utilities/_amr_utils/Octree.pyx	Sat Mar 19 13:14:34 2011 -0600
+++ b/yt/utilities/_amr_utils/Octree.pyx	Sun Mar 20 12:44:58 2011 -0600
@@ -32,6 +32,8 @@
 
 from stdlib cimport malloc, free, abs
 
+import sys
+
 cdef extern from "stdlib.h":
     # NOTE that size_t might not be int
     void *alloca(int)
@@ -134,6 +136,7 @@
     cdef np.float64_t opening_angle
     cdef np.float64_t root_dx[3]
     cdef int switch
+    cdef int count1,count2,count3
 
     def __cinit__(self, np.ndarray[np.int64_t, ndim=1] top_grid_dims,
                   int nvals, int incremental = False):
@@ -347,26 +350,33 @@
         return d2 / dist
 
     cdef np.float64_t fbe_potential_of_remote_nodes(self, OctreeNode *node1,
-            int top_i, int top_j, int top_k):
+            int sum):
         # Given a childless node "node1", calculate the potential for it from
         # all the other nodes using the treecode method.
-        cdef int i, j, k
+        cdef int i, j, k, this_sum
         cdef np.float64_t potential
         potential = 0.0
         # We *do* want to walk over the root_node that node1 is in to look for
         # level>0 nodes close to node1, but none of
         # the previously-walked root_nodes.
         self.switch = 0
-        for i in range(top_i, self.top_grid_dims[0]):
-            for j in range(top_j, self.top_grid_dims[1]):
-                for k in range(top_k, self.top_grid_dims[2]):
+        self.count2 = 0
+        self.count3 = 0
+        this_sum = 0
+        for i in range(self.top_grid_dims[0]):
+            for j in range(self.top_grid_dims[1]):
+                for k in range(self.top_grid_dims[2]):
+                    this_sum += 1
+                    if this_sum < sum: continue
                     if self.root_nodes[i][j][k].val[0] == 0: continue
                     potential += self.fbe_iterate_remote_nodes(node1,
                         self.root_nodes[i][j][k])
+        #print 'count2', self.count2, self.node_ID(node1), self.count3
         return potential
 
     cdef np.float64_t fbe_iterate_remote_nodes(self, OctreeNode *node1,
             OctreeNode *node2):
+
         # node1 never changes.
         # node2 is the iterated-upon remote node.
         # self.switch - In order to prevent double counting, we only want
@@ -377,6 +387,7 @@
         cdef int i, j, k, contained
         cdef np.float64_t potential, dist, angle
         potential = 0.0
+        self.count3 += 1
         # Do nothing with node2 when it is the same as node1. No potential
         # calculation, and no digging deeper. But we flip the switch.
         if OTN_same(node1, node2):
@@ -389,6 +400,8 @@
         # Errr...  contained may not be needed here.
         if node2.children[0][0][0] == NULL and not contained and \
                 not OTN_same(node1, node2) and self.switch:
+            self.count2 += 1
+            #print 'h', self.node_ID(node1), self.node_ID(node2)
             dist = self.fbe_node_separation(node1, node2)
             return node1.val[0] * node2.val[0] / dist
         # Now we apply the opening angle test. If the opening angle is small
@@ -411,8 +424,7 @@
                         node2.children[i][j][k])
         return potential
 
-    cdef np.float64_t fbe_iterate_children(self, OctreeNode *node, int top_i,
-            int top_j, int top_k):
+    cdef np.float64_t fbe_iterate_children(self, OctreeNode *node, int sum):
         # Recursively iterate over child nodes until we get a childless node.
         cdef int i, j, k
         cdef np.float64_t potential
@@ -420,8 +432,8 @@
         # We have a childless node. Time to iterate over every other
         # node using the treecode method.
         if node.children[0][0][0] is NULL:
-            potential = self.fbe_potential_of_remote_nodes(node,
-                top_i, top_j, top_k)
+            self.count1 += 1
+            potential = self.fbe_potential_of_remote_nodes(node, sum)
             return potential
         # If the node has children, we need to walk all of them returning
         # the potential for each.
@@ -430,7 +442,7 @@
                 for k in range(2):
                     if node.children[i][j][k].val[0] == 0.0: continue
                     potential += self.fbe_iterate_children(node.children[i][j][k],
-                        top_i, top_j, top_k)
+                        sum)
         return potential
 
     def find_binding_energy(self, int truncate, float kinetic,
@@ -452,24 +464,29 @@
         #    potential. If neither of these are done, we call
         #    fbe_iterate_remote_nodes on the children nodes.
         # 5. All of this returns a total, non-double-counted potential.
-        cdef int i, j, k
+        cdef int i, j, k, sum
         cdef np.float64_t potential
         potential = 0.0
         self.opening_angle = opening_angle
+        self.count1 = 0
+        self.count2 = 0
         for i in range(3):
             self.root_dx[i] = root_dx[i]
         # The first part of the loop goes over all of the root level nodes.
+        sum = 0
         for i in range(self.top_grid_dims[0]):
             for j in range(self.top_grid_dims[1]):
                 for k in range(self.top_grid_dims[2]):
                     if self.root_nodes[i][j][k].val[0] == 0.0: continue
+                    sum += 1
                     potential += self.fbe_iterate_children(self.root_nodes[i][j][k],
-                        i, j, k)
+                        sum)
                     if truncate and potential > kinetic: break
                 if truncate and potential > kinetic: break
             if truncate and potential > kinetic:
                 print "Truncating!"
                 break
+        #print 'count1', self.count1
         return potential
 
     cdef int node_ID(self, OctreeNode *node):
@@ -513,6 +530,7 @@
                         nline += "%d," % self.node_ID(node.children[i][j][k])
             line += nline
         print line
+        return
 
     cdef void iterate_print_nodes(self, OctreeNode *node):
         cdef int i, j, k
@@ -527,6 +545,8 @@
 
     def print_all_nodes(self):
         cdef int i, j, k
+        sys.stdout.flush()
+        sys.stderr.flush()
         line = "ID\tlevel\tx\ty\tz\t"
         for i in range(self.nvals):
             line += "val%d\t\t" % i
@@ -536,6 +556,8 @@
             for j in range(self.top_grid_dims[1]):
                 for k in range(self.top_grid_dims[2]):
                     self.iterate_print_nodes(self.root_nodes[i][j][k])
+        sys.stdout.flush()
+        sys.stderr.flush()
         return
 
     def __dealloc__(self):
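
For reference, the truncate-and-break pattern threaded through these hunks
short-circuits the sum as soon as the accumulated potential exceeds the
kinetic energy, at which point the object is already known to be bound. A
rough pure-Python sketch of the same idea over a direct pair sum (a
hypothetical stand-in for FindBindingEnergy, not the actual C):

    import math

    def direct_binding_potential(mass, x, y, z, kinetic, truncate=True):
        # O(N^2) pair sum in code units; bail out early once bound.
        pot = 0.0
        n = len(mass)
        for a in range(n - 1):
            for b in range(a + 1, n):
                d = math.sqrt((x[a] - x[b])**2 + (y[a] - y[b])**2
                              + (z[a] - z[b])**2)
                pot += mass[a] * mass[b] / d
                if truncate and pot > kinetic:
                    return pot  # "Truncating!" -- no need to finish the sum
        return pot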


http://bitbucket.org/yt_analysis/yt/changeset/bfd5016dbe09/
changeset:   r3895:bfd5016dbe09
branch:      yt
user:        sskory
date:        2011-03-20 22:04:37
summary:     I think it works, but it's not very fast, even with very aggressive opening angles.
affected #:  2 files (585 bytes)

--- a/yt/data_objects/derived_quantities.py	Sun Mar 20 12:44:58 2011 -0600
+++ b/yt/data_objects/derived_quantities.py	Sun Mar 20 15:04:37 2011 -0600
@@ -337,19 +337,12 @@
     if treecode:
         # Calculate the binding energy using the treecode method.
         # Faster but less accurate.
-        # Make an octree for one value (mass) with incremental=True.
-#         octree = Octree(na.array(data.pf.domain_dimensions), 1, True)
-        # First we find the min/max coverage of this data object.
         # The octree doesn't like uneven root grids, so we will make it cubical.
         root_dx = 1./na.array(data.pf.domain_dimensions).astype('float64')
         left = min([na.amin(local_data['x']), na.amin(local_data['y']),
             na.amin(local_data['z'])])
         right = max([na.amax(local_data['x']), na.amax(local_data['y']),
             na.amax(local_data['z'])])
-        #cover_min = na.array([na.amin(local_data['x']), na.amin(local_data['y']),
-        #    na.amin(local_data['z'])])
-        #cover_max = na.array([na.amax(local_data['x']), na.amax(local_data['y']),
-        #    na.amax(local_data['z'])])
         cover_min = na.array([left, left, left])
         cover_max = na.array([right, right, right])
         # Fix the coverage to match to root grid cell left 
@@ -406,7 +399,6 @@
         pot = G*octree.find_binding_energy(truncate, kinetic/G, root_dx,
             opening_angle)
         #octree.print_all_nodes()
-        #pot = 0.
     else:
         try:
             pot = G*_cudaIsBound(local_data, truncate, kinetic/G)


--- a/yt/utilities/_amr_utils/Octree.pyx	Sun Mar 20 12:44:58 2011 -0600
+++ b/yt/utilities/_amr_utils/Octree.pyx	Sun Mar 20 15:04:37 2011 -0600
@@ -32,7 +32,7 @@
 
 from stdlib cimport malloc, free, abs
 
-import sys
+import sys, time
 
 cdef extern from "stdlib.h":
     # NOTE that size_t might not be int
@@ -136,7 +136,7 @@
     cdef np.float64_t opening_angle
     cdef np.float64_t root_dx[3]
     cdef int switch
-    cdef int count1,count2,count3
+    cdef int count2,count3
 
     def __cinit__(self, np.ndarray[np.int64_t, ndim=1] top_grid_dims,
                   int nvals, int incremental = False):
@@ -360,8 +360,6 @@
         # level>0 nodes close to node1, but none of
         # the previously-walked root_nodes.
         self.switch = 0
-        self.count2 = 0
-        self.count3 = 0
         this_sum = 0
         for i in range(self.top_grid_dims[0]):
             for j in range(self.top_grid_dims[1]):
@@ -371,7 +369,6 @@
                     if self.root_nodes[i][j][k].val[0] == 0: continue
                     potential += self.fbe_iterate_remote_nodes(node1,
                         self.root_nodes[i][j][k])
-        #print 'count2', self.count2, self.node_ID(node1), self.count3
         return potential
 
     cdef np.float64_t fbe_iterate_remote_nodes(self, OctreeNode *node1,
@@ -387,7 +384,6 @@
         cdef int i, j, k, contained
         cdef np.float64_t potential, dist, angle
         potential = 0.0
-        self.count3 += 1
         # Do nothing with node2 when it is the same as node1. No potential
         # calculation, and no digging deeper. But we flip the switch.
         if OTN_same(node1, node2):
@@ -401,7 +397,6 @@
         if node2.children[0][0][0] == NULL and not contained and \
                 not OTN_same(node1, node2) and self.switch:
             self.count2 += 1
-            #print 'h', self.node_ID(node1), self.node_ID(node2)
             dist = self.fbe_node_separation(node1, node2)
             return node1.val[0] * node2.val[0] / dist
         # Now we apply the opening angle test. If the opening angle is small
@@ -410,6 +405,7 @@
         if angle < self.opening_angle and not contained and \
                 not OTN_same(node1, node2) and self.switch:
             dist = self.fbe_node_separation(node1, node2)
+            self.count3 += 1
             return node1.val[0] * node2.val[0] / dist
         # If we've gotten this far with a childless node, it means we've
         # already accounted for it.
@@ -432,7 +428,6 @@
         # We have a childless node. Time to iterate over every other
         # node using the treecode method.
         if node.children[0][0][0] is NULL:
-            self.count1 += 1
             potential = self.fbe_potential_of_remote_nodes(node, sum)
             return potential
         # If the node has children, we need to walk all of them returning
@@ -445,6 +440,8 @@
                         sum)
         return potential
 
+    @cython.boundscheck(False)
+    @cython.wraparound(False)
     def find_binding_energy(self, int truncate, float kinetic,
         np.ndarray[np.float64_t, ndim=1] root_dx, float opening_angle = 1.0):
         r"""Find the binding energy of an ensemble of data points using the
@@ -468,7 +465,7 @@
         cdef np.float64_t potential
         potential = 0.0
         self.opening_angle = opening_angle
-        self.count1 = 0
+        self.count3 = 0
         self.count2 = 0
         for i in range(3):
             self.root_dx[i] = root_dx[i]
@@ -486,7 +483,8 @@
             if truncate and potential > kinetic:
                 print "Truncating!"
                 break
-        #print 'count1', self.count1
+        print 'count2', self.count2
+        print 'count3', self.count3
         return potential
 
     cdef int node_ID(self, OctreeNode *node):
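
The opening-angle test counted above is the standard Barnes-Hut criterion: a
remote node is folded into a single aggregate mass when it subtends a small
enough angle at the target node. A one-line sketch (names assumed):

    def passes_opening_angle(node_size, separation, opening_angle=1.0):
        # fbe_opening_angle returns roughly size / distance; when that ratio
        # is below the threshold, the whole node is used as a point mass.
        return (node_size / separation) < opening_angle

Smaller opening_angle values dig deeper into the tree and are more accurate
but slower, which is consistent with the summary above about even aggressive
opening angles not being fast.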


http://bitbucket.org/yt_analysis/yt/changeset/b8e523048400/
changeset:   r3896:b8e523048400
branch:      yt
user:        sskory
date:        2011-03-21 01:11:08
summary:     Beginnings of the linked list.
affected #:  2 files (2.3 KB)

--- a/yt/data_objects/derived_quantities.py	Sun Mar 20 15:04:37 2011 -0600
+++ b/yt/data_objects/derived_quantities.py	Sun Mar 20 18:11:08 2011 -0600
@@ -396,9 +396,11 @@
                na.ones_like(thisx).astype('float64'))
         # Now we calculate the binding energy using a treecode.
         print 'calculating'
-        pot = G*octree.find_binding_energy(truncate, kinetic/G, root_dx,
-            opening_angle)
-        #octree.print_all_nodes()
+        #pot = G*octree.find_binding_energy(truncate, kinetic/G, root_dx,
+        #    opening_angle)
+        octree.finalize(treecode = 1)
+        octree.print_all_nodes()
+        pot = 0
     else:
         try:
             pot = G*_cudaIsBound(local_data, truncate, kinetic/G)


--- a/yt/utilities/_amr_utils/Octree.pyx	Sun Mar 20 15:04:37 2011 -0600
+++ b/yt/utilities/_amr_utils/Octree.pyx	Sun Mar 20 18:11:08 2011 -0600
@@ -50,6 +50,7 @@
     int nvals
     OctreeNode *children[2][2][2]
     OctreeNode *parent
+    OctreeNode *next
 
 cdef void OTN_add_value(OctreeNode *self,
         np.float64_t *val, np.float64_t weight_val):
@@ -104,6 +105,7 @@
     node.pos[2] = pos[2]
     node.nvals = nvals
     node.parent = parent
+    node.next = NULL
     node.val = <np.float64_t *> malloc(
                 nvals * sizeof(np.float64_t))
     for i in range(nvals):
@@ -137,6 +139,7 @@
     cdef np.float64_t root_dx[3]
     cdef int switch
     cdef int count2,count3
+    cdef OctreeNode *last_node
 
     def __cinit__(self, np.ndarray[np.int64_t, ndim=1] top_grid_dims,
                   int nvals, int incremental = False):
@@ -440,6 +443,54 @@
                         sum)
         return potential
 
+    cdef void set_next_initial(self, OctreeNode *node, int treecode):
+        cdef int i, j, k
+        if treecode and node.val[0] is not 0.:
+            self.last_node.next = node
+            self.last_node = node
+        if node.children[0][0][0] is NULL: return
+        for i in range(2):
+            for j in range(2):
+                for k in range(2):
+                    self.set_next_initial(node.children[i][j][k], treecode)
+        return
+
+    cdef void set_next_final(self, OctreeNode *node):
+        cdef int i, j, k
+        cdef OctreeNode *initial_next
+        cdef OctreeNode *temp_next
+        initial_next = node.next
+        temp_next = node.next
+        if node.next is NULL: return
+        while temp_next.level > node.level:
+            temp_next = temp_next.next
+            if temp_next is NULL: break
+        node.next = temp_next
+        self.set_next_final(initial_next)
+
+    def finalize(self, int treecode = 0):
+        # Set up the linked list for the nodes.
+        # Set treecode = 1 if nodes with no mass are to be skipped in the
+        # list.
+        cdef int i, j, k, sum
+        self.last_node = self.root_nodes[0][0][0]
+        for i in range(self.top_grid_dims[0]):
+            for j in range(self.top_grid_dims[1]):
+                for k in range(self.top_grid_dims[2]):
+                    self.set_next_initial(self.root_nodes[i][j][k], treecode)
+        # Now we want to link to the next node in the list that is
+        # on a level the same or lower (coarser) than us.
+        sum = 1
+        for i in range(self.top_grid_dims[0]):
+            for j in range(self.top_grid_dims[1]):
+                for k in range(self.top_grid_dims[2]):
+                    self.set_next_final(self.root_nodes[i][j][k])
+                    if sum < 7:
+                        if treecode and self.root_nodes[int(sum/4)][int(sum%4/2)][int(sum%2)].val[0] is not 0:
+                            self.root_nodes[i][j][k].next = \
+                                self.root_nodes[int(sum/4)][int(sum%4/2)][int(sum%2)]
+                    sum += 1
+
     @cython.boundscheck(False)
     @cython.wraparound(False)
     def find_binding_energy(self, int truncate, float kinetic,
@@ -515,6 +566,9 @@
     cdef void print_node_info(self, OctreeNode *node):
         cdef int i, j, k
         line = "%d\t" % self.node_ID(node)
+        if node.next is not NULL:
+            line += "%d\t" % self.node_ID(node.next)
+        else: line += "-1\t"
         line += "%d\t%d\t%d\t%d\t" % (node.level,node.pos[0],node.pos[1],node.pos[2])
         for i in range(node.nvals):
             line += "%1.5e\t" % node.val[i]
@@ -545,7 +599,7 @@
         cdef int i, j, k
         sys.stdout.flush()
         sys.stderr.flush()
-        line = "ID\tlevel\tx\ty\tz\t"
+        line = "ID\tnext\tlevel\tx\ty\tz\t"
         for i in range(self.nvals):
             line += "val%d\t\t" % i
         line += "weight\t\tchild?\tparent?\tchildren"
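
The finalize()/set_next_initial machinery begun here threads a .next pointer
through the tree in depth-first order, so later walks can follow a flat list
instead of recursing. A self-contained sketch of that threading on a generic
tree (the Node class is illustrative, not the Cython struct):

    class Node:
        def __init__(self, children=None):
            self.children = children or []
            self.next = None

    def link_depth_first(root, last=None):
        # Visit nodes depth-first, pointing each previously visited node's
        # .next at the node visited after it; returns the new list tail.
        if last is not None:
            last.next = root
        last = root
        for child in root.children:
            last = link_depth_first(child, last)
        return last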


http://bitbucket.org/yt_analysis/yt/changeset/006a39da5fbb/
changeset:   r3897:006a39da5fbb
branch:      yt
user:        sskory
date:        2011-03-21 16:09:56
summary:     Linked-list treecode method works.
affected #:  2 files (3.1 KB)

--- a/yt/data_objects/derived_quantities.py	Sun Mar 20 18:11:08 2011 -0600
+++ b/yt/data_objects/derived_quantities.py	Mon Mar 21 09:09:56 2011 -0600
@@ -274,7 +274,7 @@
              combine_function=_combBaryonSpinParameter, n_ret=4)
     
 def _IsBound(data, truncate = True, include_thermal_energy = False,
-    treecode = False, opening_angle = 1.0):
+    treecode = 0, opening_angle = 1.0):
     """
     This returns whether or not the object is gravitationally bound
     
@@ -396,11 +396,14 @@
                na.ones_like(thisx).astype('float64'))
         # Now we calculate the binding energy using a treecode.
         print 'calculating'
-        #pot = G*octree.find_binding_energy(truncate, kinetic/G, root_dx,
-        #    opening_angle)
-        octree.finalize(treecode = 1)
-        octree.print_all_nodes()
-        pot = 0
+        if treecode == 1:
+            pot = G*octree.find_binding_energy(truncate, kinetic/G, root_dx,
+                opening_angle)
+        elif treecode == 2:
+            octree.finalize(treecode = 1)
+            pot = G*octree.find_b_e(truncate, kinetic/G, root_dx, opening_angle)
+        #octree.print_all_nodes()
+        #pot = 0
     else:
         try:
             pot = G*_cudaIsBound(local_data, truncate, kinetic/G)


--- a/yt/utilities/_amr_utils/Octree.pyx	Sun Mar 20 18:11:08 2011 -0600
+++ b/yt/utilities/_amr_utils/Octree.pyx	Mon Mar 21 09:09:56 2011 -0600
@@ -51,6 +51,7 @@
     OctreeNode *children[2][2][2]
     OctreeNode *parent
     OctreeNode *next
+    OctreeNode *up_next
 
 cdef void OTN_add_value(OctreeNode *self,
         np.float64_t *val, np.float64_t weight_val):
@@ -106,6 +107,7 @@
     node.nvals = nvals
     node.parent = parent
     node.next = NULL
+    node.up_next = NULL
     node.val = <np.float64_t *> malloc(
                 nvals * sizeof(np.float64_t))
     for i in range(nvals):
@@ -136,9 +138,11 @@
     cdef int incremental
     # Below is for the treecode.
     cdef np.float64_t opening_angle
+    # We'll store dist here so it doesn't have to be calculated twice.
+    cdef np.float64_t dist
     cdef np.float64_t root_dx[3]
     cdef int switch
-    cdef int count2,count3
+    cdef int full_count,tree_count
     cdef OctreeNode *last_node
 
     def __cinit__(self, np.ndarray[np.int64_t, ndim=1] top_grid_dims,
@@ -350,6 +354,7 @@
                 d2 = f64max(d2, dx2)
         # Now calculate the opening angle.
         dist = self.fbe_node_separation(node1, node2)
+        self.dist = dist
         return d2 / dist
 
     cdef np.float64_t fbe_potential_of_remote_nodes(self, OctreeNode *node1,
@@ -399,7 +404,7 @@
         # Errr...  contained may not be needed here.
         if node2.children[0][0][0] == NULL and not contained and \
                 not OTN_same(node1, node2) and self.switch:
-            self.count2 += 1
+            self.full_count += 1
             dist = self.fbe_node_separation(node1, node2)
             return node1.val[0] * node2.val[0] / dist
         # Now we apply the opening angle test. If the opening angle is small
@@ -408,7 +413,7 @@
         if angle < self.opening_angle and not contained and \
                 not OTN_same(node1, node2) and self.switch:
             dist = self.fbe_node_separation(node1, node2)
-            self.count3 += 1
+            self.tree_count += 1
             return node1.val[0] * node2.val[0] / dist
         # If we've gotten this far with a childless node, it means we've
         # already accounted for it.
@@ -465,7 +470,7 @@
         while temp_next.level > node.level:
             temp_next = temp_next.next
             if temp_next is NULL: break
-        node.next = temp_next
+        node.up_next = temp_next
         self.set_next_final(initial_next)
 
     def finalize(self, int treecode = 0):
@@ -487,10 +492,67 @@
                     self.set_next_final(self.root_nodes[i][j][k])
                     if sum < 7:
                         if treecode and self.root_nodes[int(sum/4)][int(sum%4/2)][int(sum%2)].val[0] is not 0:
-                            self.root_nodes[i][j][k].next = \
+                            self.root_nodes[i][j][k].up_next = \
                                 self.root_nodes[int(sum/4)][int(sum%4/2)][int(sum%2)]
                     sum += 1
 
+    cdef np.float64_t fbe_main(self, np.float64_t potential, int truncate,
+            np.float64_t kinetic):
+        cdef np.float64_t angle
+        cdef OctreeNode *this_node
+        cdef OctreeNode *pair_node
+        this_node = self.root_nodes[0][0][0]
+        while this_node is not NULL:
+            # Iterate down to a childless node.
+            while this_node.children[0][0][0] is not NULL:
+                this_node = this_node.next
+                if this_node is NULL: break
+            if truncate and potential > kinetic:
+                print 'Truncating...'
+                break
+            pair_node = this_node.next
+            while pair_node is not NULL:
+                # If pair_node is a childless node, we can calculate the
+                # right now, and get a new pair_node.
+                if pair_node.children[0][0][0] == NULL:
+                    dist = self.fbe_node_separation(this_node, pair_node)
+                    potential += this_node.val[0] * pair_node.val[0] / dist
+                    if truncate and potential > kinetic: break
+                    pair_node = pair_node.next
+                    self.full_count += 1
+                    continue
+                # Next, if the opening angle to pair_node is small enough,
+                # calculate the potential and get a new pair_node using
+                # up_next because we don't need to look at pair_node's children.
+                angle = self.fbe_opening_angle(this_node, pair_node)
+                if angle < self.opening_angle:
+                    potential += this_node.val[0] * pair_node.val[0] / self.dist
+                    if truncate and potential > kinetic: break
+                    pair_node = pair_node.up_next
+                    self.tree_count += 1
+                # If we've gotten this far, pair_node has children, but it's
+                # too coarse, so we simply dig deeper using .next.
+                else:
+                    pair_node = pair_node.next
+            # Now we find a new this_node in the list.
+            this_node = this_node.next
+        return potential
+
+    def find_b_e(self, int truncate, float kinetic,
+        np.ndarray[np.float64_t, ndim=1] root_dx, float opening_angle = 1.0):
+        cdef int i, j, k, sum
+        cdef np.float64_t potential
+        potential = 0.0
+        self.tree_count = 0
+        self.full_count = 0
+        self.opening_angle = opening_angle
+        for i in range(3):
+            self.root_dx[i] = root_dx[i]
+        potential = self.fbe_main(potential, truncate, kinetic)
+        print 'full', self.full_count
+        print 'tree', self.tree_count
+        return potential
+
     @cython.boundscheck(False)
     @cython.wraparound(False)
     def find_binding_energy(self, int truncate, float kinetic,
@@ -516,8 +578,8 @@
         cdef np.float64_t potential
         potential = 0.0
         self.opening_angle = opening_angle
-        self.count3 = 0
-        self.count2 = 0
+        self.tree_count = 0
+        self.full_count = 0
         for i in range(3):
             self.root_dx[i] = root_dx[i]
         # The first part of the loop goes over all of the root level nodes.
@@ -534,8 +596,8 @@
             if truncate and potential > kinetic:
                 print "Truncating!"
                 break
-        print 'count2', self.count2
-        print 'count3', self.count3
+        print 'full', self.full_count
+        print 'tree', self.tree_count
         return potential
 
     cdef int node_ID(self, OctreeNode *node):
@@ -569,6 +631,9 @@
         if node.next is not NULL:
             line += "%d\t" % self.node_ID(node.next)
         else: line += "-1\t"
+        if node.up_next is not NULL:
+            line += "%d\t" % self.node_ID(node.up_next)
+        else: line += "-1\t"
         line += "%d\t%d\t%d\t%d\t" % (node.level,node.pos[0],node.pos[1],node.pos[2])
         for i in range(node.nvals):
             line += "%1.5e\t" % node.val[i]
@@ -599,7 +664,7 @@
         cdef int i, j, k
         sys.stdout.flush()
         sys.stderr.flush()
-        line = "ID\tnext\tlevel\tx\ty\tz\t"
+        line = "ID\tnext\tup_n\tlevel\tx\ty\tz\t"
         for i in range(self.nvals):
             line += "val%d\t\t" % i
         line += "weight\t\tchild?\tparent?\tchildren"
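
The find_b_e/fbe_main walk introduced here consumes the two lists: .next
steps to the next node in depth-first order (descending into children),
while .up_next jumps to the next node at the same or coarser level, skipping
an entire subtree once the opening-angle test lets it stand in as one mass.
A sketch of that inner loop (the Node fields and function name are assumed,
and the link setup is omitted):

    import math

    class Node:
        def __init__(self, mass, center, size, leaf):
            self.mass, self.center = mass, center
            self.size, self.leaf = size, leaf
            self.next = None      # depth-first successor
            self.up_next = None   # successor that skips this subtree

    def potential_against_list(this_node, theta=1.0):
        pot = 0.0
        pair = this_node.next
        while pair is not None:
            dist = math.dist(this_node.center, pair.center)
            if pair.leaf:
                pot += this_node.mass * pair.mass / dist
                pair = pair.next
            elif pair.size / dist < theta:
                # Far enough away: use the aggregate mass and skip
                # everything the node contains.
                pot += this_node.mass * pair.mass / dist
                pair = pair.up_next
            else:
                pair = pair.next  # too close and too coarse; dig deeper
        return pot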


http://bitbucket.org/yt_analysis/yt/changeset/f6965d73b85c/
changeset:   r3898:f6965d73b85c
branch:      yt
user:        MatthewTurk
date:        2011-03-21 17:03:12
summary:     First pass at a few optimizations of the binding energy calculation
affected #:  1 file (169 bytes)

--- a/yt/utilities/_amr_utils/Octree.pyx	Mon Mar 21 09:09:56 2011 -0600
+++ b/yt/utilities/_amr_utils/Octree.pyx	Mon Mar 21 09:03:12 2011 -0700
@@ -38,6 +38,9 @@
     # NOTE that size_t might not be int
     void *alloca(int)
 
+cdef extern from "math.h":
+    double sqrt(double x)
+
 cdef inline np.float64_t f64max(np.float64_t f0, np.float64_t f1):
     if f0 > f1: return f0
     return f1
@@ -311,6 +314,7 @@
                             level, curpos + added, pdata, vdata, wdata)
         return added
 
+    @cython.cdivision(True)
     cdef np.float64_t fbe_node_separation(self, OctreeNode *node1, OctreeNode *node2):
         # Find the distance between the two nodes. To match FindBindingEnergy
         # in data_point_utilities.c, we'll do this in code units.
@@ -325,9 +329,10 @@
             p1 = (<np.float64_t> node1.pos[i]) * dx1 + dx1/2.
             p2 = (<np.float64_t> node2.pos[i]) * dx2 + dx2/2.
             dist += (p1 - p2) * (p1 - p2)
-        dist = np.sqrt(dist)
+        dist = sqrt(dist)
         return dist
     
+    @cython.cdivision(True)
     cdef np.float64_t fbe_opening_angle(self, OctreeNode *node1,
             OctreeNode *node2):
         # Calculate the opening angle of node2 upon the center of node1.
@@ -379,6 +384,7 @@
                         self.root_nodes[i][j][k])
         return potential
 
+    @cython.cdivision(True)
     cdef np.float64_t fbe_iterate_remote_nodes(self, OctreeNode *node1,
             OctreeNode *node2):
 
@@ -496,9 +502,10 @@
                                 self.root_nodes[int(sum/4)][int(sum%4/2)][int(sum%2)]
                     sum += 1
 
+    @cython.cdivision(True)
     cdef np.float64_t fbe_main(self, np.float64_t potential, int truncate,
             np.float64_t kinetic):
-        cdef np.float64_t angle
+        cdef np.float64_t angle, dist
         cdef OctreeNode *this_node
         cdef OctreeNode *pair_node
         this_node = self.root_nodes[0][0][0]
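
These optimizations swap the Python-level np.sqrt call for libm's sqrt and
enable @cython.cdivision, which drops Python's divide-by-zero check and
negative-modulo handling from the inner loop. Even from pure Python the
scalar-call difference is easy to see (timings will vary by machine):

    import math
    import timeit
    import numpy as np

    n = 100_000
    t_np = timeit.timeit(lambda: np.sqrt(2.0), number=n)
    t_c = timeit.timeit(lambda: math.sqrt(2.0), number=n)
    print("np.sqrt: %.3fs  math.sqrt: %.3fs" % (t_np, t_c))
    # math.sqrt, a thin wrapper over the C library call, is typically
    # several times faster per scalar call than the numpy ufunc dispatch.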


http://bitbucket.org/yt_analysis/yt/changeset/c90fe5fa0d3c/
changeset:   r3899:c90fe5fa0d3c
branch:      yt
user:        sskory
date:        2011-03-21 20:53:27
summary:     About to remove my first crack at the treecode, leaving
only the linked-list method.
affected #:  1 file (1.8 KB)

--- a/yt/utilities/_amr_utils/Octree.pyx	Mon Mar 21 09:03:12 2011 -0700
+++ b/yt/utilities/_amr_utils/Octree.pyx	Mon Mar 21 13:53:27 2011 -0600
@@ -345,7 +345,7 @@
         cdef np.int64_t n2
         cdef int i
         d2 = 0.0
-        if OTN_same(node1, node2): return 100000.0 # Just some large number.
+        if node1 is node2: return 100000.0 # Just some large number.
         if self.top_grid_dims[1] == self.top_grid_dims[0] and \
                 self.top_grid_dims[2] == self.top_grid_dims[0]:
             # Symmetric
@@ -454,7 +454,9 @@
                         sum)
         return potential
 
-    cdef void set_next_initial(self, OctreeNode *node, int treecode):
+    cdef void set_next(self, OctreeNode *node, int treecode):
+        # This sets up the linked list, pointing node.next to the next node
+        # in the iteration order.
         cdef int i, j, k
         if treecode and node.val[0] is not 0.:
             self.last_node.next = node
@@ -463,10 +465,14 @@
         for i in range(2):
             for j in range(2):
                 for k in range(2):
-                    self.set_next_initial(node.children[i][j][k], treecode)
+                    self.set_next(node.children[i][j][k], treecode)
         return
 
-    cdef void set_next_final(self, OctreeNode *node):
+    cdef void set_up_next(self, OctreeNode *node):
+        # This sets up a second linked list, pointing node.up_next to the next
+        # node in the list that is at the same or lower (coarser) level than
+        # this node. This is useful in the treecode for skipping over nodes
+        # that don't need to be inspected.
         cdef int i, j, k
         cdef OctreeNode *initial_next
         cdef OctreeNode *temp_next
@@ -477,34 +483,47 @@
             temp_next = temp_next.next
             if temp_next is NULL: break
         node.up_next = temp_next
-        self.set_next_final(initial_next)
+        self.set_up_next(initial_next)
 
     def finalize(self, int treecode = 0):
         # Set up the linked list for the nodes.
         # Set treecode = 1 if nodes with no mass are to be skipped in the
         # list.
-        cdef int i, j, k, sum
+        cdef int i, j, k, sum, top_grid_total, ii, jj, kk
         self.last_node = self.root_nodes[0][0][0]
         for i in range(self.top_grid_dims[0]):
             for j in range(self.top_grid_dims[1]):
                 for k in range(self.top_grid_dims[2]):
-                    self.set_next_initial(self.root_nodes[i][j][k], treecode)
+                    self.set_next(self.root_nodes[i][j][k], treecode)
         # Now we want to link to the next node in the list that is
-        # on a level the same or lower (coarser) than us.
+        # on a level the same or lower (coarser) than us. This will be used
+        # during a treecode search so we can skip higher-level (finer) nodes.
         sum = 1
+        top_grid_total = self.top_grid_dims[0] * self.top_grid_dims[1] * \
+            self.top_grid_dims[2]
         for i in range(self.top_grid_dims[0]):
             for j in range(self.top_grid_dims[1]):
                 for k in range(self.top_grid_dims[2]):
-                    self.set_next_final(self.root_nodes[i][j][k])
-                    if sum < 7:
-                        if treecode and self.root_nodes[int(sum/4)][int(sum%4/2)][int(sum%2)].val[0] is not 0:
-                            self.root_nodes[i][j][k].up_next = \
-                                self.root_nodes[int(sum/4)][int(sum%4/2)][int(sum%2)]
+                    self.set_up_next(self.root_nodes[i][j][k])
+                    # Point the root_nodes.up_next to the next root_node in the
+                    # list, except for the last one which stays pointing to NULL.
+                    if sum < top_grid_total - 1:
+                        ii = i
+                        jj = j
+                        kk = (k + 1) % self.top_grid_dims[2]
+                        if kk < k:
+                            jj = (j + 1) % self.top_grid_dims[1]
+                            if jj < j:
+                                ii = (i + 1) % self.top_grid_dims[0]
+                        self.root_nodes[i][j][k].up_next = \
+                            self.root_nodes[ii][jj][kk]
                     sum += 1
 
     @cython.cdivision(True)
     cdef np.float64_t fbe_main(self, np.float64_t potential, int truncate,
             np.float64_t kinetic):
+        # The work is done here. Starting at the top of the linked list of
+        # nodes, we walk pairs of nodes and accumulate the potential.

         cdef np.float64_t angle, dist
         cdef OctreeNode *this_node
         cdef OctreeNode *pair_node
@@ -513,12 +532,19 @@
             # Iterate down to a childless node.
             while this_node.children[0][0][0] is not NULL:
                 this_node = this_node.next
+                # In case we reach the end of the list...
                 if this_node is NULL: break
             if truncate and potential > kinetic:
                 print 'Truncating...'
                 break
             pair_node = this_node.next
             while pair_node is not NULL:
+                # If pair_node is massless, we can jump to up_next, because
+                # nothing pair_node contains will have mass either.
+                # I think that this should primarily happen for root_nodes.
+                if pair_node.val[0] is 0.0:
+                    pair_node = pair_node.up_next
+                    continue
                 # If pair_node is a childless node, we can calculate the
                 # right now, and get a new pair_node.
                 if pair_node.children[0][0][0] == NULL:
@@ -533,14 +559,20 @@
                 # up_next because we don't need to look at pair_node's children.
                 angle = self.fbe_opening_angle(this_node, pair_node)
                 if angle < self.opening_angle:
+                    # self.dist was just set in fbe_opening_angle, so we
+                    # can use it here without re-calculating it for these two
+                    # nodes.
                     potential += this_node.val[0] * pair_node.val[0] / self.dist
                     if truncate and potential > kinetic: break
+                    # We can skip all the nodes that are contained within 
+                    # pair_node, saving time walking the linked list.
                     pair_node = pair_node.up_next
                     self.tree_count += 1
                 # If we've gotten this far, pair_node has children, but it's
                 # too coarse, so we simply dig deeper using .next.
                 else:
                     pair_node = pair_node.next
+            # We've exhausted the pair_nodes.
             # Now we find a new this_node in the list.
             this_node = this_node.next
         return potential
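
The ii/jj/kk arithmetic above replaces the earlier hard-coded "sum < 7"
indexing: it computes the row-major successor of root node (i, j, k),
carrying into j when k wraps and into i when j wraps. The same logic as a
small sketch (the helper name is assumed):

    def next_root_index(i, j, k, dims):
        # Row-major successor of (i, j, k) over a dims[0] x dims[1] x dims[2]
        # root grid, mirroring the rollover in finalize().
        kk = (k + 1) % dims[2]
        jj, ii = j, i
        if kk < k:                    # k wrapped around; carry into j
            jj = (j + 1) % dims[1]
            if jj < j:                # j wrapped around too; carry into i
                ii = (i + 1) % dims[0]
        return ii, jj, kk

    # e.g. next_root_index(0, 1, 3, (4, 4, 4)) -> (0, 2, 0)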


http://bitbucket.org/yt_analysis/yt/changeset/792a173698ba/
changeset:   r3900:792a173698ba
branch:      yt
user:        sskory
date:        2011-03-21 22:15:53
summary:     Removed the old treecode method and cleaned things up a bit.
affected #:  2 files (6.5 KB)

--- a/yt/data_objects/derived_quantities.py	Mon Mar 21 13:53:27 2011 -0600
+++ b/yt/data_objects/derived_quantities.py	Mon Mar 21 15:15:53 2011 -0600
@@ -274,7 +274,7 @@
              combine_function=_combBaryonSpinParameter, n_ret=4)
     
 def _IsBound(data, truncate = True, include_thermal_energy = False,
-    treecode = 0, opening_angle = 1.0):
+    treecode = False, opening_angle = 1.0):
     """
     This returns whether or not the object is gravitationally bound
     
@@ -355,7 +355,7 @@
         # Create the octree with these dimensions.
         # One value (mass) with incremental=True.
         octree = Octree(cover_ActiveDimensions, 1, True)
-        print 'here', cover_ActiveDimensions
+        #print 'here', cover_ActiveDimensions
         # Now discover what levels this data comes from, not assuming
         # symmetry.
         dxes = na.unique(data['dx']) # unique returns a sorted array,
@@ -380,30 +380,25 @@
             octree.add_array_to_tree(L, i, j, k,
                 na.array([na.zeros_like(i)], order='F', dtype='float64'),
                 na.zeros_like(i).astype('float64'))
-            print L, ActiveDimensions
-            print i[0], i[-1], j[0], j[-1], k[0],k[-1]
+            #print L, ActiveDimensions
+            #print i[0], i[-1], j[0], j[-1], k[0],k[-1]
         # Now we add actual data to the octree.
         for L, dx, dy, dz in zip(levels, dxes, dyes, dzes):
-            print "adding to octree", L
+            mylog.info("Adding data to Octree for level %d" % L)
             sel = (data["dx"] == dx)
             thisx = (local_data["x"][sel] / dx).astype('int64') - cover_imin[0] * 2**L
             thisy = (local_data["y"][sel] / dy).astype('int64') - cover_imin[1] * 2**L
             thisz = (local_data["z"][sel] / dz).astype('int64') - cover_imin[2] * 2**L
             vals = na.array([local_data["CellMass"][sel]], order='F')
-            print na.min(thisx), na.min(thisy), na.min(thisz)
-            print na.max(thisx), na.max(thisy), na.max(thisz)
+            #print na.min(thisx), na.min(thisy), na.min(thisz)
+            #print na.max(thisx), na.max(thisy), na.max(thisz)
             octree.add_array_to_tree(L, thisx, thisy, thisz, vals,
                na.ones_like(thisx).astype('float64'))
         # Now we calculate the binding energy using a treecode.
-        print 'calculating'
-        if treecode == 1:
-            pot = G*octree.find_binding_energy(truncate, kinetic/G, root_dx,
-                opening_angle)
-        elif treecode == 2:
-            octree.finalize(treecode = 1)
-            pot = G*octree.find_b_e(truncate, kinetic/G, root_dx, opening_angle)
+        octree.finalize(treecode = 1)
+        pot = G*octree.find_binding_energy(truncate, kinetic/G, root_dx,
+            opening_angle)
         #octree.print_all_nodes()
-        #pot = 0
     else:
         try:
             pot = G*_cudaIsBound(local_data, truncate, kinetic/G)


--- a/yt/utilities/_amr_utils/Octree.pyx	Mon Mar 21 13:53:27 2011 -0600
+++ b/yt/utilities/_amr_utils/Octree.pyx	Mon Mar 21 15:15:53 2011 -0600
@@ -82,22 +82,6 @@
     for i in range(self.nvals): self.val[i] = 0.0
     self.weight_val = 0.0
 
-cdef int OTN_same(OctreeNode *node1, OctreeNode *node2):
-    # Returns 1 if node1 == node2; 0 otherwise.
-    if node1 is node2: return 1
-    return 0
-
-cdef int OTN_contained(OctreeNode *node1, OctreeNode *node2):
-    # Returns 1 if node2 contains node1; and 0 otherwise.
-    # node1.level > node2.level.
-    cdef OctreeNode *parent_node
-    parent_node = node1.parent
-    while parent_node is not NULL:
-        if parent_node is node2: return 1
-        parent_node = parent_node.parent
-    # If we've gotten this far, the two nodes are not related.
-    return 0
-
 cdef OctreeNode *OTN_initialize(np.int64_t pos[3], int nvals,
                         np.float64_t *val, np.float64_t weight_val,
                         int level, OctreeNode *parent):
@@ -144,8 +128,6 @@
     # We'll store dist here so it doesn't have to be calculated twice.
     cdef np.float64_t dist
     cdef np.float64_t root_dx[3]
-    cdef int switch
-    cdef int full_count,tree_count
     cdef OctreeNode *last_node
 
     def __cinit__(self, np.ndarray[np.int64_t, ndim=1] top_grid_dims,
@@ -362,103 +344,21 @@
         self.dist = dist
         return d2 / dist
 
-    cdef np.float64_t fbe_potential_of_remote_nodes(self, OctreeNode *node1,
-            int sum):
-        # Given a childless node "node1", calculate the potential for it from
-        # all the other nodes using the treecode method.
-        cdef int i, j, k, this_sum
-        cdef np.float64_t potential
-        potential = 0.0
-        # We *do* want to walk over the root_node that node1 is in to look for
-        # level>0 nodes close to node1, but none of
-        # the previously-walked root_nodes.
-        self.switch = 0
-        this_sum = 0
-        for i in range(self.top_grid_dims[0]):
-            for j in range(self.top_grid_dims[1]):
-                for k in range(self.top_grid_dims[2]):
-                    this_sum += 1
-                    if this_sum < sum: continue
-                    if self.root_nodes[i][j][k].val[0] == 0: continue
-                    potential += self.fbe_iterate_remote_nodes(node1,
-                        self.root_nodes[i][j][k])
-        return potential
-
-    @cython.cdivision(True)
-    cdef np.float64_t fbe_iterate_remote_nodes(self, OctreeNode *node1,
-            OctreeNode *node2):
-
-        # node1 never changes.
-        # node2 is the iterated-upon remote node.
-        # self.switch - In order to prevent double counting, we only want
-        # to call this function between node1 and node2 where node2
-        # comes *after* node1 in the iteration order.
-        # switch=0: do not calculate any potential, but keep on iterating.
-        # switch=1: do potentials and keep on iterating if needed.
-        cdef int i, j, k, contained
-        cdef np.float64_t potential, dist, angle
-        potential = 0.0
-        # Do nothing with node2 when it is the same as node1. No potential
-        # calculation, and no digging deeper. But we flip the switch.
-        if OTN_same(node1, node2):
-            self.switch = 1
-            return 0.0
-        # Is node1 contained inside node2?
-        contained = OTN_contained(node1, node2)
-        # If we have a childless node2 we can skip everything below and
-        # calculate the potential, as long as node2 does not contain node1.
-        # Errr...  contained may not be needed here.
-        if node2.children[0][0][0] == NULL and not contained and \
-                not OTN_same(node1, node2) and self.switch:
-            self.full_count += 1
-            dist = self.fbe_node_separation(node1, node2)
-            return node1.val[0] * node2.val[0] / dist
-        # Now we apply the opening angle test. If the opening angle is small
-        # enough, we use this node for the potential and dig no deeper.
-        angle = self.fbe_opening_angle(node1, node2)
-        if angle < self.opening_angle and not contained and \
-                not OTN_same(node1, node2) and self.switch:
-            dist = self.fbe_node_separation(node1, node2)
-            self.tree_count += 1
-            return node1.val[0] * node2.val[0] / dist
-        # If we've gotten this far with a childless node, it means we've
-        # already accounted for it.
-        if node2.children[0][0][0] == NULL:
-            return 0.0
-        # If the above is not satisfied, we must dig deeper!
-        for i in range(2):
-            for j in range(2):
-                for k in range(2):
-                    if node2.children[i][j][k].val[0] == 0.0: continue
-                    potential += self.fbe_iterate_remote_nodes(node1,
-                        node2.children[i][j][k])
-        return potential
-
-    cdef np.float64_t fbe_iterate_children(self, OctreeNode *node, int sum):
-        # Recursively iterate over child nodes until we get a childless node.
-        cdef int i, j, k
-        cdef np.float64_t potential
-        potential = 0.0
-        # We have a childless node. Time to iterate over every other
-        # node using the treecode method.
-        if node.children[0][0][0] is NULL:
-            potential = self.fbe_potential_of_remote_nodes(node, sum)
-            return potential
-        # If the node has children, we need to walk all of them returning
-        # the potential for each.
-        for i in range(2):
-            for j in range(2):
-                for k in range(2):
-                    if node.children[i][j][k].val[0] == 0.0: continue
-                    potential += self.fbe_iterate_children(node.children[i][j][k],
-                        sum)
-        return potential
-
     cdef void set_next(self, OctreeNode *node, int treecode):
         # This sets up the linked list, pointing node.next to the next node
         # in the iteration order.
         cdef int i, j, k
         if treecode and node.val[0] is not 0.:
+            # In a treecode, we only make a new next link if this node has mass.
+            self.last_node.next = node
+            self.last_node = node
+        elif treecode and node.val[0] is 0.:
+            # No mass means its children have no mass either; no need to dig
+            # any further.
+            return
+        else:
+            # We're not doing the treecode, but we still want a linked list;
+            # in that case we don't care about val[0].
             self.last_node.next = node
             self.last_node = node
         if node.children[0][0][0] is NULL: return
@@ -541,18 +441,18 @@
             while pair_node is not NULL:
                 # If pair_node is massless, we can jump to up_next, because
                 # nothing pair_node contains will have mass either.
-                # I think that this should primarily happen for root_nodes.
+                # I think that this should primarily happen for root_nodes
+                # created for padding to make the region cubical.
                 if pair_node.val[0] is 0.0:
                     pair_node = pair_node.up_next
                     continue
-                # If pair_node is a childless node, we can calculate the
+                # If pair_node is a childless node, we can calculate the pot
                 # right now, and get a new pair_node.
                 if pair_node.children[0][0][0] == NULL:
                     dist = self.fbe_node_separation(this_node, pair_node)
                     potential += this_node.val[0] * pair_node.val[0] / dist
                     if truncate and potential > kinetic: break
                     pair_node = pair_node.next
-                    self.full_count += 1
                     continue
                 # Next, if the opening angle to pair_node is small enough,
                 # calculate the potential and get a new pair_node using
@@ -567,31 +467,16 @@
                     # We can skip all the nodes that are contained within 
                     # pair_node, saving time walking the linked list.
                     pair_node = pair_node.up_next
-                    self.tree_count += 1
                 # If we've gotten this far, pair_node has children, but it's
                 # too coarse, so we simply dig deeper using .next.
                 else:
                     pair_node = pair_node.next
             # We've exhausted the pair_nodes.
-            # Now we find a new this_node in the list.
+            # Now we find a new this_node in the list, and do more searches
+            # over pair_node.
             this_node = this_node.next
         return potential
 
-    def find_b_e(self, int truncate, float kinetic,
-        np.ndarray[np.float64_t, ndim=1] root_dx, float opening_angle = 1.0):
-        cdef int i, j, k, sum
-        cdef np.float64_t potential
-        potential = 0.0
-        self.tree_count = 0
-        self.full_count = 0
-        self.opening_angle = opening_angle
-        for i in range(3):
-            self.root_dx[i] = root_dx[i]
-        potential = self.fbe_main(potential, truncate, kinetic)
-        print 'full', self.full_count
-        print 'tree', self.tree_count
-        return potential
-
     @cython.boundscheck(False)
     @cython.wraparound(False)
     def find_binding_energy(self, int truncate, float kinetic,
@@ -601,42 +486,15 @@
         
         Note: The first entry of the vals array MUST be Mass.
         """
-        # Here are the order of events:
-        # 1. We loop over all of the root_nodes, below.
-        # 2. In fbe_iterate_children, each of these nodes is iterated until
-        #    we reach a node without any children.
-        # 3. Next, starting in fbe_potential_of_remote_nodes we again loop over
-        #    root_nodes, not double-counting, calling fbe_iterate_remote_nodes.
-        # 4. In fbe_iterate_remote_nodes, if we have a childless node, we
-        #    calculate the potential between the two nodes and return. Or, if
-        #    we have a node with a small enough opening angle, we return the
-        #    potential. If neither of these are done, we call
-        #    fbe_iterate_remote_nodes on the children nodes.
-        # 5. All of this returns a total, non-double-counted potential.
+        # The real work is done in fbe_main(), this just sets things up
+        # and returns the potential.
         cdef int i, j, k, sum
         cdef np.float64_t potential
         potential = 0.0
         self.opening_angle = opening_angle
-        self.tree_count = 0
-        self.full_count = 0
         for i in range(3):
             self.root_dx[i] = root_dx[i]
-        # The first part of the loop goes over all of the root level nodes.
-        sum = 0
-        for i in range(self.top_grid_dims[0]):
-            for j in range(self.top_grid_dims[1]):
-                for k in range(self.top_grid_dims[2]):
-                    if self.root_nodes[i][j][k].val[0] == 0.0: continue
-                    sum += 1
-                    potential += self.fbe_iterate_children(self.root_nodes[i][j][k],
-                        sum)
-                    if truncate and potential > kinetic: break
-                if truncate and potential > kinetic: break
-            if truncate and potential > kinetic:
-                print "Truncating!"
-                break
-        print 'full', self.full_count
-        print 'tree', self.tree_count
+        potential = self.fbe_main(potential, truncate, kinetic)
         return potential
 
     cdef int node_ID(self, OctreeNode *node):
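
With the recursive walker removed, set_next carries the pruning burden: when
building the list for the treecode, a massless node is skipped outright,
because nothing beneath it can have mass either. A sketch of that pruning
(the dataclass shape is an assumption):

    from dataclasses import dataclass, field

    @dataclass
    class Node:
        mass: float
        children: list = field(default_factory=list)
        next: object = None

    def link_massive(node, last, treecode=True):
        # Depth-first threading that leaves massless subtrees out of the
        # list when treecode is set; returns the running list tail.
        if treecode and node.mass == 0.0:
            return last
        last.next = node
        last = node
        for child in node.children:
            last = link_massive(child, last, treecode)
        return last

    # Usage: seed with the first root node as the tail, then feed each
    # remaining root node through link_massive in turn.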


http://bitbucket.org/yt_analysis/yt/changeset/fee901ff6b65/
changeset:   r3901:fee901ff6b65
branch:      yt
user:        sskory
date:        2011-03-24 18:08:54
summary:     The segfault may have been fixed.
affected #:  2 files (1.2 KB)

--- a/yt/utilities/_amr_utils/Octree.pyx	Mon Mar 21 15:15:53 2011 -0600
+++ b/yt/utilities/_amr_utils/Octree.pyx	Thu Mar 24 11:08:54 2011 -0600
@@ -419,6 +419,18 @@
                             self.root_nodes[ii][jj][kk]
                     sum += 1
 
+    cdef int fbe_test_next(self, OctreeNode *node):
+        # Return false 0 if none of node's children have mass, or this node
+        # has no children
+        # Return true 1 if at least one of the children have mass.
+        cdef int i, j, k
+        if node.children[0][0][0] is NULL: return 0
+        for i in range(2):
+            for j in range(2):
+                for k in range(2):
+                    if node.children[i][j][k].val[0] > 0.: return 1
+        return 0
+
     @cython.cdivision(True)
     cdef np.float64_t fbe_main(self, np.float64_t potential, int truncate,
             np.float64_t kinetic):
@@ -427,13 +439,19 @@
         cdef np.float64_t angle, dist
         cdef OctreeNode *this_node
         cdef OctreeNode *pair_node
+        cdef int pair_count
+        cdef int to_break
+        to_break = 0
         this_node = self.root_nodes[0][0][0]
         while this_node is not NULL:
-            # Iterate down to a childless node.
-            while this_node.children[0][0][0] is not NULL:
+            # Iterate down the list to a node that either has no children,
+            # or all of its children are massless. The second case is when data
+            # from a level that isn't the deepest has been added to the tree.
+            while self.fbe_test_next(this_node):
                 this_node = this_node.next
                 # In case we reach the end of the list...
                 if this_node is NULL: break
+            if this_node is NULL: break
             if truncate and potential > kinetic:
                 print 'Truncating...'
                 break
@@ -446,9 +464,11 @@
                 if pair_node.val[0] is 0.0:
                     pair_node = pair_node.up_next
                     continue
-                # If pair_node is a childless node, we can calculate the pot
+                # If pair_node is a childless node, or is a coarser node with
+                # no children, we can calculate the pot
                 # right now, and get a new pair_node.
-                if pair_node.children[0][0][0] == NULL:
+                #if pair_node.children[0][0][0] == NULL:
+                if not self.fbe_test_next(pair_node):
                     dist = self.fbe_node_separation(this_node, pair_node)
                     potential += this_node.val[0] * pair_node.val[0] / dist
                     if truncate and potential > kinetic: break
@@ -479,7 +499,7 @@
 
     @cython.boundscheck(False)
     @cython.wraparound(False)
-    def find_binding_energy(self, int truncate, float kinetic,
+    def find_binding_energy(self, int truncate, np.float64_t kinetic,
         np.ndarray[np.float64_t, ndim=1] root_dx, float opening_angle = 1.0):
         r"""Find the binding energy of an ensemble of data points using the
         treecode method.


--- a/yt/utilities/data_point_utilities.c	Mon Mar 21 15:15:53 2011 -0600
+++ b/yt/utilities/data_point_utilities.c	Thu Mar 24 11:08:54 2011 -0600
@@ -1567,7 +1567,6 @@
     float workDone = 0;
     int every_cells = floor(n_q / 100);
     int until_output = 1;
-    double temp1, temp2;
     for (q_outer = 0; q_outer < n_q - 1; q_outer++) {
         this_potential = 0;
         mass_o = *(npy_float64*) PyArray_GETPTR1(mass, q_outer);
@@ -1583,13 +1582,6 @@
                             sqrtl( (x_i-x_o)*(x_i-x_o)
                                  + (y_i-y_o)*(y_i-y_o)
                                  + (z_i-z_o)*(z_i-z_o) );
-            temp1 = sqrtl( (x_i-x_o)*(x_i-x_o)
-                                 + (y_i-y_o)*(y_i-y_o)
-                                 + (z_i-z_o)*(z_i-z_o) );
-            temp2 = mass_o * mass_i / 
-                            sqrtl( (x_i-x_o)*(x_i-x_o)
-                                 + (y_i-y_o)*(y_i-y_o)
-                                 + (z_i-z_o)*(z_i-z_o) );
         }
         total_potential += this_potential;
 	workDone += n_q - q_outer - 1;
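
The likely segfault was the descent loop walking off the end of the list:
after the inner while exhausts .next, this_node can be NULL, and the newly
added outer check bails out before it is dereferenced. fbe_test_next,
sketched below, is the other half of the fix: only keep descending past
nodes that still have massive children (function names mirror the Cython;
the node fields .children and .mass are assumed):

    def has_massive_children(node):
        # Counterpart of fbe_test_next: true only if this node has children
        # and at least one of them carries mass.
        if not node.children:
            return False
        return any(child.mass > 0.0 for child in node.children)

    def descend_to_workable(node):
        # Walk the .next list until we find a node usable as a point mass,
        # or fall off the end and return None (the NULL check added here).
        while node is not None and has_massive_children(node):
            node = node.next
        return node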


http://bitbucket.org/yt_analysis/yt/changeset/ec42a6b43cfd/
changeset:   r3902:ec42a6b43cfd
branch:      yt
user:        sskory
date:        2011-03-26 16:36:44
summary:     Adding a small modification which should speed things up a skosh.
affected #:  2 files (251 bytes)

--- a/yt/data_objects/derived_quantities.py	Thu Mar 24 11:08:54 2011 -0600
+++ b/yt/data_objects/derived_quantities.py	Sat Mar 26 09:36:44 2011 -0600
@@ -393,9 +393,10 @@
             #print na.min(thisx), na.min(thisy), na.min(thisz)
             #print na.max(thisx), na.max(thisy), na.max(thisz)
             octree.add_array_to_tree(L, thisx, thisy, thisz, vals,
-               na.ones_like(thisx).astype('float64'))
+               na.ones_like(thisx).astype('float64'), treecode = 1)
         # Now we calculate the binding energy using a treecode.
         octree.finalize(treecode = 1)
+        mylog.info("Using a treecode to find gravitational energy for %d cells." % local_data['x'].size)
         pot = G*octree.find_binding_energy(truncate, kinetic/G, root_dx,
             opening_angle)
         #octree.print_all_nodes()


--- a/yt/utilities/_amr_utils/Octree.pyx	Thu Mar 24 11:08:54 2011 -0600
+++ b/yt/utilities/_amr_utils/Octree.pyx	Sat Mar 26 09:36:44 2011 -0600
@@ -51,17 +51,20 @@
     np.int64_t pos[3]
     int level
     int nvals
+    int max_level # The maximum level under this node with mass.
     OctreeNode *children[2][2][2]
     OctreeNode *parent
     OctreeNode *next
     OctreeNode *up_next
 
 cdef void OTN_add_value(OctreeNode *self,
-        np.float64_t *val, np.float64_t weight_val):
+        np.float64_t *val, np.float64_t weight_val, int level, int treecode):
     cdef int i
     for i in range(self.nvals):
         self.val[i] += val[i]
     self.weight_val += weight_val
+    if treecode and val[0] > 0.:
+        self.max_level = imax(self.max_level, level)
 
 cdef void OTN_refine(OctreeNode *self, int incremental = 0):
     cdef int i, j, k, i1, j1
@@ -95,6 +98,7 @@
     node.parent = parent
     node.next = NULL
     node.up_next = NULL
+    node.max_level = 0
     node.val = <np.float64_t *> malloc(
                 nvals * sizeof(np.float64_t))
     for i in range(nvals):
@@ -170,14 +174,14 @@
     cdef void add_to_position(self,
                  int level, np.int64_t pos[3],
                  np.float64_t *val,
-                 np.float64_t weight_val):
+                 np.float64_t weight_val, treecode):
         cdef int i, j, k, L
         cdef OctreeNode *node
         node = self.find_on_root_level(pos, level)
         cdef np.int64_t fac
         for L in range(level):
             if self.incremental:
-                OTN_add_value(node, val, weight_val)
+                OTN_add_value(node, val, weight_val, level, treecode)
             if node.children[0][0][0] == NULL:
                 OTN_refine(node, self.incremental)
             # Maybe we should use bitwise operators?
@@ -186,7 +190,7 @@
             j = (pos[1] >= fac*(2*node.pos[1]+1))
             k = (pos[2] >= fac*(2*node.pos[2]+1))
             node = node.children[i][j][k]
-        OTN_add_value(node, val, weight_val)
+        OTN_add_value(node, val, weight_val, level, treecode)
             
     cdef OctreeNode *find_on_root_level(self, np.int64_t pos[3], int level):
         # We need this because the root level won't just have four children
@@ -205,7 +209,8 @@
             np.ndarray[np.int64_t, ndim=1] pys,
             np.ndarray[np.int64_t, ndim=1] pzs,
             np.ndarray[np.float64_t, ndim=2] pvals,
-            np.ndarray[np.float64_t, ndim=1] pweight_vals):
+            np.ndarray[np.float64_t, ndim=1] pweight_vals,
+            int treecode = 0):
         cdef int np = pxs.shape[0]
         cdef int p
         cdef cnp.float64_t *vals
@@ -216,7 +221,7 @@
             pos[0] = pxs[p]
             pos[1] = pys[p]
             pos[2] = pzs[p]
-            self.add_to_position(level, pos, vals, pweight_vals[p])
+            self.add_to_position(level, pos, vals, pweight_vals[p], treecode)
 
     def add_grid_to_tree(self, int level,
                          np.ndarray[np.int64_t, ndim=1] start_index,
@@ -390,6 +395,7 @@
         # Set treecode = 1 if nodes with no mass are to be skipped in the
         # list.
         cdef int i, j, k, sum, top_grid_total, ii, jj, kk
+        cdef OctreeNode *this_node
         self.last_node = self.root_nodes[0][0][0]
         for i in range(self.top_grid_dims[0]):
             for j in range(self.top_grid_dims[1]):
@@ -419,18 +425,6 @@
                             self.root_nodes[ii][jj][kk]
                     sum += 1
 
-    cdef int fbe_test_next(self, OctreeNode *node):
-        # Return false 0 if none of node's children have mass, or this node
-        # has no children
-        # Return true 1 if at least one of the children have mass.
-        cdef int i, j, k
-        if node.children[0][0][0] is NULL: return 0
-        for i in range(2):
-            for j in range(2):
-                for k in range(2):
-                    if node.children[i][j][k].val[0] > 0.: return 1
-        return 0
-
     @cython.cdivision(True)
     cdef np.float64_t fbe_main(self, np.float64_t potential, int truncate,
             np.float64_t kinetic):
@@ -444,10 +438,11 @@
         to_break = 0
         this_node = self.root_nodes[0][0][0]
         while this_node is not NULL:
-            # Iterate down the list to a node that either has no children,
-            # or all of its children are massless. The second case is when data
+            # Iterate down the list to a node that either has no children and
+            # is at the max_level of the tree, or to a node where
+            # all of its children are massless. The second case is when data
             # from a level that isn't the deepest has been added to the tree.
-            while self.fbe_test_next(this_node):
+            while this_node.max_level != this_node.level:
                 this_node = this_node.next
                 # In case we reach the end of the list...
                 if this_node is NULL: break
@@ -467,8 +462,7 @@
                 # If pair_node is a childless node, or is a coarser node whose
                 # children carry no mass, we can calculate the pot
                 # right now, and get a new pair_node.
-                #if pair_node.children[0][0][0] == NULL:
-                if not self.fbe_test_next(pair_node):
+                if pair_node.max_level == pair_node.level:
                     dist = self.fbe_node_separation(this_node, pair_node)
                     potential += this_node.val[0] * pair_node.val[0] / dist
                     if truncate and potential > kinetic: break

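The opening_angle test above is the standard Barnes-Hut criterion: a node of linear size s at distance d is treated as a single point mass when s/d < opening_angle, and otherwise its children are visited. A self-contained sketch of the idea, using an illustrative node class rather than yt's OctreeNode:

    import math

    class Node(object):
        # Illustrative stand-in for an octree node, not yt's OctreeNode.
        def __init__(self, size, com, mass, children=()):
            self.size = size          # linear extent of the node
            self.com = com            # (x, y, z) center of mass
            self.mass = mass          # total mass under the node
            self.children = children  # empty for a leaf

    def potential_on_point(p, pmass, node, opening_angle=1.0):
        # Accumulate the (G-free) potential between a point mass at p
        # and a node, opening the node when it subtends too large an
        # angle.  Assumes the point lies outside the node, so dist > 0.
        dist = math.sqrt(sum((a - b) ** 2 for a, b in zip(p, node.com)))
        if not node.children or node.size / dist < opening_angle:
            return pmass * node.mass / dist
        return sum(potential_on_point(p, pmass, c, opening_angle)
                   for c in node.children)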

http://bitbucket.org/yt_analysis/yt/changeset/f1693d900bc1/
changeset:   r3903:f1693d900bc1
branch:      yt
user:        sskory
date:        2011-03-28 21:44:14
summary:     Finishing touches to the treecode pre-merge.
affected #:  2 files (993 bytes)

--- a/yt/data_objects/derived_quantities.py	Sat Mar 26 09:36:44 2011 -0600
+++ b/yt/data_objects/derived_quantities.py	Mon Mar 28 13:44:14 2011 -0600
@@ -274,15 +274,35 @@
              combine_function=_combBaryonSpinParameter, n_ret=4)
     
 def _IsBound(data, truncate = True, include_thermal_energy = False,
-    treecode = False, opening_angle = 1.0):
-    """
-    This returns whether or not the object is gravitationally bound
+    treecode = True, opening_angle = 1.0, periodic_test = False):
+    r"""
+    This returns whether or not the object is gravitationally bound. If this
+    returns a value greater than one, the object is bound; otherwise it is not.
     
-    :param truncate: Should the calculation stop once the ratio of
-                     gravitational:kinetic is 1.0?
-    :param include_thermal_energy: Should we add the energy from ThermalEnergy
-                                   on to the kinetic energy to calculate 
-                                   binding energy?
+    Parameters
+    ----------
+    truncate : Bool
+        Should the calculation stop once the ratio of
+        gravitational:kinetic is 1.0?
+    include_thermal_energy : Bool
+        Should we add the energy from ThermalEnergy
+        on to the kinetic energy to calculate 
+        binding energy?
+    treecode : Bool
+        Whether or not to use the treecode.
+    opening_angle : Float 
+        The maximal angle a remote node may subtend in order
+        for the treecode method of mass conglomeration to be
+        used to calculate the potential between masses.
+    periodic_test : Bool 
+        Used for testing the periodic adjustment machinery
+        of this derived quantity. 
+
+    Examples
+    --------
+    >>> sp.quantities["IsBound"](truncate=False,
+    ... include_thermal_energy=True, treecode=False, opening_angle=2.0)
+    0.32493
     """
     # Kinetic energy
     bv_x,bv_y,bv_z = data.quantities["BulkVelocity"]()
@@ -296,6 +316,8 @@
     if (include_thermal_energy):
         thermal = (data["ThermalEnergy"] * data["CellMass"]).sum()
         kinetic += thermal
+    if periodic_test:
+        kinetic = na.ones_like(kinetic)
     # Gravitational potential energy
     # We only divide once here because we have velocity in cgs, but radius is
     # in code.
@@ -310,6 +332,7 @@
         # cells, I think it's reasonable to assume that the clump wraps around.
         diff = sorted[1:] - sorted[0:-1]
         if (diff >= two_root[i]).any():
+            mylog.info("Adjusting clump for periodic boundaries in dim %s" % dim)
             # We will record the distance of the larger of the two values that
             # define the gap from the right boundary, which we'll use for the
             # periodic adjustment later.
@@ -319,8 +342,8 @@
             # wrap-around.
             periodic[i] = data.pf.domain_right_edge[i] - sorted[index + 1] + \
                 two_root[i] / 2.
-    # This dict won't make a copy of the data, but we will make a copy to 
-    # change if needed in the periodic section.
+    # This dict won't make a copy of the data, but it will make a copy to 
+    # change if needed in the periodic section immediately below.
     local_data = {}
     for label in ["x", "y", "z", "CellMass"]:
         local_data[label] = data[label]
@@ -332,6 +355,8 @@
             local_data[dim] = data[dim].copy()
             local_data[dim] += periodic[i]
             local_data[dim] %= domain_period[i]
+    if periodic_test:
+        local_data["CellMass"] = na.ones_like(local_data["CellMass"])
     import time
     t1 = time.time()
     if treecode:
@@ -380,8 +405,6 @@
             octree.add_array_to_tree(L, i, j, k,
                 na.array([na.zeros_like(i)], order='F', dtype='float64'),
                 na.zeros_like(i).astype('float64'))
-            #print L, ActiveDimensions
-            #print i[0], i[-1], j[0], j[-1], k[0],k[-1]
         # Now we add actual data to the octree.
         for L, dx, dy, dz in zip(levels, dxes, dyes, dzes):
             mylog.info("Adding data to Octree for level %d" % L)
@@ -390,8 +413,6 @@
             thisy = (local_data["y"][sel] / dy).astype('int64') - cover_imin[1] * 2**L
             thisz = (local_data["z"][sel] / dz).astype('int64') - cover_imin[2] * 2**L
             vals = na.array([local_data["CellMass"][sel]], order='F')
-            #print na.min(thisx), na.min(thisy), na.min(thisz)
-            #print na.max(thisx), na.max(thisy), na.max(thisz)
             octree.add_array_to_tree(L, thisx, thisy, thisz, vals,
                na.ones_like(thisx).astype('float64'), treecode = 1)
         # Now we calculate the binding energy using a treecode.

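The periodic re-centering above works by sorting the cell coordinates along each axis and looking for a gap of at least two root-grid cells; anything on the far side of the gap is assumed to have wrapped, and the whole clump is shifted modulo the domain period so it becomes contiguous. A standalone numpy sketch of the same trick (illustrative, not the yt code path):

    import numpy as np

    def recenter_periodic(coords, domain_left, domain_right, root_dims):
        # coords is an (N, 3) array of positions; root_dims is the
        # number of root-grid cells per axis.  domain_left/right and
        # root_dims are length-3 numpy arrays.  Like the code above,
        # the final wrap assumes the left edge sits at zero.
        coords = coords.copy()
        period = domain_right - domain_left
        two_root = 2.0 * period / root_dims
        for i in range(3):
            s = np.sort(coords[:, i])
            diff = s[1:] - s[:-1]
            gap = diff >= two_root[i]
            if gap.any():
                index = np.nonzero(gap)[0].min()
                # Shift by the distance of the first value past the gap
                # from the right edge, plus half a root cell, then wrap.
                shift = domain_right[i] - s[index + 1] + two_root[i] / 2.0
                coords[:, i] = (coords[:, i] + shift) % period[i]
        return coords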

--- a/yt/utilities/_amr_utils/Octree.pyx	Sat Mar 26 09:36:44 2011 -0600
+++ b/yt/utilities/_amr_utils/Octree.pyx	Mon Mar 28 13:44:14 2011 -0600
@@ -498,7 +498,8 @@
         r"""Find the binding energy of an ensemble of data points using the
         treecode method.
         
-        Note: The first entry of the vals array MUST be Mass.
+        Note: The first entry of the vals array MUST be Mass. Any other
+        values will be ignored, including the weight array.
         """
         # The real work is done in fbe_main(), this just sets things up
         # and returns the potential.
@@ -572,6 +573,18 @@
         return
 
     def print_all_nodes(self):
+        r"""
+        Prints out information about all the nodes in the octree.
+        
+        Parameters
+        ----------
+        None.
+        
+        Examples
+        --------
+        >>> octree.print_all_nodes()
+        (many lines of data)
+        """
         cdef int i, j, k
         sys.stdout.flush()
         sys.stderr.flush()


http://bitbucket.org/yt_analysis/yt/changeset/9c6df26b5072/
changeset:   r3904:9c6df26b5072
branch:      yt
user:        sskory
date:        2011-03-28 21:44:34
summary:     Merging.
affected #:  4 files (0 bytes)

--- a/.bzrignore	Mon Mar 28 13:44:14 2011 -0600
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,4 +0,0 @@
-__config__.py
-build
-hdf5.cfg
-setuptools-0.6c9-py2.5.egg


--- a/CREDITS	Mon Mar 28 13:44:14 2011 -0600
+++ b/CREDITS	Mon Mar 28 13:44:34 2011 -0600
@@ -12,6 +12,9 @@
                                 Oliver Hahn (ohahn at stanford.edu)
                                 John ZuHone (jzuhone at cfa.harvard.edu)
                                 Chris Malone (cmalone at mail.astro.sunysb.edu)
+                                Cameron Hummels (chummels at astro.columbia.edu)
+                                Stefan Klemer (sklemer at phys.uni-goettingen.de)
+                                Andrew Myers (atmyers at astro.berkeley.edu)
 
 We also include the Delaunay Triangulation module written by Robert Kern of
 Enthought, the cmdln.py module by Trent Mick, and the progressbar module by


--- a/doc/install_script.sh	Mon Mar 28 13:44:14 2011 -0600
+++ b/doc/install_script.sh	Mon Mar 28 13:44:34 2011 -0600
@@ -50,6 +50,10 @@
 MPL_SUPP_CFLAGS=""
 MPL_SUPP_CXXFLAGS=""
 
+# If you want to spawn multiple Make jobs, here's the place to set the
+# arguments.  For instance, "-j4"
+MAKE_PROCS=""
+
 #------------------------------------------------------------------------------#
 #                                                                              #
 # Okay, the script starts here.  Feel free to play with it, but hopefully      #
@@ -239,12 +243,12 @@
     cd $1
     if [ ! -z `echo $1 | grep h5py` ]
     then
-	    echo "${DEST_DIR}/bin/python2.6 setup.py configure --hdf5=${HDF5_DIR}"
-	    ( ${DEST_DIR}/bin/python2.6 setup.py configure --hdf5=${HDF5_DIR} 2>&1 ) 1>> ${LOG_FILE} || do_exit
+	    echo "${DEST_DIR}/bin/python2.7 setup.py configure --hdf5=${HDF5_DIR}"
+	    ( ${DEST_DIR}/bin/python2.7 setup.py configure --hdf5=${HDF5_DIR} 2>&1 ) 1>> ${LOG_FILE} || do_exit
     fi
     shift
-    ( ${DEST_DIR}/bin/python2.6 setup.py build   $* 2>&1 ) 1>> ${LOG_FILE} || do_exit
-    ( ${DEST_DIR}/bin/python2.6 setup.py install    2>&1 ) 1>> ${LOG_FILE} || do_exit
+    ( ${DEST_DIR}/bin/python2.7 setup.py build   $* 2>&1 ) 1>> ${LOG_FILE} || do_exit
+    ( ${DEST_DIR}/bin/python2.7 setup.py install    2>&1 ) 1>> ${LOG_FILE} || do_exit
     touch done
     cd ..
 }
@@ -274,7 +278,7 @@
 if [ -z "$HDF5_DIR" ]
 then
     echo "Downloading HDF5"
-    get_enzotools hdf5-1.6.9.tar.gz
+    get_enzotools hdf5-1.8.6.tar.gz
 fi
 
 [ $INST_ZLIB -eq 1 ] && get_enzotools zlib-1.2.3.tar.bz2 
@@ -282,15 +286,14 @@
 [ $INST_PNG -eq 1 ] && get_enzotools libpng-1.2.43.tar.gz
 [ $INST_FTYPE -eq 1 ] && get_enzotools freetype-2.4.4.tar.gz
 [ $INST_SQLITE3 -eq 1 ] && get_enzotools sqlite-autoconf-3070500.tar.gz
-get_enzotools Python-2.6.3.tgz
+get_enzotools Python-2.7.1.tgz
 get_enzotools numpy-1.5.1.tar.gz
 get_enzotools matplotlib-1.0.0.tar.gz
-get_enzotools mercurial-1.7.3.tar.gz
+get_enzotools mercurial-1.8.1.tar.gz
 get_enzotools ipython-0.10.tar.gz
 get_enzotools h5py-1.2.0.tar.gz
 get_enzotools Cython-0.14.tar.gz
 get_enzotools Forthon-0.8.4.tar.gz
-get_enzotools yt.hg
 
 if [ $INST_BZLIB -eq 1 ]
 then
@@ -335,7 +338,7 @@
         [ ! -e libpng-1.2.43 ] && tar xfz libpng-1.2.43.tar.gz
         echo "Installing PNG"
         cd libpng-1.2.43
-        ( ./configure CFLAGS=-I${DEST_DIR}/include --prefix=${DEST_DIR}/ 2>&1 ) 1>> ${LOG_FILE} || do_exit
+        ( ./configure CPPFLAGS=-I${DEST_DIR}/include CFLAGS=-I${DEST_DIR}/include --prefix=${DEST_DIR}/ 2>&1 ) 1>> ${LOG_FILE} || do_exit
         ( make install 2>&1 ) 1>> ${LOG_FILE} || do_exit
         touch done
         cd ..
@@ -364,13 +367,13 @@
 
 if [ -z "$HDF5_DIR" ]
 then
-    if [ ! -e hdf5-1.6.9/done ]
+    if [ ! -e hdf5-1.8.6/done ]
     then
-        [ ! -e hdf5-1.6.9 ] && tar xfz hdf5-1.6.9.tar.gz
+        [ ! -e hdf5-1.8.6 ] && tar xfz hdf5-1.8.6.tar.gz
         echo "Installing HDF5"
-        cd hdf5-1.6.9
+        cd hdf5-1.8.6
         ( ./configure --prefix=${DEST_DIR}/ --enable-shared 2>&1 ) 1>> ${LOG_FILE} || do_exit
-        ( make install 2>&1 ) 1>> ${LOG_FILE} || do_exit
+        ( make ${MAKE_PROCS} install 2>&1 ) 1>> ${LOG_FILE} || do_exit
         touch done
         cd ..
     fi
@@ -388,31 +391,32 @@
         echo "Installing SQLite3"
         cd sqlite-autoconf-3070500
         ( ./configure --prefix=${DEST_DIR}/ 2>&1 ) 1>> ${LOG_FILE} || do_exit
-        ( make install 2>&1 ) 1>> ${LOG_FILE} || do_exit
+        ( make ${MAKE_PROCS} install 2>&1 ) 1>> ${LOG_FILE} || do_exit
         touch done
         cd ..
     fi
 fi
 
-if [ ! -e Python-2.6.3/done ]
+if [ ! -e Python-2.7.1/done ]
 then
     echo "Installing Python.  This may take a while, but don't worry.  YT loves you."
-    [ ! -e Python-2.6.3 ] && tar xfz Python-2.6.3.tgz
-    cd Python-2.6.3
+    [ ! -e Python-2.7.1 ] && tar xfz Python-2.7.1.tgz
+    cd Python-2.7.1
     ( ./configure --prefix=${DEST_DIR}/ 2>&1 ) 1>> ${LOG_FILE} || do_exit
 
-    ( make 2>&1 ) 1>> ${LOG_FILE} || do_exit
+    ( make ${MAKE_PROCS} 2>&1 ) 1>> ${LOG_FILE} || do_exit
     ( make install 2>&1 ) 1>> ${LOG_FILE} || do_exit
+    ( ln -sf ${DEST_DIR}/bin/python2.7 ${DEST_DIR}/bin/pyyt 2>&1 ) 1>> ${LOG_FILE}
     touch done
     cd ..
 fi
 
-export PYTHONPATH=${DEST_DIR}/lib/python2.6/site-packages/
+export PYTHONPATH=${DEST_DIR}/lib/python2.7/site-packages/
 
 if [ $INST_HG -eq 1 ]
 then
     echo "Installing Mercurial."
-    do_setup_py mercurial-1.7.3
+    do_setup_py mercurial-1.8.1
     export HG_EXEC=${DEST_DIR}/bin/hg
 else
     # We assume that hg can be found in the path.
@@ -436,16 +440,11 @@
     elif [ ! -e yt-hg ] 
     then
         YT_DIR="$PWD/yt-hg/"
+        ( ${HG_EXEC} --debug clone http://hg.enzotools.org/yt-supplemental/ 2>&1 ) 1>> ${LOG_FILE}
         # Recently the hg server has had some issues with timeouts.  In lieu of
         # a new webserver, we are now moving to a three-stage process.
         # First we clone the repo, but only up to r0.
-        ( ${HG_EXEC} --debug clone -r0 http://hg.enzotools.org/yt/ ./yt-hg 2>&1 ) 1>> ${LOG_FILE}
-        # Now we unbundle our previously downloaded bundle of changesets.
-        # This bundle has been created to include most of the recent
-        # changesets, which should avoid any problematic timeouts.
-        ( ${HG_EXEC} -R ${YT_DIR} unbundle yt.hg 2>&1 ) 1>> ${LOG_FILE}
-        # Now we pull new changes
-        ( ${HG_EXEC} -R ${YT_DIR} pull 2>&1 ) 1>> ${LOG_FILE}
+        ( ${HG_EXEC} --debug clone http://hg.enzotools.org/yt/ ./yt-hg 2>&1 ) 1>> ${LOG_FILE}
         # Now we update to the branch we're interested in.
         ( ${HG_EXEC} -R ${YT_DIR} up -C ${BRANCH} 2>&1 ) 1>> ${LOG_FILE}
     elif [ -e yt-hg ] 
@@ -459,10 +458,10 @@
 unset LDFLAGS 
 
 echo "Installing distribute"
-( ${DEST_DIR}/bin/python2.6 ${YT_DIR}/distribute_setup.py 2>&1 ) 1>> ${LOG_FILE} || do_exit
+( ${DEST_DIR}/bin/python2.7 ${YT_DIR}/distribute_setup.py 2>&1 ) 1>> ${LOG_FILE} || do_exit
 
 echo "Installing pip"
-( ${DEST_DIR}/bin/easy_install-2.6 pip 2>&1 ) 1>> ${LOG_FILE} || do_exit
+( ${DEST_DIR}/bin/easy_install-2.7 pip 2>&1 ) 1>> ${LOG_FILE} || do_exit
 
 do_setup_py numpy-1.5.1 ${NUMPY_ARGS}
 
@@ -511,7 +510,7 @@
 [ $INST_PNG -eq 1 ] && echo $PNG_DIR > png.cfg
 [ $INST_FTYPE -eq 1 ] && echo $FTYPE_DIR > freetype.cfg
 [ $INST_FORTHON -eq 1 ] && ( ( cd yt/utilities/kdtree && FORTHON_EXE=${DEST_DIR}/bin/Forthon make 2>&1 ) 1>> ${LOG_FILE} )
-( ${DEST_DIR}/bin/python2.6 setup.py develop 2>&1 ) 1>> ${LOG_FILE} || do_exit
+( ${DEST_DIR}/bin/python2.7 setup.py develop 2>&1 ) 1>> ${LOG_FILE} || do_exit
 touch done
 cd $MY_PWD
 
@@ -523,6 +522,20 @@
     cd $MY_PWD
 fi
 
+if [ -e $HOME/.matplotlib/fontList.cache ] && \
+   ( grep -q python2.6 $HOME/.matplotlib/fontList.cache )
+then
+    echo "WARNING WARNING WARNING WARNING WARNING WARNING WARNING"
+    echo "*******************************************************"
+    echo
+    echo "  You likely need to remove your old fontList.cache!"
+    echo "  You can do this with this command:"
+    echo ""
+    echo "  rm $HOME/.matplotlib/fontList.cache"
+    echo
+    echo "*******************************************************"
+fi
+
 function print_afterword
 {
     echo
@@ -535,7 +548,7 @@
     echo
     echo "YT_DEST         => $DEST_DIR"
     echo "PATH            => $DEST_DIR/bin/"
-    echo "PYTHONPATH      => $DEST_DIR/lib/python2.6/site-packages/"
+    echo "PYTHONPATH      => $DEST_DIR/lib/python2.7/site-packages/"
     echo "LD_LIBRARY_PATH => $DEST_DIR/lib/"
     echo
     echo "For interactive data analysis and visualization, we recommend running"
@@ -547,6 +560,10 @@
     echo
     echo "$DEST_DIR/bin/yt"
     echo
+    echo "To bootstrap a development environment for yt, run:"
+    echo 
+    echo "$DEST_DIR/bin/yt bootstrap_dev"
+    echo
     echo "Note of interest: this installation will use the directory:"
     echo "    $YT_DIR"
     echo "as the source for all the YT code.  This means you probably shouldn't"
@@ -571,13 +588,9 @@
       echo
     fi
     echo
-    echo "For support, see one of the following websites:"
+    echo "For support, see the website and join the mailing list:"
     echo
-    echo "    http://yt.enzotools.org/wiki/"
-    echo "    http://yt.enzotools.org/doc/"
-    echo
-    echo "Please also join the mailing list:"
-    echo 
+    echo "    http://yt.enzotools.org/"
     echo "    http://lists.spacepope.org/listinfo.cgi/yt-users-spacepope.org"
     echo
     echo "========================================================================"


--- a/yt/analysis_modules/halo_profiler/multi_halo_profiler.py	Mon Mar 28 13:44:14 2011 -0600
+++ b/yt/analysis_modules/halo_profiler/multi_halo_profiler.py	Mon Mar 28 13:44:34 2011 -0600
@@ -46,8 +46,7 @@
     parallel_root_only
 from yt.visualization.fixed_resolution import \
     FixedResolutionBuffer
-from yt.visualization.plot_collection import \
-    PlotCollection
+from yt.visualization.image_writer import write_image
 
 PROFILE_RADIUS_THRESHOLD = 2
 
@@ -259,10 +258,11 @@
 
         self.profile_fields.append({'field':field, 'weight_field':weight_field, 'accumulation':accumulation})
 
-    def add_projection(self, field, weight_field=None):
+    def add_projection(self, field, weight_field=None, cmap='algae'):
         "Add a field for projection."
 
-        self.projection_fields.append({'field':field, 'weight_field':weight_field})
+        self.projection_fields.append({'field':field, 'weight_field':weight_field, 
+                                       'cmap': cmap})
 
     @parallel_blocking_call
     def make_profiles(self, filename=None, prefilters=None, **kwargs):
@@ -466,14 +466,13 @@
             return
 
         # Set resolution for fixed resolution output.
-        if save_cube:
-            if self.project_at_level == 'max':
-                proj_level = self.pf.h.max_level
-            else:
-                proj_level = int(self.project_at_level)
-            proj_dx = self.pf.units[self.projection_width_units] / self.pf.parameters['TopGridDimensions'][0] / \
-                (self.pf.parameters['RefineBy']**proj_level)
-            projectionResolution = int(self.projection_width / proj_dx)
+        if self.project_at_level == 'max':
+            proj_level = self.pf.h.max_level
+        else:
+            proj_level = int(self.project_at_level)
+        proj_dx = self.pf.units[self.projection_width_units] / self.pf.parameters['TopGridDimensions'][0] / \
+            (self.pf.parameters['RefineBy']**proj_level)
+        projectionResolution = int(self.projection_width / proj_dx)
 
         # Create output directory.
         if self.output_dir is not None:
@@ -515,8 +514,7 @@
             # Make projections.
             if not isinstance(axes, types.ListType): axes = list([axes])
             for w in axes:
-                # Create a plot collection.
-                pc = PlotCollection(self.pf, center=center)
+                projections = []
                 # YT projections do not follow the right-hand rule.
                 coords = range(3)
                 del coords[w]
@@ -524,12 +522,15 @@
                 y_axis = coords[1]
 
                 for hp in self.projection_fields:
-                    pc.add_projection(hp['field'], w, weight_field=hp['weight_field'], data_source=region)
+                    projections.append(self.pf.h.proj(w, hp['field'], 
+                                                      weight_field=hp['weight_field'], 
+                                                      data_source=region, center=halo['center'],
+                                                      serialize=False))
                 
                 # Set x and y limits, shift image if it overlaps domain boundary.
                 if need_per:
                     pw = self.projection_width/self.pf.units[self.projection_width_units]
-                    shift_projections(self.pf, pc, halo['center'], center, w)
+                    #shift_projections(self.pf, projections, halo['center'], center, w)
                     # Projection has now been shifted to center of box.
                     proj_left = [center[x_axis]-0.5*pw, center[y_axis]-0.5*pw]
                     proj_right = [center[x_axis]+0.5*pw, center[y_axis]+0.5*pw]
@@ -537,30 +538,33 @@
                     proj_left = [leftEdge[x_axis], leftEdge[y_axis]]
                     proj_right = [rightEdge[x_axis], rightEdge[y_axis]]
 
-                pc.set_xlim(proj_left[0], proj_right[0])
-                pc.set_ylim(proj_left[1], proj_right[1])
+                # Save projection data to hdf5 file.
+                if save_cube or save_images:
+                    axis_labels = ['x', 'y', 'z']
 
-                # Save projection data to hdf5 file.
-                if save_cube:
-                    axis_labels = ['x', 'y', 'z']
-                    dataFilename = "%s/Halo_%04d_%s_data.h5" % \
+                    if save_cube:
+                        dataFilename = "%s/Halo_%04d_%s_data.h5" % \
                             (my_output_dir, halo['id'], axis_labels[w])
-                    mylog.info("Saving projection data to %s." % dataFilename)
+                        mylog.info("Saving projection data to %s." % dataFilename)
+                        output = h5py.File(dataFilename, "a")
 
-                    output = h5py.File(dataFilename, "a")
                     # Create fixed resolution buffer for each projection and write them out.
                     for e, hp in enumerate(self.projection_fields):
-                        frb = FixedResolutionBuffer(pc.plots[e].data, (proj_left[0], proj_right[0], 
-                                                                       proj_left[1], proj_right[1]),
-                                                          (projectionResolution, projectionResolution),
-                                                          antialias=False)
+                        frb = FixedResolutionBuffer(projections[e], (proj_left[0], proj_right[0], 
+                                                                     proj_left[1], proj_right[1]),
+                                                    (projectionResolution, projectionResolution),
+                                                    antialias=False)
                         dataset_name = "%s_%s" % (hp['field'], hp['weight_field'])
-                        if dataset_name in output.listnames(): del output[dataset_name]
-                        output.create_dataset(dataset_name, data=frb[hp['field']])
-                    output.close()
+                        if save_cube:
+                            if dataset_name in output.listnames(): del output[dataset_name]
+                            output.create_dataset(dataset_name, data=frb[hp['field']])
 
-                if save_images:
-                    pc.save("%s/Halo_%04d" % (my_output_dir, halo['id']), force_save=True)
+                        if save_images:
+                            filename = "%s/Halo_%04d_%s_%s.png" % (my_output_dir, halo['id'], 
+                                                                   dataset_name, axis_labels[w])
+                            write_image(na.log10(frb[hp['field']]), filename, cmap_name=hp['cmap'])
+
+                    if save_cube: output.close()
 
             del region
 
@@ -789,7 +793,7 @@
         else:
             os.mkdir(my_output_dir)
 
-def shift_projections(pf, pc, oldCenter, newCenter, axis):
+def shift_projections(pf, projections, oldCenter, newCenter, axis):
     """
     Shift projection data around.
     This is necessary when projecting a periodic region.
@@ -801,10 +805,10 @@
     del offset[axis]
     del width[axis]
 
-    for plot in pc.plots:
+    for plot in projections:
         # Get name of data field.
         other_fields = {'px':True, 'py':True, 'pdx':True, 'pdy':True, 'weight_field':True}
-        for pfield in plot.data.data.keys():
+        for pfield in plot.data.keys():
             if not(other_fields.has_key(pfield)):
                 field = pfield
                 break

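The replacement pattern used throughout this changeset -- make a projection, pixelize it with a FixedResolutionBuffer, write a png with write_image -- also works standalone. A sketch, assuming a loaded dataset (the dataset path below is hypothetical):

    import numpy as na
    from yt.convenience import load
    from yt.visualization.fixed_resolution import FixedResolutionBuffer
    from yt.visualization.image_writer import write_image

    pf = load("DD0010/DD0010")  # hypothetical dataset path

    # Project Density along the x axis, no weighting.
    proj = pf.h.proj(0, "Density", weight_field=None)

    # Pixelize the central quarter of the domain into a 512^2 buffer.
    frb = FixedResolutionBuffer(proj, (0.25, 0.75, 0.25, 0.75),
                                (512, 512), antialias=False)

    # Write the image directly; no PlotCollection needed.
    write_image(na.log10(frb["Density"]), "Density_proj_x.png",
                cmap_name="algae")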

--- a/yt/analysis_modules/light_cone/light_cone.py	Mon Mar 28 13:44:14 2011 -0600
+++ b/yt/analysis_modules/light_cone/light_cone.py	Mon Mar 28 13:44:34 2011 -0600
@@ -34,20 +34,19 @@
 from yt.config import ytcfg
 from yt.convenience import load
 from yt.utilities.cosmology import Cosmology
-from yt.visualization.plot_collection import \
-    PlotCollection
+from yt.visualization.image_writer import write_image
 
 from .common_n_volume import commonNVolume
 from .halo_mask import light_cone_halo_map, \
     light_cone_halo_mask
-from .light_cone_projection import LightConeProjection
+from .light_cone_projection import _light_cone_projection
 
 class LightCone(EnzoSimulation):
     def __init__(self, EnzoParameterFile, initial_redshift=1.0, 
                  final_redshift=0.0, observer_redshift=0.0,
                  field_of_view_in_arcminutes=600.0, image_resolution_in_arcseconds=60.0, 
                  use_minimum_datasets=True, deltaz_min=0.0, minimum_coherent_box_fraction=0.0,
-                 output_dir='LC', output_prefix='LightCone', **kwargs):
+                 set_parameters=None, output_dir='LC', output_prefix='LightCone', **kwargs):
         """
         Initialize a LightCone object.
         :param initial_redshift (float): the initial (highest) redshift for the light cone.  Default: 1.0.
@@ -67,6 +66,7 @@
                the projection axis and center.  This was invented to allow light cones with thin slices to 
                sample coherent large scale structure, but in practice does not work so well.  Try setting 
                this parameter to 1 and see what happens.  Default: 0.0.
+        :param set_parameters (dict): dictionary of parameters to attach to pf.parameters.  Default: None.
         :param output_dir (str): the directory in which images and data files will be written.  Default: 'LC'.
         :param output_prefix (str): the prefix of all images and data files.  Default: 'LightCone'.
         """
@@ -79,6 +79,7 @@
         self.use_minimum_datasets = use_minimum_datasets
         self.deltaz_min = deltaz_min
         self.minimum_coherent_box_fraction = minimum_coherent_box_fraction
+        self.set_parameters = set_parameters
         self.output_dir = output_dir
         self.output_prefix = output_prefix
 
@@ -233,8 +234,8 @@
             del halo_mask_cube
 
     def project_light_cone(self, field, weight_field=None, apply_halo_mask=False, node=None,
-                           save_stack=True, save_slice_images=False, flatten_stack=False, photon_field=False,
-                           add_redshift_label=False, **kwargs):
+                           save_stack=True, save_slice_images=False, cmap_name='algae', 
+                           flatten_stack=False, photon_field=False):
         """
         Create projections for light cone, then add them together.
         :param weight_field (str): the weight field of the projection.  This has the same meaning as in standard 
@@ -246,6 +247,7 @@
         :param save_stack (bool): if True, the unflattened light cone data including each individual slice is written to 
                an hdf5 file.  Default: True.
         :param save_slice_images (bool): save images for each individual projection slice.  Default: False.
+        :param cmap_name (str): color map for images.  Default: 'algae'.
         :param flatten_stack (bool): if True, the light cone stack is continually flattened each time a slice is added 
                in order to save memory.  This is generally not necessary.  Default: False.
         :param photon_field (bool): if True, the projection data for each slice is decremented by 4 Pi R^2, where R 
@@ -269,10 +271,13 @@
                 name = "%s%s_%s_%04d_%04d" % (self.output_dir, self.output_prefix,
                                               node, q, len(self.light_cone_solution))
             output['object'] = load(output['filename'])
-            frb = LightConeProjection(output, field, self.pixels, weight_field=weight_field,
-                                      save_image=save_slice_images,
-                                      name=name, node=node, **kwargs)
+            if self.set_parameters is not None:
+                output['object'].parameters.update(self.set_parameters)
+            frb = _light_cone_projection(output, field, self.pixels, 
+                                         weight_field=weight_field, node=node)
             if ytcfg.getint("yt", "__parallel_rank") == 0:
+                if save_slice_images:
+                    write_image(na.log10(frb[field]), "%s_%s.png" % (name, field), cmap_name=cmap_name)
+
                 if photon_field:
                     # Decrement the flux by the luminosity distance. Assume field in frb is in erg/s/cm^2/Hz
                     co = Cosmology(HubbleConstantNow = (100.0 * self.enzoParameters['CosmologyHubbleConstantNow']),
@@ -286,7 +291,6 @@
                     mylog.info("Distance to slice = %e" % dL)
                     frb[field] *= factor #in erg/s/cm^2/Hz on observer's image plane.
 
-            if ytcfg.getint("yt", "__parallel_rank") == 0:
                 if weight_field is not None:
                     # Data come back normalized by the weight field.
                     # Undo that so it can be added up for the light cone.
@@ -305,9 +309,6 @@
                     if weight_field is not None:
                         self.projection_weight_field_stack = [sum(self.projection_weight_field_stack)]
 
-            # Delete the plot collection now that the frb is deleted.
-            del output['pc']
-
             # Unless this is the last slice, delete the dataset object.
             # The last one will be saved to make the plot collection.
             if (q < len(self.light_cone_solution) - 1):
@@ -329,6 +330,10 @@
             # but replace the data with the full light cone projection data.
             frb.data[field] = lightConeProjection
 
+            # Write image.
+            if save_slice_images:
+                write_image(na.log10(frb[field]), "%s_%s.png" % (filename, field), cmap_name=cmap_name)
+
             # Write stack to hdf5 file.
             if save_stack:
                 self._save_light_cone_stack(field=field, weight_field=weight_field, filename=filename)
@@ -341,17 +346,6 @@
                 else:
                     mylog.error("No halo mask loaded, call get_halo_mask.")
 
-            # Make a plot collection for the light cone projection.
-            center = [0.5 * (self.light_cone_solution[-1]['object'].parameters['DomainLeftEdge'][w] + 
-                             self.light_cone_solution[-1]['object'].parameters['DomainRightEdge'][w])
-                      for w in range(self.light_cone_solution[-1]['object'].parameters['TopGridRank'])]
-            pc = PlotCollection(self.light_cone_solution[-1]['object'], center=center)
-            pc.add_fixed_resolution_plot(frb, field, **kwargs)
-            pc.save(filename)
-
-            # Return the plot collection so the user can remake the plot if they want.
-            return pc
-
     def rerandomize_light_cone_solution(self, newSeed, recycle=True, filename=None):
         """
         When making a projection for a light cone, only randomizations along the line of sight make any 

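For the photon_field correction above, the slice data (assumed to be in erg/s/cm^2/Hz at the emitter) is divided by 4 pi dL^2 to put it on the observer's image plane. A sketch of just the geometric factor; the real factor also derives dL from yt's Cosmology and folds in unit conversions:

    import math

    def photon_field_factor(dL_cm):
        # Inverse-square dimming for a slice at luminosity distance
        # dL_cm (in cm): flux = emission / (4 pi dL^2).  Illustrative
        # only; unit handling is omitted here.
        return 1.0 / (4.0 * math.pi * dL_cm ** 2)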

--- a/yt/analysis_modules/light_cone/light_cone_projection.py	Mon Mar 28 13:44:14 2011 -0600
+++ b/yt/analysis_modules/light_cone/light_cone_projection.py	Mon Mar 28 13:44:34 2011 -0600
@@ -30,15 +30,12 @@
 from yt.config import ytcfg
 from yt.visualization.fixed_resolution import \
     FixedResolutionBuffer
-from yt.visualization.plot_collection import \
-    PlotCollection
 from yt.utilities.parallel_tools.parallel_analysis_interface import \
     parallel_blocking_call
 
 @parallel_blocking_call
-def LightConeProjection(lightConeSlice, field, pixels, weight_field=None, 
-                        save_image=False, name="", node=None, field_cuts=None,
-                        add_redshift_label=False, **kwargs):
+def _light_cone_projection(lightConeSlice, field, pixels, weight_field=None, 
+                           save_image=False, node=None, field_cuts=None):
     "Create a single projection to be added into the light cone stack."
 
     # Use some projection parameters to seed random number generator to make unique node name.
@@ -57,13 +54,9 @@
 
     mylog.info("Making projection at z = %f from %s." % (lightConeSlice['redshift'], lightConeSlice['filename']))
 
-    region_center = [0.5 * (lightConeSlice['object'].parameters['DomainRightEdge'][q] +
-                            lightConeSlice['object'].parameters['DomainLeftEdge'][q]) \
-                         for q in range(lightConeSlice['object'].parameters['TopGridRank'])]
-
-    # Make the plot collection and put it in the slice so we can delete it cleanly in the same scope 
-    # as where the frb will be deleted.
-    lightConeSlice['pc'] = PlotCollection(lightConeSlice['object'], center=region_center)
+    region_center = [0.5 * (lightConeSlice['object'].domain_right_edge[q] +
+                            lightConeSlice['object'].domain_left_edge[q]) \
+                         for q in range(lightConeSlice['object'].dimensionality)]
 
     # 1. The Depth Problem
     # Use coordinate field cut in line of sight to cut projection to proper depth.
@@ -90,11 +83,9 @@
         these_field_cuts.append(cut_mask)
 
     # Make projection.
-    lightConeSlice['pc'].add_projection(field, lightConeSlice['ProjectionAxis'], 
-                                        weight_field=weight_field, 
-                                        field_parameters=dict(field_cuts=these_field_cuts, 
-                                                              node_name=node_name),
-                                        **kwargs)
+    proj = lightConeSlice['object'].h.proj(lightConeSlice['ProjectionAxis'], field, 
+                                           weight_field, center=region_center, 
+                                           field_cuts=these_field_cuts, node_name=node_name)
 
     # If parallel: all the processes have the whole projection object, but we only need to do the tiling, shifting, and cutting once.
     if ytcfg.getint("yt", "__parallel_rank") == 0:
@@ -103,30 +94,24 @@
         # Tile projection to specified width.
 
         # Original projection data.
-        original_px = copy.deepcopy(lightConeSlice['pc'].plots[0].data['px'])
-        original_py = copy.deepcopy(lightConeSlice['pc'].plots[0].data['py'])
-        original_pdx = copy.deepcopy(lightConeSlice['pc'].plots[0].data['pdx'])
-        original_pdy = copy.deepcopy(lightConeSlice['pc'].plots[0].data['pdy'])
-        original_field = copy.deepcopy(lightConeSlice['pc'].plots[0].data[field])
-        original_weight_field = copy.deepcopy(lightConeSlice['pc'].plots[0].data['weight_field'])
+        original_px = copy.deepcopy(proj['px'])
+        original_py = copy.deepcopy(proj['py'])
+        original_pdx = copy.deepcopy(proj['pdx'])
+        original_pdy = copy.deepcopy(proj['pdy'])
+        original_field = copy.deepcopy(proj[field])
+        original_weight_field = copy.deepcopy(proj['weight_field'])
 
         # Copy original into offset positions to make tiles.
         for x in range(int(na.ceil(lightConeSlice['WidthBoxFraction']))):
             for y in range(int(na.ceil(lightConeSlice['WidthBoxFraction']))):
                 if ((x + y) > 0):
-                    lightConeSlice['pc'].plots[0].data['px'] = \
-                        na.concatenate([lightConeSlice['pc'].plots[0].data['px'], original_px+x])
-                    lightConeSlice['pc'].plots[0].data['py'] = \
-                        na.concatenate([lightConeSlice['pc'].plots[0].data['py'], original_py+y])
-                    lightConeSlice['pc'].plots[0].data['pdx'] = \
-                        na.concatenate([lightConeSlice['pc'].plots[0].data['pdx'], original_pdx])
-                    lightConeSlice['pc'].plots[0].data['pdy'] = \
-                        na.concatenate([lightConeSlice['pc'].plots[0].data['pdy'], original_pdy])
-                    lightConeSlice['pc'].plots[0].data[field] = \
-                        na.concatenate([lightConeSlice['pc'].plots[0].data[field], original_field])
-                    lightConeSlice['pc'].plots[0].data['weight_field'] = \
-                        na.concatenate([lightConeSlice['pc'].plots[0].data['weight_field'], 
-                                        original_weight_field])
+                    proj['px'] = na.concatenate([proj['px'], original_px+x])
+                    proj['py'] = na.concatenate([proj['py'], original_py+y])
+                    proj['pdx'] = na.concatenate([proj['pdx'], original_pdx])
+                    proj['pdy'] = na.concatenate([proj['pdy'], original_pdy])
+                    proj[field] = na.concatenate([proj[field], original_field])
+                    proj['weight_field'] = na.concatenate([proj['weight_field'], 
+                                                           original_weight_field])
 
         # Delete originals.
         del original_px
@@ -144,86 +129,74 @@
         del offset[lightConeSlice['ProjectionAxis']]
 
         # Shift x and y positions.
-        lightConeSlice['pc'].plots[0]['px'] -= offset[0]
-        lightConeSlice['pc'].plots[0]['py'] -= offset[1]
+        proj['px'] -= offset[0]
+        proj['py'] -= offset[1]
 
         # Wrap off-edge cells back around to other side (periodic boundary conditions).
-        lightConeSlice['pc'].plots[0]['px'][lightConeSlice['pc'].plots[0]['px'] < 0] += \
-            na.ceil(lightConeSlice['WidthBoxFraction'])
-        lightConeSlice['pc'].plots[0]['py'][lightConeSlice['pc'].plots[0]['py'] < 0] += \
-            na.ceil(lightConeSlice['WidthBoxFraction'])
+        proj['px'][proj['px'] < 0] += na.ceil(lightConeSlice['WidthBoxFraction'])
+        proj['py'][proj['py'] < 0] += na.ceil(lightConeSlice['WidthBoxFraction'])
 
         # After shifting, some cells have fractional coverage on both sides of the box.
         # Find those cells and make copies to be placed on the other side.
 
         # Cells hanging off the right edge.
-        add_x_right = lightConeSlice['pc'].plots[0]['px'] + \
-            0.5 * lightConeSlice['pc'].plots[0]['pdx'] > na.ceil(lightConeSlice['WidthBoxFraction'])
-        add_x_px = lightConeSlice['pc'].plots[0]['px'][add_x_right]
+        add_x_right = proj['px'] + 0.5 * proj['pdx'] > na.ceil(lightConeSlice['WidthBoxFraction'])
+        add_x_px = proj['px'][add_x_right]
         add_x_px -= na.ceil(lightConeSlice['WidthBoxFraction'])
-        add_x_py = lightConeSlice['pc'].plots[0]['py'][add_x_right]
-        add_x_pdx = lightConeSlice['pc'].plots[0]['pdx'][add_x_right]
-        add_x_pdy = lightConeSlice['pc'].plots[0]['pdy'][add_x_right]
-        add_x_field = lightConeSlice['pc'].plots[0][field][add_x_right]
-        add_x_weight_field = lightConeSlice['pc'].plots[0]['weight_field'][add_x_right]
+        add_x_py = proj['py'][add_x_right]
+        add_x_pdx = proj['pdx'][add_x_right]
+        add_x_pdy = proj['pdy'][add_x_right]
+        add_x_field = proj[field][add_x_right]
+        add_x_weight_field = proj['weight_field'][add_x_right]
         del add_x_right
 
         # Cells hanging off the left edge.
-        add_x_left = lightConeSlice['pc'].plots[0]['px'] - \
-            0.5 * lightConeSlice['pc'].plots[0]['pdx'] < 0
-        add2_x_px = lightConeSlice['pc'].plots[0]['px'][add_x_left]
+        add_x_left = proj['px'] - 0.5 * proj['pdx'] < 0
+        add2_x_px = proj['px'][add_x_left]
         add2_x_px += na.ceil(lightConeSlice['WidthBoxFraction'])
-        add2_x_py = lightConeSlice['pc'].plots[0]['py'][add_x_left]
-        add2_x_pdx = lightConeSlice['pc'].plots[0]['pdx'][add_x_left]
-        add2_x_pdy = lightConeSlice['pc'].plots[0]['pdy'][add_x_left]
-        add2_x_field = lightConeSlice['pc'].plots[0][field][add_x_left]
-        add2_x_weight_field = lightConeSlice['pc'].plots[0]['weight_field'][add_x_left]
+        add2_x_py = proj['py'][add_x_left]
+        add2_x_pdx = proj['pdx'][add_x_left]
+        add2_x_pdy = proj['pdy'][add_x_left]
+        add2_x_field = proj[field][add_x_left]
+        add2_x_weight_field = proj['weight_field'][add_x_left]
         del add_x_left
 
         # Cells hanging off the top edge.
-        add_y_right = lightConeSlice['pc'].plots[0]['py'] + \
-            0.5 * lightConeSlice['pc'].plots[0]['pdy'] > na.ceil(lightConeSlice['WidthBoxFraction'])
-        add_y_px = lightConeSlice['pc'].plots[0]['px'][add_y_right]
-        add_y_py = lightConeSlice['pc'].plots[0]['py'][add_y_right]
+        add_y_right = proj['py'] + 0.5 * proj['pdy'] > na.ceil(lightConeSlice['WidthBoxFraction'])
+        add_y_px = proj['px'][add_y_right]
+        add_y_py = proj['py'][add_y_right]
         add_y_py -= na.ceil(lightConeSlice['WidthBoxFraction'])
-        add_y_pdx = lightConeSlice['pc'].plots[0]['pdx'][add_y_right]
-        add_y_pdy = lightConeSlice['pc'].plots[0]['pdy'][add_y_right]
-        add_y_field = lightConeSlice['pc'].plots[0][field][add_y_right]
-        add_y_weight_field = lightConeSlice['pc'].plots[0]['weight_field'][add_y_right]
+        add_y_pdx = proj['pdx'][add_y_right]
+        add_y_pdy = proj['pdy'][add_y_right]
+        add_y_field = proj[field][add_y_right]
+        add_y_weight_field = proj['weight_field'][add_y_right]
         del add_y_right
 
         # Cells hanging off the bottom edge.
-        add_y_left = lightConeSlice['pc'].plots[0]['py'] - \
-            0.5 * lightConeSlice['pc'].plots[0]['pdy'] < 0
-        add2_y_px = lightConeSlice['pc'].plots[0]['px'][add_y_left]
-        add2_y_py = lightConeSlice['pc'].plots[0]['py'][add_y_left]
+        add_y_left = proj['py'] - 0.5 * proj['pdy'] < 0
+        add2_y_px = proj['px'][add_y_left]
+        add2_y_py = proj['py'][add_y_left]
         add2_y_py += na.ceil(lightConeSlice['WidthBoxFraction'])
-        add2_y_pdx = lightConeSlice['pc'].plots[0]['pdx'][add_y_left]
-        add2_y_pdy = lightConeSlice['pc'].plots[0]['pdy'][add_y_left]
-        add2_y_field = lightConeSlice['pc'].plots[0][field][add_y_left]
-        add2_y_weight_field = lightConeSlice['pc'].plots[0]['weight_field'][add_y_left]
+        add2_y_pdx = proj['pdx'][add_y_left]
+        add2_y_pdy = proj['pdy'][add_y_left]
+        add2_y_field = proj[field][add_y_left]
+        add2_y_weight_field = proj['weight_field'][add_y_left]
         del add_y_left
 
         # Add the hanging cells back to the projection data.
-        lightConeSlice['pc'].plots[0].data['px'] = \
-            na.concatenate([lightConeSlice['pc'].plots[0]['px'], add_x_px, add_y_px, 
-                            add2_x_px, add2_y_px])
-        lightConeSlice['pc'].plots[0].data['py'] = \
-            na.concatenate([lightConeSlice['pc'].plots[0]['py'], add_x_py, add_y_py, 
-                            add2_x_py, add2_y_py])
-        lightConeSlice['pc'].plots[0].data['pdx'] = \
-            na.concatenate([lightConeSlice['pc'].plots[0]['pdx'], add_x_pdx, add_y_pdx, 
-                            add2_x_pdx, add2_y_pdx])
-        lightConeSlice['pc'].plots[0].data['pdy'] = \
-            na.concatenate([lightConeSlice['pc'].plots[0]['pdy'], add_x_pdy, add_y_pdy, 
-                            add2_x_pdy, add2_y_pdy])
-        lightConeSlice['pc'].plots[0].data[field] = \
-            na.concatenate([lightConeSlice['pc'].plots[0][field], add_x_field, add_y_field, 
-                            add2_x_field, add2_y_field])
-        lightConeSlice['pc'].plots[0].data['weight_field'] = \
-            na.concatenate([lightConeSlice['pc'].plots[0]['weight_field'], 
-                            add_x_weight_field, add_y_weight_field,
-                            add2_x_weight_field, add2_y_weight_field])
+        proj['px'] = na.concatenate([proj['px'], add_x_px, add_y_px, 
+                                     add2_x_px, add2_y_px])
+        proj['py'] = na.concatenate([proj['py'], add_x_py, add_y_py, 
+                                     add2_x_py, add2_y_py])
+        proj['pdx'] = na.concatenate([proj['pdx'], add_x_pdx, add_y_pdx, 
+                                      add2_x_pdx, add2_y_pdx])
+        proj['pdy'] = na.concatenate([proj['pdy'], add_x_pdy, add_y_pdy, 
+                                      add2_x_pdy, add2_y_pdy])
+        proj[field] = na.concatenate([proj[field], add_x_field, add_y_field, 
+                                      add2_x_field, add2_y_field])
+        proj['weight_field'] = na.concatenate([proj['weight_field'], 
+                                               add_x_weight_field, add_y_weight_field,
+                                               add2_x_weight_field, add2_y_weight_field])
 
         # Delete original copies of hanging cells.
         del add_x_px, add_y_px, add2_x_px, add2_y_px
@@ -236,42 +209,29 @@
         # Tiles were made rounding up the width to the nearest integer.
         # Cut off the edges to get the specified width.
         # Cut in the x direction.
-        cut_x = lightConeSlice['pc'].plots[0].data['px'] - \
-            0.5 * lightConeSlice['pc'].plots[0].data['pdx'] < lightConeSlice['WidthBoxFraction']
-        lightConeSlice['pc'].plots[0].data['px'] = lightConeSlice['pc'].plots[0].data['px'][cut_x]
-        lightConeSlice['pc'].plots[0].data['py'] = lightConeSlice['pc'].plots[0].data['py'][cut_x]
-        lightConeSlice['pc'].plots[0].data['pdx'] = lightConeSlice['pc'].plots[0].data['pdx'][cut_x]
-        lightConeSlice['pc'].plots[0].data['pdy'] = lightConeSlice['pc'].plots[0].data['pdy'][cut_x]
-        lightConeSlice['pc'].plots[0].data[field] = lightConeSlice['pc'].plots[0].data[field][cut_x]
-        lightConeSlice['pc'].plots[0].data['weight_field'] = \
-            lightConeSlice['pc'].plots[0].data['weight_field'][cut_x]
+        cut_x = proj['px'] - 0.5 * proj['pdx'] < lightConeSlice['WidthBoxFraction']
+        proj['px'] = proj['px'][cut_x]
+        proj['py'] = proj['py'][cut_x]
+        proj['pdx'] = proj['pdx'][cut_x]
+        proj['pdy'] = proj['pdy'][cut_x]
+        proj[field] = proj[field][cut_x]
+        proj['weight_field'] = proj['weight_field'][cut_x]
         del cut_x
 
         # Cut in the y direction.
-        cut_y = lightConeSlice['pc'].plots[0].data['py'] - \
-            0.5 * lightConeSlice['pc'].plots[0].data['pdy'] < lightConeSlice['WidthBoxFraction']
-        lightConeSlice['pc'].plots[0].data['px'] = lightConeSlice['pc'].plots[0].data['px'][cut_y]
-        lightConeSlice['pc'].plots[0].data['py'] = lightConeSlice['pc'].plots[0].data['py'][cut_y]
-        lightConeSlice['pc'].plots[0].data['pdx'] = lightConeSlice['pc'].plots[0].data['pdx'][cut_y]
-        lightConeSlice['pc'].plots[0].data['pdy'] = lightConeSlice['pc'].plots[0].data['pdy'][cut_y]
-        lightConeSlice['pc'].plots[0].data[field] = lightConeSlice['pc'].plots[0].data[field][cut_y]
-        lightConeSlice['pc'].plots[0].data['weight_field'] = lightConeSlice['pc'].plots[0].data['weight_field'][cut_y]
+        cut_y = proj['py'] - 0.5 * proj['pdy'] < lightConeSlice['WidthBoxFraction']
+        proj['px'] = proj['px'][cut_y]
+        proj['py'] = proj['py'][cut_y]
+        proj['pdx'] = proj['pdx'][cut_y]
+        proj['pdy'] = proj['pdy'][cut_y]
+        proj[field] = proj[field][cut_y]
+        proj['weight_field'] = proj['weight_field'][cut_y]
         del cut_y
 
-        # Save an image if requested.
-        if save_image:
-            lightConeSlice['pc'].set_xlim(0, lightConeSlice['WidthBoxFraction'])
-            lightConeSlice['pc'].set_ylim(0, lightConeSlice['WidthBoxFraction'])
-            if add_redshift_label:
-                lightConeSlice['pc'].plots[-1].modify['text']((0.5, 0.03), "z = %.3f" % lightConeSlice['redshift'], 
-                                                              dict(color='black',size=50))
-            lightConeSlice['pc'].save(name)
-
         # Create fixed resolution buffer to return back to the light cone object.
         # These buffers will be stacked together to make the light cone.
-        frb = FixedResolutionBuffer(lightConeSlice['pc'].plots[0].data, \
-                                        (0, lightConeSlice['WidthBoxFraction'], 
-                                         0, lightConeSlice['WidthBoxFraction']),
+        frb = FixedResolutionBuffer(proj, (0, lightConeSlice['WidthBoxFraction'], 
+                                           0, lightConeSlice['WidthBoxFraction']),
                                     (pixels, pixels), antialias=False)
 
         return frb

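With the projection's flat pixel arrays exposed directly, the periodic bookkeeping above reduces to masking and concatenation. A one-axis numpy sketch of duplicating cells that straddle a periodic edge after the shift, mirroring the add_x_right / add_x_left logic for a single field:

    import numpy as np

    def wrap_hanging_cells(px, pdx, field, width):
        # px: pixel-center coordinates, pdx: pixel widths, width: the
        # tiled box width in box fractions.  Cells whose extent crosses
        # an edge get a copy on the opposite side.
        w = np.ceil(width)
        right = px + 0.5 * pdx > w
        left = px - 0.5 * pdx < 0
        px = np.concatenate([px, px[right] - w, px[left] + w])
        pdx = np.concatenate([pdx, pdx[right], pdx[left]])
        field = np.concatenate([field, field[right], field[left]])
        return px, pdx, field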

--- a/yt/config.py	Mon Mar 28 13:44:14 2011 -0600
+++ b/yt/config.py	Mon Mar 28 13:44:34 2011 -0600
@@ -45,6 +45,7 @@
     loadfieldplugins = 'True',
     pluginfilename = 'my_plugins.py',
     parallel_traceback = 'False',
+    pasteboard_repo = '',
     )
 # Here is the upgrade.  We're actually going to parse the file in its entirety
 # here.  Then, if it has any of the Forbidden Sections, it will be rewritten


--- a/yt/data_objects/api.py	Mon Mar 28 13:44:14 2011 -0600
+++ b/yt/data_objects/api.py	Mon Mar 28 13:44:34 2011 -0600
@@ -51,10 +51,8 @@
     BinnedProfile2D, \
     BinnedProfile3D
 
-# Disabled for now
 from time_series import \
     TimeSeriesData, \
-    EnzoTimeSeries, \
     TimeSeriesDataObject
 
 from analyzer_objects import \


--- a/yt/data_objects/data_containers.py	Mon Mar 28 13:44:14 2011 -0600
+++ b/yt/data_objects/data_containers.py	Mon Mar 28 13:44:34 2011 -0600
@@ -639,6 +639,97 @@
         self._ts[grid.id] = na.abs(ts)
         return mask
 
+class AMRStreamlineBase(AMR1DData):
+    _type_name = "streamline"
+    _con_args = ('positions',)
+    sort_by = 't'
+    def __init__(self, positions, fields=None, pf=None, **kwargs):
+        """
+        This is a streamline, which is a set of points defined as
+        being parallel to some vector field.
+
+        This object is typically accessed through the Streamlines.path
+        function.  The resulting arrays have their dimensionality
+        reduced to one, and an ordered list of points at (x, y)
+        tuples along `axis` is available, as is the `t` field, which
+        corresponds to a unitless measurement along the ray from start
+        to end.
+
+        Parameters
+        ----------
+        positions : array-like
+            List of streamline positions
+        fields : list of strings, optional
+            If you want the object to pre-retrieve a set of fields, supply them
+            here.  This is not necessary.
+        pf : Parameter file object
+            Passed in to access the hierarchy
+        kwargs : dict of items
+            Any additional values are passed as field parameters that can be
+            accessed by generated fields.
+
+        Examples
+        --------
+
+        >>> from yt.visualization.api import Streamlines
+        >>> streamlines = Streamlines(pf, [0.5]*3) 
+        >>> streamlines.integrate_through_volume()
+        >>> stream = streamlines.path(0)
+        >>> matplotlib.pylab.semilogy(stream['t'], stream['Density'], '-x')
+        
+        """
+        AMR1DData.__init__(self, pf, fields, **kwargs)
+        self.positions = positions
+        self.dts = na.empty_like(positions[:,0])
+        self.dts[:-1] = na.sqrt(na.sum((self.positions[1:]-
+                                        self.positions[:-1])**2,axis=1))
+        self.dts[-1] = self.dts[-2] # the last point has no next segment
+        self.ts = na.add.accumulate(self.dts)
+        self._set_center(self.positions[0])
+        self.set_field_parameter('center', self.positions[0])
+        self._dts, self._ts = {}, {}
+        #self._refresh_data()
+
+    def _get_list_of_grids(self):
+        # Get the value of the line at each LeftEdge and RightEdge
+        LE = self.pf.h.grid_left_edge
+        RE = self.pf.h.grid_right_edge
+        # Check left faces first
+        min_streampoint = na.min(self.positions, axis=0)
+        max_streampoint = na.max(self.positions, axis=0)
+        p = na.all((min_streampoint <= RE) & (max_streampoint > LE), axis=1)
+        self._grids = self.hierarchy.grids[p]
+
+    def _get_data_from_grid(self, grid, field):
+        mask = na.logical_and(self._get_cut_mask(grid),
+                              grid.child_mask)
+        if field == 'dts': return self._dts[grid.id][mask]
+        if field == 't': return self._ts[grid.id][mask]
+        return grid[field][mask]
+        
+    @cache_mask
+    def _get_cut_mask(self, grid):
+        mask = na.zeros(grid.ActiveDimensions, dtype='int')
+        dts = na.zeros(grid.ActiveDimensions, dtype='float64')
+        ts = na.zeros(grid.ActiveDimensions, dtype='float64')
+        #pdb.set_trace()
+        points_in_grid = na.all(self.positions > grid.LeftEdge, axis=1) & \
+                         na.all(self.positions <= grid.RightEdge, axis=1) 
+        pids = na.where(points_in_grid)[0]
+        for i, pos in zip(pids, self.positions[points_in_grid]):
+            if not points_in_grid[i]: continue
+            ci = ((pos - grid.LeftEdge)/grid.dds).astype('int')
+            for j in range(3):
+                ci[j] = min(ci[j], grid.ActiveDimensions[j]-1)
+            if mask[ci[0], ci[1], ci[2]]:
+                continue
+            mask[ci[0], ci[1], ci[2]] = 1
+            dts[ci[0], ci[1], ci[2]] = self.dts[i]
+            ts[ci[0], ci[1], ci[2]] = self.ts[i]
+        self._dts[grid.id] = dts
+        self._ts[grid.id] = ts
+        return mask
+
 class AMR2DData(AMRData, GridPropertiesMixin, ParallelAnalysisInterface):
     _key_fields = ['px','py','pdx','pdy']
     """

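The cut mask above deposits each streamline point into the grid cell that contains it. The index arithmetic is just an offset, a divide, and a clamp; a standalone sketch:

    import numpy as np

    def cell_index(pos, left_edge, dds, dims):
        # Offset by the grid's left edge, divide by the cell width dds,
        # truncate to int, and clamp to ActiveDimensions - 1, exactly as
        # the _get_cut_mask loop does per point.  Assumes pos lies
        # inside the grid.
        ci = ((np.asarray(pos) - left_edge) / dds).astype("int64")
        return np.minimum(ci, np.asarray(dims) - 1)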

--- a/yt/data_objects/time_series.py	Mon Mar 28 13:44:14 2011 -0600
+++ b/yt/data_objects/time_series.py	Mon Mar 28 13:44:34 2011 -0600
@@ -52,9 +52,13 @@
         return key in analysis_task_registry
 
 class TimeSeriesData(object):
-    def __init__(self, name):
-        self.outputs = []
+    def __init__(self, outputs = None):
+        if outputs is None: outputs = []
+        self.outputs = outputs
         self.tasks = AnalysisTaskProxy(self)
+        for type_name in data_object_registry:
+            setattr(self, type_name, functools.partial(
+                TimeSeriesDataObject, self, type_name))
 
     def __iter__(self):
         # We can make this fancier, but this works
@@ -94,27 +98,24 @@
                 return_values[-1].append(rv)
         return return_values
 
-class EnzoTimeSeries(TimeSeriesData):
-    _enzo_header = "DATASET WRITTEN "
-    def __init__(self, name, **kwargs):
-        TimeSeriesData.__init__(self, name)
-        output_list = kwargs.pop('output_list', None)
-        output_log = kwargs.pop('output_log', None)
-        if output_list: self._populate_output_list(output_list)
-        if output_log: self._populate_output_log(output_log)
-        for type_name in data_object_registry:
-            setattr(self, type_name, functools.partial(
-                TimeSeriesDataObject, self, type_name))
+    @classmethod
+    def from_filenames(cls, filename_list):
+        outputs = []
+        for fn in filename_list:
+            outputs.append(load(fn))
+        obj = cls(outputs)
+        return obj
 
-    def _populate_output_list(self, output_list):
-        for output in output_list:
-            self._insert(EnzoStaticOutput(output))
-
-    def _populate_output_log(self, output_log):
+    @classmethod
+    def from_output_log(cls, output_log,
+                        line_prefix = "DATASET WRITTEN"):
+        outputs = []
         for line in open(output_log):
-            if not line.startswith(self._enzo_header): continue
-            fn = line[len(self._enzo_header):].strip()
-            self._insert(load(fn))
+            if not line.startswith(line_prefix): continue
+            fn = line[len(line_prefix):].strip()
+            outputs.append(load(fn))
+        obj = cls(outputs)
+        return obj
 
 class TimeSeriesQuantitiesContainer(object):
     def __init__(self, data_object, quantities):

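With the Enzo-specific constructor logic folded into classmethods, building
a time series no longer requires EnzoTimeSeries. A usage sketch (the
filenames and OutputLog path here are placeholders; `load` is resolved
inside the classmethods):

    ts = TimeSeriesData.from_filenames(["DD0001/DD0001", "DD0002/DD0002"])
    for pf in ts:                  # __iter__ walks the loaded outputs
        print(pf)

    ts = TimeSeriesData.from_output_log("OutputLog")

Because __init__ now registers a partial for every entry in
data_object_registry, constructs like ts.sphere(...) should hand back a
TimeSeriesDataObject spanning all outputs.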

--- a/yt/data_objects/universal_fields.py	Mon Mar 28 13:44:14 2011 -0600
+++ b/yt/data_objects/universal_fields.py	Mon Mar 28 13:44:34 2011 -0600
@@ -35,6 +35,7 @@
 from yt.funcs import *
 
 from yt.utilities.amr_utils import CICDeposit_3
+from yt.utilities.cosmology import Cosmology
 from field_info_container import \
     add_field, \
     ValidateDataField, \
@@ -53,7 +54,9 @@
      sigma_thompson, \
      clight, \
      kboltz, \
-     G
+     G, \
+     rho_crit_now, \
+     speed_of_light_cgs
      
 # Note that, despite my newfound efforts to comply with PEP-8,
 # I violate it here in order to keep the name/func_name relationship
@@ -388,6 +391,63 @@
     return (data['Density'] + data['Dark_Matter_Density'])
 add_field("Matter_Density",function=_Matter_Density,units=r"\rm{g}/\rm{cm^3}")
 
+def _ComovingDensity(field, data):
+    ef = (1.0 + data.pf.current_redshift)**3.0
+    return data["Density"]/ef
+add_field("ComovingDensity", function=_ComovingDensity, units=r"\rm{g}/\rm{cm}^3")
+
+# This is rho_total / rho_cr(z).
+def _Convert_Overdensity(data):
+    return 1 / (rho_crit_now * data.pf.hubble_constant**2 * 
+                (1+data.pf.current_redshift)**3)
+add_field("Overdensity",function=_Matter_Density,
+          convert_function=_Convert_Overdensity, units=r"")
+
+# This is (rho_total - <rho_total>) / <rho_total>.
+def _DensityPerturbation(field, data):
+    rho_bar = rho_crit_now * data.pf.omega_matter * \
+        data.pf.hubble_constant**2 * \
+        (1.0 + data.pf.current_redshift)**3
+    return ((data['Matter_Density'] - rho_bar) / rho_bar)
+add_field("DensityPerturbation",function=_DensityPerturbation,units=r"")
+
+# This is rho_b / <rho_b>.
+def _Baryon_Overdensity(field, data):
+    return data['Density']
+def _Convert_Baryon_Overdensity(data):
+    if data.pf.parameters.has_key('omega_baryon_now'):
+        omega_baryon_now = data.pf.parameters['omega_baryon_now']
+    else:
+        omega_baryon_now = 0.0441
+    return 1 / (omega_baryon_now * rho_crit_now * 
+                (data.pf['CosmologyHubbleConstantNow']**2) * 
+                ((1+data.pf['CosmologyCurrentRedshift'])**3))
+add_field("Baryon_Overdensity", function=_Baryon_Overdensity, 
+          convert_function=_Convert_Baryon_Overdensity, units=r"")
+
+# Weak lensing convergence.
+# Eqn 4 of Metzler, White, & Loken (2001, ApJ, 547, 560).
+def _convertConvergence(data):
+    if not data.pf.parameters.has_key('cosmology_calculator'):
+        data.pf.parameters['cosmology_calculator'] = Cosmology(
+            HubbleConstantNow=(100.*data.pf.hubble_constant),
+            OmegaMatterNow=data.pf.omega_matter, OmegaLambdaNow=data.pf.omega_lambda)
+    # observer to lens
+    DL = data.pf.parameters['cosmology_calculator'].AngularDiameterDistance(
+        data.pf.parameters['observer_redshift'], data.pf.current_redshift)
+    # observer to source
+    DS = data.pf.parameters['cosmology_calculator'].AngularDiameterDistance(
+        data.pf.parameters['observer_redshift'], data.pf.parameters['lensing_source_redshift'])
+    # lens to source
+    DLS = data.pf.parameters['cosmology_calculator'].AngularDiameterDistance(
+        data.pf.current_redshift, data.pf.parameters['lensing_source_redshift'])
+    return (((DL * DLS) / DS) * (1.5e14 * data.pf.omega_matter * 
+                                (data.pf.hubble_constant / speed_of_light_cgs)**2 *
+                                (1 + data.pf.current_redshift)))
+add_field("WeakLensingConvergence", function=_DensityPerturbation, 
+          convert_function=_convertConvergence, 
+          projection_conversion='mpccm')
+
 def _CellVolume(field, data):
     if data['dx'].size == 1:
         try:

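All three new density fields divide by the same yardstick: the mean proper
matter density rho_bar = rho_crit_now * Omega_m * h^2 * (1+z)^3. A numeric
sketch of the conversion (rho_crit_now here is the usual ~1.88e-29 h^-2
g/cm^3; see yt's physical_constants for the exact value, and the sample
density is invented):

    rho_crit_now = 1.8788e-29      # g/cm^3, times h^2 at z = 0 (assumed)
    omega_matter = 0.27
    hubble_constant = 0.7          # h
    z = 2.0

    rho_bar = rho_crit_now * omega_matter * hubble_constant**2 * (1.0 + z)**3
    rho = 1.0e-26                  # sample total matter density, g/cm^3
    print(rho / rho_bar)               # Overdensity
    print((rho - rho_bar) / rho_bar)   # DensityPerturbation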

--- a/yt/frontends/enzo/data_structures.py	Mon Mar 28 13:44:14 2011 -0600
+++ b/yt/frontends/enzo/data_structures.py	Mon Mar 28 13:44:34 2011 -0600
@@ -38,6 +38,7 @@
 from itertools import izip
 
 from yt.funcs import *
+from yt.config import ytcfg
 from yt.data_objects.grid_patch import \
     AMRGridPatch
 from yt.data_objects.hierarchy import \
@@ -137,7 +138,7 @@
         self.hierarchy_filename = os.path.abspath(
             "%s.hierarchy" % (pf.parameter_filename))
         harray_fn = self.hierarchy_filename[:-9] + "harrays"
-        if os.path.exists(harray_fn):
+        if ytcfg.getboolean("yt","serialize") and os.path.exists(harray_fn):
             try:
                 harray_fp = h5py.File(harray_fn)
                 self.num_grids = harray_fp["/Level"].len()
@@ -286,6 +287,7 @@
     _bn = "%s.cpu%%04i"
     def _parse_binary_hierarchy(self):
         mylog.info("Getting the binary hierarchy")
+        if not ytcfg.getboolean("yt","serialize"): return False
         try:
             f = h5py.File(self.hierarchy_filename[:-9] + "harrays")
         except h5py.h5.H5Error:
@@ -569,9 +571,14 @@
         if self.__class__._cached_field_list is None:
             EnzoHierarchy._detect_fields(self)
             self.__class__._cached_field_list = self.field_list
+        else:
+            self.field_list = self.__class__._cached_field_list
+
+    def _setup_derived_fields(self):
+        if self.__class__._cached_derived_field_list is None:
+            EnzoHierarchy._setup_derived_fields(self)
             self.__class__._cached_derived_field_list = self.derived_field_list
         else:
-            self.field_list = self.__class__._cached_field_list
             self.derived_field_list = self.__class__._cached_derived_field_list
 
     def _generate_random_grids(self):
@@ -630,6 +637,7 @@
         parameter file and a *conversion_override* dictionary that consists
         of {fieldname : conversion_to_cgs} that will override the #DataCGS.
         """
+        if filename.endswith(".hierarchy"): filename = filename[:-10]
         if parameter_override is None: parameter_override = {}
         self._parameter_override = parameter_override
         if conversion_override is None: conversion_override = {}
@@ -874,6 +882,8 @@
 
     @classmethod
     def _is_valid(cls, *args, **kwargs):
+        if ("%s" % (args[0])).endswith(".hierarchy"):
+            return True
         return os.path.exists("%s.hierarchy" % args[0])
 
 class EnzoStaticOutputInMemory(EnzoStaticOutput):

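One small but handy change above: load("DD0010/data0010.hierarchy") now
behaves like load("DD0010/data0010"), because the ten-character ".hierarchy"
suffix is sliced off before parsing and _is_valid accepts the suffixed form.
The slice, in isolation:

    fn = "DD0010/data0010.hierarchy"
    if fn.endswith(".hierarchy"):
        fn = fn[:-10]              # len(".hierarchy") == 10
    print(fn)                      # DD0010/data0010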

--- a/yt/frontends/enzo/fields.py	Mon Mar 28 13:44:14 2011 -0600
+++ b/yt/frontends/enzo/fields.py	Mon Mar 28 13:44:34 2011 -0600
@@ -34,7 +34,7 @@
     ValidateGridType
 import yt.data_objects.universal_fields
 from yt.utilities.physical_constants import \
-    mh, rho_crit_now
+    mh
 import yt.utilities.amr_utils as amr_utils
 
 class EnzoFieldContainer(CodeFieldInfoContainer):
@@ -206,31 +206,6 @@
           function=_NumberDensity,
           convert_function=_ConvertNumberDensity)
 
-def _ComovingDensity(field,data):
-    ef = (1.0 + data.pf.current_redshift)**3.0
-    return data["Density"]/ef
-add_field("ComovingDensity", function=_ComovingDensity, units=r"\rm{g}/\rm{cm}^3")
-
-# This is rho_total / rho_cr(z).
-def Overdensity(field,data):
-    return (data['Density'] + data['Dark_Matter_Density']) / \
-        (rho_crit_now * (data.pf.hubble_constant**2) * ((1+data.pf.current_redshift)**3))
-add_field("Overdensity",function=Overdensity,units=r"")
-
-# This is rho_b / <rho_b>.
-def _Baryon_Overdensity(field, data):
-    return data['Density']
-def _Convert_Baryon_Overdensity(data):
-    if data.pf.parameters.has_key('omega_baryon_now'):
-        omega_baryon_now = data.pf.parameters['omega_baryon_now']
-    else:
-        omega_baryon_now = 0.0441
-    return 1 / (omega_baryon_now * rho_crit_now * 
-                (data.pf['CosmologyHubbleConstantNow']**2) * 
-                ((1+data.pf['CosmologyCurrentRedshift'])**3))
-add_field("Baryon_Overdensity", function=_Baryon_Overdensity, 
-          convert_function=_Convert_Baryon_Overdensity, units=r"")
-
 # Now we add all the fields that we want to control, but we give a null function
 # This is every Enzo field we can think of.  This will be installation-dependent,
 
@@ -240,7 +215,7 @@
 _default_fields = ["Density","Temperature",
                    "x-velocity","y-velocity","z-velocity",
                    "x-momentum","y-momentum","z-momentum",
-                   "Bx", "By", "Bz"]
+                   "Bx", "By", "Bz", "Dust_Temperature_Density"]
 # else:
 #     _default_fields = ["Density","Temperature","Gas_Energy","Total_Energy",
 #                        "x-velocity","y-velocity","z-velocity"]
@@ -280,6 +255,17 @@
     f._convert_function = _convertVelocity
     f.take_log = False
 
+# Dust temperature - raw field is T_dust * Density
+def _dust_temperature(field, data):
+    return data['Dust_Temperature_Density'] / data['Density']
+def _convert_dust_temperature(data):
+    ef = (1.0 + data.pf.current_redshift)**3.0
+    return data.convert("Density") / ef
+add_field("Dust_Temperature", function=_dust_temperature, 
+          convert_function=_convert_dust_temperature, take_log=True,
+          validators=[ValidateDataField('Dust_Temperature_Density')],
+          units = r"K")
+
 def _spdensity(field, data):
     blank = na.zeros(data.ActiveDimensions, dtype='float32')
     if data.NumberOfParticles == 0: return blank

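The raw Dust_Temperature_Density field stores T_dust multiplied by density,
so the derived field divides that back out and the convert function supplies
the density-unit factor over (1+z)^3. Schematically, yt evaluates a derived
field as function(field, data) times convert_function(data); a sketch with
made-up numbers:

    import numpy as na

    dust_t_density = na.array([3.0e-22])   # raw field: T_dust * rho
    density = na.array([1.0e-23])
    convert_density = 1.0                  # stand-in for data.convert("Density")
    z = 0.0

    dust_t = (dust_t_density / density) * (convert_density / (1.0 + z)**3)
    print(dust_t)                          # [ 30.] -> Kelvin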

--- a/yt/frontends/orion/data_structures.py	Mon Mar 28 13:44:14 2011 -0600
+++ b/yt/frontends/orion/data_structures.py	Mon Mar 28 13:44:34 2011 -0600
@@ -470,6 +470,7 @@
         self.parameters["Time"] = 1. # default unit is 1...
         self.parameters["DualEnergyFormalism"] = 0 # always off.
         self.parameters["EOSType"] = -1 # default
+
         if self.fparameters.has_key("mu"):
             self.parameters["mu"] = self.fparameters["mu"]
 
@@ -504,6 +505,9 @@
                 self.fparameter_filename, 'probin')
         if os.path.isfile(self.fparameter_filename):
             self._parse_fparameter_file()
+            for param in self.fparameters:
+                if orion2enzoDict.has_key(param):
+                    self.parameters[orion2enzoDict[param]]=self.fparameters[param]
         # Let's read the file
         self.unique_identifier = \
             int(os.stat(self.parameter_filename)[ST_CTIME])
@@ -541,6 +545,20 @@
         self.domain_dimensions = self.parameters["TopGridDimensions"]
         self.refine_by = self.parameters["RefineBy"]
 
+        if self.parameters.has_key("ComovingCoordinates") and bool(self.parameters["ComovingCoordinates"]):
+            self.cosmological_simulation = 1
+            self.omega_lambda = self.parameters["CosmologyOmegaLambdaNow"]
+            self.omega_matter = self.parameters["CosmologyOmegaMatterNow"]
+            self.hubble_constant = self.parameters["CosmologyHubbleConstantNow"]
+            a_file = open(os.path.join(self.fullplotdir,'comoving_a'))
+            line = a_file.readline().strip()
+            a_file.close()
+            self.parameters["CosmologyCurrentRedshift"] = 1/float(line) - 1
+            self.current_redshift = self.parameters["CosmologyCurrentRedshift"]
+        else:
+            self.current_redshift = self.omega_lambda = self.omega_matter = \
+                self.hubble_constant = self.cosmological_simulation = 0.0
+
     def _parse_fparameter_file(self):
         """
         Parses the fortran parameter file for Orion. Most of this will
@@ -574,6 +592,7 @@
         n_fields = int(lines[1])
         self.current_time = float(lines[3+n_fields])
 
+
                 
     def _set_units(self):
         """

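The Castro/Orion cosmology hookup reads the current scale factor a from the
plotfile's comoving_a file and converts it via z = 1/a - 1. For instance:

    a = 0.25                 # scale factor, as read from 'comoving_a'
    z = 1.0 / a - 1.0
    print(z)                 # 3.0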

--- a/yt/frontends/orion/definitions.py	Mon Mar 28 13:44:14 2011 -0600
+++ b/yt/frontends/orion/definitions.py	Mon Mar 28 13:44:34 2011 -0600
@@ -63,7 +63,12 @@
 # throughout the code. key is Orion name, value is Enzo/yt equivalent
 orion2enzoDict = {"amr.n_cell": "TopGridDimensions",
                   "materials.gamma": "Gamma",
-                  "amr.ref_ratio": "RefineBy"
+                  "amr.ref_ratio": "RefineBy",
+                  "castro.use_comoving": "ComovingCoordinates",
+                  "castro.redshift_in": "CosmologyInitialRedshift",
+                  "comoving_OmL": "CosmologyOmegaLambdaNow",
+                  "comoving_OmM": "CosmologyOmegaMatterNow",
+                  "comoving_h": "CosmologyHubbleConstantNow"
                   }
 
 yt2orionFieldsDict = {}

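These new entries feed the translation loop added to the Orion parameter
parsing above: every fortran parameter with a known Enzo-style name is
copied into self.parameters under that name. The mechanism in isolation
(dict excerpt):

    orion2enzoDict = {"comoving_h": "CosmologyHubbleConstantNow"}
    fparameters = {"comoving_h": 0.702}

    parameters = {}
    for param in fparameters:
        if param in orion2enzoDict:
            parameters[orion2enzoDict[param]] = fparameters[param]
    print(parameters)        # {'CosmologyHubbleConstantNow': 0.702}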

--- a/yt/gui/plot_editors.py	Mon Mar 28 13:44:14 2011 -0600
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,128 +0,0 @@
-"""
-Figure editors for the Traits GUI
-
-Author: Matthew Turk <matthewturk at gmail.com>
-Affiliation: KIPAC/SLAC/Stanford
-Homepage: http://yt.enzotools.org/
-License:
-  Copyright (C) 2009 Matthew Turk.  All Rights Reserved.
-
-  This file is part of yt.
-
-  yt is free software; you can redistribute it and/or modify
-  it under the terms of the GNU General Public License as published by
-  the Free Software Foundation; either version 3 of the License, or
-  (at your option) any later version.
-
-  This program is distributed in the hope that it will be useful,
-  but WITHOUT ANY WARRANTY; without even the implied warranty of
-  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-  GNU General Public License for more details.
-
-  You should have received a copy of the GNU General Public License
-  along with this program.  If not, see <http://www.gnu.org/licenses/>.
-"""
-
-import sys, matplotlib
-# We want matplotlib to use a wxPython backend
-matplotlib.use('QT4Agg')
-from matplotlib.backends.backend_qt4agg import FigureCanvasQTAgg as FigureCanvas
-from matplotlib.figure import Figure
-from matplotlib.axes import Axes
-
-from enthought.traits.api import Any, Instance
-from enthought.traits.ui.qt4.editor import Editor
-from enthought.traits.ui.qt4.basic_editor_factory import BasicEditorFactory
-
-from enthought.pyface.action.api import ActionController
-
-from enthought.traits.ui.menu import \
-    Menu, Action, Separator, OKCancelButtons, OKButton
-
-from matplotlib.backend_bases import Event as MPLEvent
-
-class _MPLFigureEditor(Editor):
-    """ Snagged from Gael's tutorial """
-
-    scrollable  = True
-    mpl_control = Instance(FigureCanvas)
-
-    def init(self, parent):
-        self.control = self._create_canvas(parent)
-        self.set_tooltip()
-
-    def update_editor(self):
-        pass
-
-    def _create_canvas(self, parent):
-        """ Create the MPL canvas. """
-        # The panel lets us add additional controls.
-        panel = wx.Panel(parent, -1)
-        sizer = wx.BoxSizer(wx.VERTICAL)
-        panel.SetSizer(sizer)
-        # matplotlib commands to create a canvas
-        self.mpl_control = FigureCanvas(panel, -1, self.value)
-        sizer.Add(self.mpl_control, 1, wx.LEFT | wx.TOP | wx.GROW | wx.SHAPED)
-        self.value.canvas.SetMinSize((10,8))
-        return panel
-
-class MPLFigureEditor(BasicEditorFactory):
-    klass = _MPLFigureEditor
-
-class MPLAction(Action):
-    event = Instance(MPLEvent)
-
-class _MPLVMPlotEditor(_MPLFigureEditor, ActionController):
-
-    def _create_canvas(self, parent):
-        panel = _MPLFigureEditor._create_canvas(self, parent)
-        self.mpl_control.mpl_connect("button_press_event", self.on_click)
-        return panel
-
-    def on_click(self, event):
-        if not event.inaxes: return
-        if event.button == 3:
-            my_menu = Menu(MPLAction(name="Recenter", action="object.recenter",
-                                     event=event),
-                           MPLAction(name="Yo!", action="object.do_something",
-                                     event=event))
-            wxmenu = my_menu.create_menu(self.mpl_control, self)
-            self.mpl_control.PopupMenuXY(wxmenu)
-
-    def perform ( self, action ):
-        """
-        This is largely taken/modified from the TreeEditor _perform method.
-        """
-        object            = self.object
-        method_name       = action.action
-        info              = self.ui.info
-        handler           = self.ui.handler
-        event             = action.event
-
-        if method_name.find( '.' ) >= 0:
-            if method_name.find( '(' ) < 0:
-                method_name += '(event)'
-            try:
-                eval( method_name, globals(),
-                      { 'object':  object,
-                        'editor':  self,
-                        'info':    info,
-                        'event':   event,
-                        'handler': handler } )
-            except:
-                # fixme: Should the exception be logged somewhere?
-                print sys.exc_info()
-                
-            return
-
-        method = getattr( handler, method_name, None )
-        if method is not None:
-            method( info, object )
-            return
-
-        if action.on_perform is not None:
-            action.on_perform( object )
-
-class MPLVMPlotEditor(BasicEditorFactory):
-    klass = _MPLVMPlotEditor
-


--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/yt/gui/reason/basic_repl.py	Mon Mar 28 13:44:34 2011 -0600
@@ -0,0 +1,140 @@
+"""
+A read-eval-print-loop.  This code was released in the CherryPy project
+initially, but has been heavily modified and again re-released in compliance
+with the terms of its original license.
+
+Author: Matthew Turk <matthewturk at gmail.com>
+Affiliation: NSF / Columbia
+Homepage: http://yt.enzotools.org/
+License:
+  Copyright (C) 2011 Matthew Turk.  All Rights Reserved.
+
+  This file is part of yt.
+
+  yt is free software; you can redistribute it and/or modify
+  it under the terms of the GNU General Public License as published by
+  the Free Software Foundation; either version 3 of the License, or
+  (at your option) any later version.
+
+  This program is distributed in the hope that it will be useful,
+  but WITHOUT ANY WARRANTY; without even the implied warranty of
+  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+  GNU General Public License for more details.
+
+  You should have received a copy of the GNU General Public License
+  along with this program.  If not, see <http://www.gnu.org/licenses/>.
+"""
+
+import codeop
+import inspect
+import re
+import json
+import sys
+import traceback
+from cStringIO import StringIO
+
+class ProgrammaticREPL(object):
+    
+    def __init__(self, locals=None):
+        self.locals = {}
+        if locals:
+            self.locals.update(locals)
+        self.buffer = []
+        # Nominally at this point we could populate our namespace with widgets
+        # or other useful functions.  We aren't really ready for that just yet.
+    
+    def push(self, line):
+        """Push 'line' and return exec results (None if more input needed)."""
+        if line == "help":
+            return "Type help(object) for help about object."
+        if line == "help()":
+            return "You cannot call help() without an argument."
+        
+        self.buffer.append(line)
+        source = "\n".join(self.buffer)
+        
+        try:
+            code = codeop.compile_command(source, "<HTTP input>", 'single')
+        except (OverflowError, SyntaxError, ValueError):
+            self.buffer = []
+            return traceback.format_exc()
+        
+        if code is None:
+            # More lines needed.
+            return None
+        
+        self.buffer = []
+        return self.execute(code)
+    
+    def execute(self, code):
+        """Execute the given code in self.locals and return any stdout/sterr."""
+        out = StringIO()
+        oldout = sys.stdout
+        olderr = sys.stderr
+        sys.stdout = sys.stderr = out
+        try:
+            try:
+                exec code in self.locals
+            except:
+                result = traceback.format_exc()
+            else:
+                result = out.getvalue()
+        finally:
+            sys.stdout = oldout
+            sys.stderr = olderr
+        out.close()
+        return result
+    
+    def dir(self, line):
+        """Examine a partial line and provide attr list of final expr."""
+        line = re.split(r"\s", line)[-1].strip()
+        # Support lines like "thing.attr" as "thing.", because the browser
+        # may not finish calculating the partial line until after the user
+        # has clicked on a few more keys.
+        line = ".".join(line.split(".")[:-1])
+        try:
+            result = eval("dir(%s)" % line, {}, self.locals)
+        except:
+            return []
+        return result
+    
+    def doc(self, line):
+        """Examine a partial line and provide sig+doc of final expr."""
+        line = re.split(r"\s", line)[-1].strip()
+        # Support lines like "func(text" as "func(", because the browser
+        # may not finish calculating the partial line until after the user
+        # has clicked on a few more keys.
+        line = "(".join(line.split("(")[:-1])
+        try:
+            result = eval(line, {}, self.locals)
+            try:
+                if isinstance(result, type):
+                    func = result.__init__
+                else:
+                    func = result
+                args, varargs, varkw, defaults = inspect.getargspec(func)
+            except TypeError:
+                if callable(result):
+                    doc = getattr(result, "__doc__", "") or ""
+                    return "%s\n\n%s" % (line, doc)
+                return None
+        except:
+            return None
+        
+        if args and args[0] == 'self':
+            args.pop(0)
+        missing = object()
+        defaults = defaults or []
+        defaults = ([missing] * (len(args) - len(defaults))) + list(defaults)
+        arglist = []
+        for a, d in zip(args, defaults):
+            if d is missing:
+                arglist.append(a)
+            else:
+                arglist.append("%s=%s" % (a, d))
+        if varargs:
+            arglist.append("*%s" % varargs)
+        if varkw:
+            arglist.append("**%s" % varkw)
+        doc = getattr(result, "__doc__", "") or ""
+        return "%s(%s)\n%s" % (line, ", ".join(arglist), doc)

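push() buffers input until codeop.compile_command reports a complete
statement, mirroring an interactive prompt. A quick sketch of the resulting
protocol (None means "send more lines"):

    repl = ProgrammaticREPL()
    print(repl.push("x = 1 + 1"))    # '' -- complete, executed, no output
    print(repl.push("if x == 2:"))   # None -- block is still open
    print(repl.push("    print x"))  # None -- still open
    print(repl.push(""))             # '2\n' -- blank line closes and runs it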

--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/yt/gui/reason/bottle.py	Mon Mar 28 13:44:34 2011 -0600
@@ -0,0 +1,2095 @@
+# -*- coding: utf-8 -*-
+"""
+Bottle is a fast and simple micro-framework for small web applications. It
+offers request dispatching (Routes) with url parameter support, templates,
+a built-in HTTP Server and adapters for many third party WSGI/HTTP-server and
+template engines - all in a single file and with no dependencies other than the
+Python Standard Library.
+
+Homepage and documentation: http://bottle.paws.de/
+
+Copyright (c) 2010, Marcel Hellkamp.
+License: MIT (see LICENSE.txt for details)
+"""
+
+from __future__ import with_statement
+
+__author__ = 'Marcel Hellkamp'
+__version__ = '0.9.dev'
+__license__ = 'MIT'
+
+import base64
+import cgi
+import email.utils
+import functools
+import hmac
+import httplib
+import inspect
+import itertools
+import mimetypes
+import os
+import re
+import subprocess
+import sys
+import tempfile
+import thread
+import threading
+import time
+import tokenize
+import warnings
+
+from Cookie import SimpleCookie
+from tempfile import TemporaryFile
+from traceback import format_exc
+from urllib import quote as urlquote
+from urlparse import urlunsplit, urljoin
+
+try: from collections import MutableMapping as DictMixin
+except ImportError: # pragma: no cover
+    from UserDict import DictMixin
+
+try: from urlparse import parse_qs
+except ImportError: # pragma: no cover
+    from cgi import parse_qs
+
+try: import cPickle as pickle
+except ImportError: # pragma: no cover
+    import pickle
+
+try: from json import dumps as json_dumps
+except ImportError: # pragma: no cover
+    try: from simplejson import dumps as json_dumps
+    except ImportError: # pragma: no cover
+        try: from django.utils.simplejson import dumps as json_dumps
+        except ImportError: # pragma: no cover
+            json_dumps = None
+
+if sys.version_info >= (3,0,0): # pragma: no cover
+    # See Request.POST
+    from io import BytesIO
+    from io import TextIOWrapper
+    class NCTextIOWrapper(TextIOWrapper):
+        ''' Garbage collecting an io.TextIOWrapper(buffer) instance closes the
+            wrapped buffer. This subclass keeps it open. '''
+        def close(self): pass
+    StringType = bytes
+    def touni(x, enc='utf8'):
+        """ Convert anything to unicode """
+        return str(x, encoding=enc) if isinstance(x, bytes) else str(x)
+else:
+    from StringIO import StringIO as BytesIO
+    from types import StringType
+    NCTextIOWrapper = None
+    def touni(x, enc='utf8'):
+        """ Convert anything to unicode """
+        return x if isinstance(x, unicode) else unicode(str(x), encoding=enc)
+
+def tob(data, enc='utf8'):
+    """ Convert anything to bytes """
+    return data.encode(enc) if isinstance(data, unicode) else StringType(data)
+
+# Convert strings and unicode to native strings
+if sys.version_info >= (3,0,0):
+    tonat = touni
+else:
+    tonat = tob
+tonat.__doc__ = """ Convert anything to native strings """
+
+
+# Backward compatibility
+def depr(message, critical=False):
+    if critical: raise DeprecationWarning(message)
+    warnings.warn(message, DeprecationWarning, stacklevel=3)
+
+# Small helpers
+def makelist(data):
+    if isinstance(data, (tuple, list, set, dict)): return list(data)
+    elif data: return [data]
+    else: return []
+
+
+
+
+
+
+###############################################################################
+# Exceptions and Events ########################################################
+###############################################################################
+
+class BottleException(Exception):
+    """ A base class for exceptions used by bottle. """
+    pass
+
+
+class HTTPResponse(BottleException):
+    """ Used to break execution and immediately finish the response """
+    def __init__(self, output='', status=200, header=None):
+        super(HTTPResponse, self).__init__("HTTP Response %d" % status)
+        self.status = int(status)
+        self.output = output
+        self.headers = HeaderDict(header) if header else None
+
+    def apply(self, response):
+        if self.headers:
+            for key, value in self.headers.iterallitems():
+                response.headers[key] = value
+        response.status = self.status
+
+
+class HTTPError(HTTPResponse):
+    """ Used to generate an error page """
+    def __init__(self, code=500, output='Unknown Error', exception=None, traceback=None, header=None):
+        super(HTTPError, self).__init__(output, code, header)
+        self.exception = exception
+        self.traceback = traceback
+
+    def __repr__(self):
+        return ''.join(ERROR_PAGE_TEMPLATE.render(e=self))
+
+
+
+
+
+
+###############################################################################
+# Routing ######################################################################
+###############################################################################
+
+class RouteError(BottleException):
+    """ This is a base class for all routing related exceptions """
+
+
+class RouteSyntaxError(RouteError):
+    """ The route parser found something not supported by this router """
+
+
+class RouteBuildError(RouteError):
+    """ The route could not been built """
+
+
+class Route(object):
+    ''' Represents a single route and can parse the dynamic route syntax '''
+    syntax = re.compile(r'(?<!\\):([a-zA-Z_]+)?(?:#(.*?)#)?')
+    default = '[^/]+'
+
+    def __init__(self, route, target=None, name=None, static=False):
+        """ Create a Route. The route string may contain `:key`,
+            `:key#regexp#` or `:#regexp#` tokens for each dynamic part of the
+            route. These can be escaped with a backslash in front of the `:`
+            and are completely ignored if static is true. A name may be used
+            to refer to this route later (depends on Router)
+        """
+        self.route = route.replace('\\:',':')
+        self.target = target
+        self.name = name
+        self.realroute = route.replace(':','\\:') if static else route
+        self.tokens = self.syntax.split(self.realroute)
+
+    def group_re(self):
+        ''' Return a regexp pattern with named groups '''
+        out = ''
+        for i, part in enumerate(self.tokens):
+            if i%3 == 0:   out += re.escape(part.replace('\:',':'))
+            elif i%3 == 1: out += '(?P<%s>' % part if part else '(?:'
+            else:          out += '%s)' % (part or self.default)
+        return out
+        
+    def flat_re(self):
+        ''' Return a regexp pattern with non-grouping parentheses '''
+        rf = lambda m: m.group(0) if len(m.group(1)) % 2 else m.group(1) + '(?:'
+        return re.sub(r'(\\*)(\(\?P<[^>]*>|\((?!\?))', rf, self.group_re())
+
+    def format_str(self):
+        ''' Return a format string with named fields. '''
+        out, c = '', 0
+        for i, part in enumerate(self.tokens):
+            if i%3 == 0:  out += part.replace('\\:',':').replace('%','%%')
+            elif i%3 == 1:
+                if not part: part = 'anon%d' % c; c+=1
+                out += '%%(%s)s' % part
+        return out
+
+    @property
+    def static(self):
+        return len(self.tokens) == 1
+
+    def __repr__(self):
+        return "<Route(%s) />" % repr(self.realroute)
+
+    def __eq__(self, other):
+        return (self.realroute) == (other.realroute)
+
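The dynamic-route syntax comes down to rewriting ':name' tokens into named
capture groups, with '[^/]+' as the default pattern. The idea behind
group_re, reduced to a sketch that ignores the ':key#regexp#' and escaping
cases the class handles:

    import re

    def to_group_re(route, default='[^/]+'):
        # each ':name' wildcard becomes a named group for one path fragment
        return re.sub(r':([a-zA-Z_]+)',
                      lambda m: '(?P<%s>%s)' % (m.group(1), default), route)

    pattern = re.compile('^%s$' % to_group_re('hello/:name'))
    print(pattern.match('hello/world').groupdict())   # {'name': 'world'}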
+
+class Router(object):
+    ''' A router associates a string (e.g. URL) with an object (e.g. function)
+        Some dynamic routes may extract parts of the string and provide them as
+        a dictionary. This router matches a string against multiple routes and
+        returns the associated object along with the extracted data.
+    '''
+
+    def __init__(self):
+        self.routes  = []  # List of all installed routes
+        self.named   = {}  # Cache for named routes and their format strings
+        self.static  = {}  # Cache for static routes
+        self.dynamic = []  # Search structure for dynamic routes
+        self.compiled = False
+
+    def add(self, route, target=None, **ka):
+        """ Add a route->target pair or a :class:`Route` object to the Router.
+            Return the Route object. See :class:`Route` for details.
+        """
+        if not isinstance(route, Route):
+            route = Route(route, target, **ka)
+        if self.get_route(route):
+            raise RouteError('Route %s is not unique.' % route)
+        self.routes.append(route)
+        self.compiled, self.named, self.static, self.dynamic = False, {}, {}, []
+        return route
+
+    def get_route(self, route, target=None, **ka):
+        ''' Get a route from the router by specifying either the same
+            parameters as in :meth:`add` or comparing to an instance of
+            :class:`Route`. Note that not all parameters are considered by the
+            compare function. '''
+        if not isinstance(route, Route):
+            route = Route(route, **ka)
+        for known in self.routes:
+            if route == known:
+                return known
+        return None
+
+    def match(self, uri):
+        ''' Match a URI and return a (target, urlargs) tuple '''
+        if uri in self.static:
+            return self.static[uri], {}
+        for combined, subroutes in self.dynamic:
+            match = combined.match(uri)
+            if not match: continue
+            target, args_re = subroutes[match.lastindex - 1]
+            args = args_re.match(uri).groupdict() if args_re else {}
+            return target, args
+        if not self.compiled: # Late check to reduce overhead on hits
+            self.compile() # Compile and try again.
+            return self.match(uri)
+        return None, {}
+
+    def build(self, _name, **args):
+        ''' Build a URI out of a named route and values for the wildcards. '''
+        try:
+            return self.named[_name] % args
+        except KeyError:
+            if not self.compiled: # Late check to reduce overhead on hits
+                self.compile() # Compile and try again.
+                return self.build(_name, **args)
+            raise RouteBuildError("No route found with name '%s'." % _name)
+
+    def compile(self):
+        ''' Build the search structures. Call this before actually using the
+            router.'''
+        self.named = {}
+        self.static = {}
+        self.dynamic = []
+        for route in self.routes:
+            if route.name:
+                self.named[route.name] = route.format_str()
+            if route.static:
+                self.static[route.route] = route.target
+                continue
+            gpatt = route.group_re()
+            fpatt = route.flat_re()
+            try:
+                gregexp = re.compile('^(%s)$' % gpatt) if '(?P' in gpatt else None
+                combined = '%s|(^%s$)' % (self.dynamic[-1][0].pattern, fpatt)
+                self.dynamic[-1] = (re.compile(combined), self.dynamic[-1][1])
+                self.dynamic[-1][1].append((route.target, gregexp))
+            except (AssertionError, IndexError), e: # AssertionError: Too many groups
+                self.dynamic.append((re.compile('(^%s$)'%fpatt),
+                                    [(route.target, gregexp)]))
+            except re.error, e:
+                raise RouteSyntaxError("Could not add Route: %s (%s)" % (route, e))
+        self.compiled = True
+
+    def __eq__(self, other):
+        return self.routes == other.routes
+
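Router.compile or's many per-route patterns into one big regex and uses
match.lastindex to recover which alternative fired, so a dynamic lookup
costs one regex match rather than one per route. The trick in isolation:

    import re

    routes = [('^/pages$', 'list_pages'),
              ('^/pages/[^/]+$', 'show_page')]
    combined = re.compile('|'.join('(%s)' % patt for patt, _ in routes))

    m = combined.match('/pages/42')
    print(routes[m.lastindex - 1][1])   # show_page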
+
+
+
+
+
+###############################################################################
+# Application Object ###########################################################
+###############################################################################
+
+class Bottle(object):
+    """ WSGI application """
+
+    def __init__(self, catchall=True, autojson=True, config=None):
+        """ Create a new bottle instance.
+            You usually don't do that. Use `bottle.app.push()` instead.
+        """
+        self.routes = Router()
+        self.mounts = {}
+        self.error_handler = {}
+        self.catchall = catchall
+        self.config = config or {}
+        self.serve = True
+        self.castfilter = []
+        if autojson and json_dumps:
+            self.add_filter(dict, dict2json)
+        self.hooks = {'before_request': [], 'after_request': []}
+
+    def optimize(self, *a, **ka):
+        depr("Bottle.optimize() is obsolete.")
+
+    def mount(self, app, script_path):
+        ''' Mount a Bottle application to a specific URL prefix '''
+        if not isinstance(app, Bottle):
+            raise TypeError('Only Bottle instances are supported for now.')
+        script_path = '/'.join(filter(None, script_path.split('/')))
+        path_depth = script_path.count('/') + 1
+        if not script_path:
+            raise TypeError('Empty script_path. Perhaps you want a merge()?')
+        for other in self.mounts:
+            if other.startswith(script_path):
+                raise TypeError('Conflict with existing mount: %s' % other)
+        @self.route('/%s/:#.*#' % script_path, method="ANY")
+        def mountpoint():
+            request.path_shift(path_depth)
+            return app.handle(request.path, request.method)
+        self.mounts[script_path] = app
+
+    def add_filter(self, ftype, func):
+        ''' Register a new output filter. Whenever bottle hits a handler output
+            matching `ftype`, `func` is applied to it. '''
+        if not isinstance(ftype, type):
+            raise TypeError("Expected type object, got %s" % type(ftype))
+        self.castfilter = [(t, f) for (t, f) in self.castfilter if t != ftype]
+        self.castfilter.append((ftype, func))
+        self.castfilter.sort()
+
+    def match_url(self, path, method='GET'):
+        """ Find a callback bound to a path and a specific HTTP method.
+            Return (callback, param) tuple or raise HTTPError.
+            method: HEAD falls back to GET. All methods fall back to ANY.
+        """
+        path, method = path.strip().lstrip('/'), method.upper()
+        callbacks, args = self.routes.match(path)
+        if not callbacks:
+            raise HTTPError(404, "Not found: " + path)
+        if method in callbacks:
+            return callbacks[method], args
+        if method == 'HEAD' and 'GET' in callbacks:
+            return callbacks['GET'], args
+        if 'ANY' in callbacks:
+            return callbacks['ANY'], args
+        allow = [m for m in callbacks if m != 'ANY']
+        if 'GET' in allow and 'HEAD' not in allow:
+            allow.append('HEAD')
+        raise HTTPError(405, "Method not allowed.",
+                        header=[('Allow',",".join(allow))])
+
+    def get_url(self, routename, **kargs):
+        """ Return a string that matches a named route """
+        scriptname = request.environ.get('SCRIPT_NAME', '').strip('/') + '/'
+        location = self.routes.build(routename, **kargs).lstrip('/')
+        return urljoin(urljoin('/', scriptname), location)
+
+    def route(self, path=None, method='GET', no_hooks=False, decorate=None,
+              template=None, template_opts={}, callback=None, **kargs):
+        """ Decorator: Bind a callback function to a request path.
+
+            :param path: The request path or a list of paths to listen to. See 
+              :class:`Router` for syntax details. If no path is specified, it
+              is automatically generated from the callback signature. See
+              :func:`yieldroutes` for details.
+            :param method: The HTTP method (POST, GET, ...) or a list of
+              methods to listen to. (default: GET)
+            :param decorate: A decorator or a list of decorators. These are
+              applied to the callback in reverse order.
+            :param no_hooks: If true, application hooks are not triggered
+              by this route. (default: False)
+            :param template: The template to use for this callback.
+              (default: no template)
+            :param template_opts: A dict with additional template parameters.
+            :param static: If true, all paths are static even if they contain
+              dynamic syntax tokens. (default: False)
+            :param name: The name for this route. (default: None)
+            :param callback: If set, the route decorator is directly applied
+              to the callback and the callback is returned instead. This
+              equals ``Bottle.route(...)(callback)``.
+        """
+        # @route can be used without any parameters
+        if callable(path): path, callback = None, path
+        # Build up the list of decorators
+        decorators = makelist(decorate)
+        if template:     decorators.insert(0, view(template, **template_opts))
+        if not no_hooks: decorators.append(self._add_hook_wrapper)
+        def wrapper(func):
+            callback = func
+            for decorator in reversed(decorators):
+                callback = decorator(callback)
+            functools.update_wrapper(callback, func)
+            for route in makelist(path) or yieldroutes(func):
+                for meth in makelist(method):
+                    route = route.strip().lstrip('/')
+                    meth = meth.strip().upper()
+                    old = self.routes.get_route(route, **kargs)
+                    if old:
+                        old.target[meth] = callback
+                    else:
+                        self.routes.add(route, {meth: callback}, **kargs)
+            return func
+        return wrapper(callback) if callback else wrapper
+
+    def _add_hook_wrapper(self, func):
+        ''' Add hooks to a callable. See #84 '''
+        @functools.wraps(func)
+        def wrapper(*a, **ka):
+            for hook in self.hooks['before_request']: hook()
+            response.output = func(*a, **ka)
+            for hook in self.hooks['after_request']: hook()
+            return response.output
+        return wrapper
+
+    def get(self, path=None, method='GET', **kargs):
+        """ Decorator: Bind a function to a GET request path.
+            See :meth:'route' for details. """
+        return self.route(path, method, **kargs)
+
+    def post(self, path=None, method='POST', **kargs):
+        """ Decorator: Bind a function to a POST request path.
+            See :meth:'route' for details. """
+        return self.route(path, method, **kargs)
+
+    def put(self, path=None, method='PUT', **kargs):
+        """ Decorator: Bind a function to a PUT request path.
+            See :meth:'route' for details. """
+        return self.route(path, method, **kargs)
+
+    def delete(self, path=None, method='DELETE', **kargs):
+        """ Decorator: Bind a function to a DELETE request path.
+            See :meth:'route' for details. """
+        return self.route(path, method, **kargs)
+
+    def error(self, code=500):
+        """ Decorator: Register an output handler for a HTTP error code"""
+        def wrapper(handler):
+            self.error_handler[int(code)] = handler
+            return handler
+        return wrapper
+
+    def hook(self, name):
+        """ Return a decorator that adds a callback to the specified hook. """
+        def wrapper(func):
+            self.add_hook(name, func)
+            return func
+        return wrapper
+
+    def add_hook(self, name, func):
+        ''' Add a callback to a hook. '''
+        if name not in self.hooks:
+            raise ValueError("Unknown hook name %s" % name)
+        if name in ('after_request',):
+            self.hooks[name].insert(0, func)
+        else:
+            self.hooks[name].append(func)
+
+    def remove_hook(self, name, func):
+        ''' Remove a callback from a hook. '''
+        if name not in self.hooks:
+            raise ValueError("Unknown hook name %s" % name)
+        self.hooks[name].remove(func)
+
+    def handle(self, url, method):
+        """ Execute the handler bound to the specified url and method and return
+        its output. If catchall is true, exceptions are caught and returned as
+        HTTPError(500) objects. """
+        if not self.serve:
+            return HTTPError(503, "Server stopped")
+        try:
+            handler, args = self.match_url(url, method)
+            return handler(**args)
+        except HTTPResponse, e:
+            return e
+        except Exception, e:
+            if isinstance(e, (KeyboardInterrupt, SystemExit, MemoryError))\
+            or not self.catchall:
+                raise
+            return HTTPError(500, 'Unhandled exception', e, format_exc(10))
+
+    def _cast(self, out, request, response, peek=None):
+        """ Try to convert the parameter into something WSGI compatible and set
+        correct HTTP headers when possible.
+        Support: False, str, unicode, dict, HTTPResponse, HTTPError, file-like,
+        iterable of strings and iterable of unicodes
+        """
+        # Filtered types (recursive, because they may return anything)
+        for testtype, filterfunc in self.castfilter:
+            if isinstance(out, testtype):
+                return self._cast(filterfunc(out), request, response)
+
+        # Empty output is done here
+        if not out:
+            response.headers['Content-Length'] = 0
+            return []
+        # Join lists of byte or unicode strings. Mixed lists are NOT supported
+        if isinstance(out, (tuple, list))\
+        and isinstance(out[0], (StringType, unicode)):
+            out = out[0][0:0].join(out) # b'abc'[0:0] -> b''
+        # Encode unicode strings
+        if isinstance(out, unicode):
+            out = out.encode(response.charset)
+        # Byte Strings are just returned
+        if isinstance(out, StringType):
+            response.headers['Content-Length'] = str(len(out))
+            return [out]
+        # HTTPError or HTTPException (recursive, because they may wrap anything)
+        if isinstance(out, HTTPError):
+            out.apply(response)
+            return self._cast(self.error_handler.get(out.status, repr)(out), request, response)
+        if isinstance(out, HTTPResponse):
+            out.apply(response)
+            return self._cast(out.output, request, response)
+
+        # File-like objects.
+        if hasattr(out, 'read'):
+            if 'wsgi.file_wrapper' in request.environ:
+                return request.environ['wsgi.file_wrapper'](out)
+            elif hasattr(out, 'close') or not hasattr(out, '__iter__'):
+                return WSGIFileWrapper(out)
+
+        # Handle Iterables. We peek into them to detect their inner type.
+        try:
+            out = iter(out)
+            first = out.next()
+            while not first:
+                first = out.next()
+        except StopIteration:
+            return self._cast('', request, response)
+        except HTTPResponse, e:
+            first = e
+        except Exception, e:
+            first = HTTPError(500, 'Unhandled exception', e, format_exc(10))
+            if isinstance(e, (KeyboardInterrupt, SystemExit, MemoryError))\
+            or not self.catchall:
+                raise
+        # These are the inner types allowed in iterator or generator objects.
+        if isinstance(first, HTTPResponse):
+            return self._cast(first, request, response)
+        if isinstance(first, StringType):
+            return itertools.chain([first], out)
+        if isinstance(first, unicode):
+            return itertools.imap(lambda x: x.encode(response.charset),
+                                  itertools.chain([first], out))
+        return self._cast(HTTPError(500, 'Unsupported response type: %s'\
+                                         % type(first)), request, response)
+
+    def wsgi(self, environ, start_response):
+        """ The bottle WSGI-interface. """
+        try:
+            environ['bottle.app'] = self
+            request.bind(environ)
+            response.bind()
+            out = self.handle(request.path, request.method)
+            out = self._cast(out, request, response)
+            # rfc2616 section 4.3
+            if response.status in (100, 101, 204, 304) or request.method == 'HEAD':
+                if hasattr(out, 'close'): out.close()
+                out = []
+            status = '%d %s' % (response.status, HTTP_CODES[response.status])
+            start_response(status, response.headerlist)
+            return out
+        except (KeyboardInterrupt, SystemExit, MemoryError):
+            raise
+        except Exception, e:
+            if not self.catchall: raise
+            err = '<h1>Critical error while processing request: %s</h1>' \
+                  % environ.get('PATH_INFO', '/')
+            if DEBUG:
+                err += '<h2>Error:</h2>\n<pre>%s</pre>\n' % repr(e)
+                err += '<h2>Traceback:</h2>\n<pre>%s</pre>\n' % format_exc(10)
+            environ['wsgi.errors'].write(err) #TODO: wsgi.error should not get html
+            start_response('500 INTERNAL SERVER ERROR', [('Content-Type', 'text/html')])
+            return [tob(err)]
+        
+    def __call__(self, environ, start_response):
+        return self.wsgi(environ, start_response)
+
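Putting the application object to work follows the usual bottle pattern:
one Bottle instance, route-decorated callbacks, and the instance itself is
the WSGI callable. A minimal sketch (wiring it to an actual server is
omitted):

    app = Bottle()

    @app.route('/hello/:name')
    def hello(name):
        return 'Hello, %s!' % name

    # app(environ, start_response) is now a complete WSGI application.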
+
+
+
+
+
+###############################################################################
+# HTTP and WSGI Tools ##########################################################
+###############################################################################
+
+class Request(threading.local, DictMixin):
+    """ Represents a single HTTP request using thread-local attributes.
+        The Request object wraps a WSGI environment and can be used as such.
+    """
+    def __init__(self, environ=None):
+        """ Create a new Request instance.
+        
+            You usually don't do this but use the global `bottle.request`
+            instance instead.
+        """
+        self.bind(environ or {},)
+
+    def bind(self, environ):
+        """ Bind a new WSGI environment.
+            
+            This is done automatically for the global `bottle.request`
+            instance on every request.
+        """
+        self.environ = environ
+        # These attributes are used anyway, so it is ok to compute them here
+        self.path = '/' + environ.get('PATH_INFO', '/').lstrip('/')
+        self.method = environ.get('REQUEST_METHOD', 'GET').upper()
+
+    @property
+    def _environ(self):
+        depr("Request._environ renamed to Request.environ")
+        return self.environ
+
+    def copy(self):
+        ''' Returns a copy of self '''
+        return Request(self.environ.copy())
+
+    def path_shift(self, shift=1):
+        ''' Shift path fragments from PATH_INFO to SCRIPT_NAME and vice versa.
+
+          :param shift: The number of path fragments to shift. May be negative to
+            change the shift direction. (default: 1)
+        '''
+        script_name = self.environ.get('SCRIPT_NAME','/')
+        self['SCRIPT_NAME'], self.path = path_shift(script_name, self.path, shift)
+        self['PATH_INFO'] = self.path
+
+    def __getitem__(self, key): return self.environ[key]
+    def __delitem__(self, key): self[key] = ""; del(self.environ[key])
+    def __iter__(self): return iter(self.environ)
+    def __len__(self): return len(self.environ)
+    def keys(self): return self.environ.keys()
+    def __setitem__(self, key, value):
+        """ Shortcut for Request.environ.__setitem__ """
+        self.environ[key] = value
+        todelete = []
+        if key in ('PATH_INFO','REQUEST_METHOD'):
+            self.bind(self.environ)
+        elif key == 'wsgi.input': todelete = ('body','forms','files','params')
+        elif key == 'QUERY_STRING': todelete = ('get','params')
+        elif key.startswith('HTTP_'): todelete = ('headers', 'cookies')
+        for key in todelete:
+            if 'bottle.' + key in self.environ:
+                del self.environ['bottle.' + key]
+
+    @property
+    def query_string(self):
+        """ The content of the QUERY_STRING environment variable. """
+        return self.environ.get('QUERY_STRING', '')
+
+    @property
+    def fullpath(self):
+        """ Request path including SCRIPT_NAME (if present) """
+        return self.environ.get('SCRIPT_NAME', '').rstrip('/') + self.path
+
+    @property
+    def url(self):
+        """ Full URL as requested by the client (computed).
+
+            This value is constructed out of different environment variables
+            and includes scheme, host, port, scriptname, path and query string. 
+        """
+        scheme = self.environ.get('wsgi.url_scheme', 'http')
+        host   = self.environ.get('HTTP_X_FORWARDED_HOST', self.environ.get('HTTP_HOST', None))
+        if not host:
+            host = self.environ.get('SERVER_NAME')
+            port = self.environ.get('SERVER_PORT', '80')
+            if scheme + port not in ('https443', 'http80'):
+                host += ':' + port
+        parts = (scheme, host, urlquote(self.fullpath), self.query_string, '')
+        return urlunsplit(parts)
+
+    @property
+    def content_length(self):
+        """ Content-Length header as an integer, -1 if not specified """
+        return int(self.environ.get('CONTENT_LENGTH','') or -1)
+
+    @property
+    def header(self):
+        depr("The Request.header property was renamed to Request.headers")
+        return self.headers
+
+    @property
+    def headers(self):
+        ''' :class:`WSGIHeaderDict` filled with request headers. '''
+        if 'bottle.headers' not in self.environ:
+            self.environ['bottle.headers'] = WSGIHeaderDict(self.environ)
+        return self.environ['bottle.headers']
+
+    @property
+    def GET(self):
+        """ The QUERY_STRING parsed into a MultiDict.
+
+            Keys and values are strings. Multiple values per key are possible.
+            See MultiDict for details.
+        """
+        if 'bottle.get' not in self.environ:
+            data = parse_qs(self.query_string, keep_blank_values=True)
+            get = self.environ['bottle.get'] = MultiDict()
+            for key, values in data.iteritems():
+                for value in values:
+                    get[key] = value
+        return self.environ['bottle.get']
+
+    @property
+    def POST(self):
+        """ Property: The HTTP POST body parsed into a MultiDict.
+
+            This supports url-encoded and multipart POST requests. Multipart
+            is commonly used for file uploads and may result in some of the
+            values being cgi.FieldStorage objects instead of strings.
+
+            Multiple values per key are possible. See MultiDict for details.
+        """
+        if 'bottle.post' not in self.environ:
+            self.environ['bottle.post'] = MultiDict()
+            self.environ['bottle.forms'] = MultiDict()
+            self.environ['bottle.files'] = MultiDict()
+            safe_env = {'QUERY_STRING':''} # Build a safe environment for cgi
+            for key in ('REQUEST_METHOD', 'CONTENT_TYPE', 'CONTENT_LENGTH'):
+                if key in self.environ: safe_env[key] = self.environ[key]
+            if NCTextIOWrapper:
+                fb = NCTextIOWrapper(self.body, encoding='ISO-8859-1', newline='\n')
+                # TODO: Content-Length may be wrong now. Does cgi.FieldStorage
+                # use it at all? I think not, because all tests pass.
+            else:
+                fb = self.body
+            data = cgi.FieldStorage(fp=fb, environ=safe_env, keep_blank_values=True)
+            for item in data.list or []:
+                if item.filename:
+                    self.environ['bottle.post'][item.name] = item
+                    self.environ['bottle.files'][item.name] = item
+                else:
+                    self.environ['bottle.post'][item.name] = item.value
+                    self.environ['bottle.forms'][item.name] = item.value
+        return self.environ['bottle.post']
+
+    @property
+    def forms(self):
+        """ Property: HTTP POST form data parsed into a MultiDict. """
+        if 'bottle.forms' not in self.environ: self.POST
+        return self.environ['bottle.forms']
+
+    @property
+    def files(self):
+        """ Property: HTTP POST file uploads parsed into a MultiDict. """
+        if 'bottle.files' not in self.environ: self.POST
+        return self.environ['bottle.files']
+        
+    @property
+    def params(self):
+        """ A combined MultiDict with POST and GET parameters. """
+        if 'bottle.params' not in self.environ:
+            self.environ['bottle.params'] = MultiDict(self.GET)
+            self.environ['bottle.params'].update(dict(self.forms))
+        return self.environ['bottle.params']
+
+    @property
+    def body(self):
+        """ The HTTP request body as a seekable buffer object.
+        
+            This property returns a copy of the `wsgi.input` stream and should
+            be used instead of `environ['wsgi.input']`.
+         """
+        if 'bottle.body' not in self.environ:
+            maxread = max(0, self.content_length)
+            stream = self.environ['wsgi.input']
+            body = BytesIO() if maxread < MEMFILE_MAX else TemporaryFile(mode='w+b')
+            while maxread > 0:
+                part = stream.read(min(maxread, MEMFILE_MAX))
+                if not part: #TODO: Wrong content_length. Error? Do nothing?
+                    break
+                body.write(part)
+                maxread -= len(part)
+            self.environ['wsgi.input'] = body
+            self.environ['bottle.body'] = body
+        self.environ['bottle.body'].seek(0)
+        return self.environ['bottle.body']
+
+    @property
+    def auth(self): #TODO: Tests and docs. Add support for digest. namedtuple?
+        """ HTTP authorization data as a (user, passwd) tuple. (experimental)
+        
+            This implementation currently only supports basic auth and returns
+            None on errors.
+        """
+        return parse_auth(self.headers.get('Authorization',''))
+
+    @property
+    def COOKIES(self):
+        """ Cookie information parsed into a dictionary.
+        
+            Secure cookies are NOT decoded automatically. See
+            Request.get_cookie() for details.
+        """
+        if 'bottle.cookies' not in self.environ:
+            raw_dict = SimpleCookie(self.headers.get('Cookie',''))
+            self.environ['bottle.cookies'] = {}
+            for cookie in raw_dict.itervalues():
+                self.environ['bottle.cookies'][cookie.key] = cookie.value
+        return self.environ['bottle.cookies']
+
+    def get_cookie(self, key, secret=None):
+        """ Return the content of a cookie. To read a `Secure Cookies`, use the
+            same `secret` as used to create the cookie (see
+            :meth:`Response.set_cookie`). If anything goes wrong, None is
+            returned.
+        """
+        value = self.COOKIES.get(key)
+        if secret and value:
+            dec = cookie_decode(value, secret) # (key, value) tuple or None
+            return dec[1] if dec and dec[0] == key else None
+        return value or None
+
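+    # A minimal sketch of the secure-cookie round trip described above; the
+    # handler context, the 'account' key and its payload are hypothetical:
+    #
+    #     response.set_cookie('account', {'id': 42}, secret='my-secret')
+    #     # ... on a later request ...
+    #     request.get_cookie('account', secret='my-secret')  # -> {'id': 42}
+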
+    @property
+    def is_ajax(self):
+        ''' True if the request was generated using XMLHttpRequest '''
+        #TODO: write tests
+        return self.headers.get('X-Requested-With') == 'XMLHttpRequest'
+
+
+
+class Response(threading.local):
+    """ Represents a single HTTP response using thread-local attributes.
+    """
+
+    def __init__(self):
+        self.bind()
+
+    def bind(self):
+        """ Resets the Response object to its factory defaults. """
+        self._COOKIES = None
+        self.status = 200
+        self.headers = HeaderDict()
+        self.content_type = 'text/html; charset=UTF-8'
+
+    @property
+    def header(self):
+        depr("Response.header renamed to Response.headers")
+        return self.headers
+
+    def copy(self):
+        ''' Returns a copy of self '''
+        copy = Response()
+        copy.status = self.status
+        copy.headers = self.headers.copy()
+        copy.content_type = self.content_type
+        return copy
+
+    def wsgiheader(self):
+        ''' Returns a WSGI-conformant list of header/value pairs. '''
+        for c in self.COOKIES.values():
+            if c.OutputString() not in self.headers.getall('Set-Cookie'):
+                self.headers.append('Set-Cookie', c.OutputString())
+        # rfc2616 section 10.2.3, 10.3.5
+        if self.status in (204, 304) and 'content-type' in self.headers:
+            del self.headers['content-type']
+        if self.status == 304:
+            for h in ('allow', 'content-encoding', 'content-language',
+                      'content-length', 'content-md5', 'content-range',
+                      'content-type', 'last-modified'): # + c-location, expires?
+                if h in self.headers:
+                     del self.headers[h]
+        return list(self.headers.iterallitems())
+    headerlist = property(wsgiheader)
+
+    @property
+    def charset(self):
+        """ Return the charset specified in the content-type header.
+        
+            This defaults to `UTF-8`.
+        """
+        if 'charset=' in self.content_type:
+            return self.content_type.split('charset=')[-1].split(';')[0].strip()
+        return 'UTF-8'
+
+    @property
+    def COOKIES(self):
+        """ A dict-like SimpleCookie instance. Use Response.set_cookie() instead. """
+        if not self._COOKIES:
+            self._COOKIES = SimpleCookie()
+        return self._COOKIES
+
+    def set_cookie(self, key, value, secret=None, **kargs):
+        ''' Add a cookie. If the `secret` parameter is set, this creates a
+            `Secure Cookie` (described below).
+
+            |param key| the name of the cookie.
+            |param value| the value of the cookie.
+            |param secret| required for secure cookies. (default: None)
+            |param max_age| maximum age in seconds. (default: None)
+            |param expires| a datetime object or UNIX timestamp. (default: None)
+            |param domain| the domain that is allowed to read the cookie.
+              (default: current domain)
+            |param path| limits the cookie to a given path (default: /)
+
+            If neither `expires` nor `max_age` are set (default), the cookie
+            lasts only as long as the browser is not closed.
+
+            Secure cookies may store any pickle-able object and are
+            cryptographically signed to prevent manipulation. Keep in mind that
+            cookies are limited to 4kb in most browsers.
+            
+            Warning: Secure cookies are not encrypted (the client can still see
+            the content) and not copy-protected (the client can restore an old
+            cookie). The main intention is to make pickling and unpickling
+            safe, not to store secret information on the client side.
+        '''
+        if secret:
+            value = touni(cookie_encode((key, value), secret))
+        elif not isinstance(value, basestring):
+            raise TypeError('Secret missing for non-string Cookie.')
+            
+        self.COOKIES[key] = value
+        for k, v in kargs.iteritems():
+            self.COOKIES[key][k.replace('_', '-')] = v
+
+    def delete_cookie(self, key, **kwargs):
+        ''' Delete a cookie. Be sure to use the same `domain` and `path`
+            parameters as used to create the cookie.
+        '''
+        kwargs['max_age'] = -1
+        kwargs['expires'] = 0
+        self.set_cookie(key, **kwargs)
+
+    def get_content_type(self):
+        """ Current 'Content-Type' header. """
+        return self.headers['Content-Type']
+
+    def set_content_type(self, value):
+        self.headers['Content-Type'] = value
+
+    content_type = property(get_content_type, set_content_type, None,
+                            get_content_type.__doc__)
+
+
+
+
+
+
+###############################################################################
+# Common Utilities #############################################################
+###############################################################################
+
+class MultiDict(DictMixin):
+    """ A dict that remembers old values for each key """
+    # collections.MutableMapping would be better for Python >= 2.6
+    def __init__(self, *a, **k):
+        self.dict = dict()
+        for k, v in dict(*a, **k).iteritems():
+            self[k] = v
+
+    def __len__(self): return len(self.dict)
+    def __iter__(self): return iter(self.dict)
+    def __contains__(self, key): return key in self.dict
+    def __delitem__(self, key): del self.dict[key]
+    def keys(self): return self.dict.keys()
+    def __getitem__(self, key): return self.get(key, KeyError, -1)
+    def __setitem__(self, key, value): self.append(key, value)
+
+    def append(self, key, value): self.dict.setdefault(key, []).append(value)
+    def replace(self, key, value): self.dict[key] = [value]
+    def getall(self, key): return self.dict.get(key) or []
+
+    def get(self, key, default=None, index=-1):
+        if key not in self.dict and default != KeyError:
+            return [default][index]
+        return self.dict[key][index]
+
+    def iterallitems(self):
+        for key, values in self.dict.iteritems():
+            for value in values:
+                yield key, value
+
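+# Behavior sketch (illustrative only): item access returns the most recent
+# value, while getall() exposes every value stored under a key:
+#
+#     d = MultiDict()
+#     d['x'] = 1; d['x'] = 2
+#     d['x']         # -> 2
+#     d.getall('x')  # -> [1, 2]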
+
+class HeaderDict(MultiDict):
+    """ Same as :class:`MultiDict`, but title()s the keys and overwrites by default. """
+    def __contains__(self, key): return MultiDict.__contains__(self, self.httpkey(key))
+    def __getitem__(self, key): return MultiDict.__getitem__(self, self.httpkey(key))
+    def __delitem__(self, key): return MultiDict.__delitem__(self, self.httpkey(key))
+    def __setitem__(self, key, value): self.replace(key, value)
+    def get(self, key, default=None, index=-1): return MultiDict.get(self, self.httpkey(key), default, index)
+    def append(self, key, value): return MultiDict.append(self, self.httpkey(key), str(value))
+    def replace(self, key, value): return MultiDict.replace(self, self.httpkey(key), str(value))
+    def getall(self, key): return MultiDict.getall(self, self.httpkey(key))
+    def httpkey(self, key): return str(key).replace('_','-').title()
+
+
+
+class WSGIHeaderDict(DictMixin):
+    ''' This dict-like class takes a WSGI environ dict and provides convenient
+        access to HTTP_* fields. Keys and values are stored as native strings
+        (bytes/unicode) based on the python version used (2/3) and keys are
+        case-insensitive. If the WSGI environment contains non-native strings,
+        these are de- or encoded using 'utf8' (default) or 'latin1' (fallback)
+        charset. To get the original value, use the .raw(key) method.
+
+        This is not a MultiDict because incoming headers are unique. The API
+        will remain stable even on WSGI spec changes, if possible.
+        '''
+
+    def __init__(self, environ):
+        self.cache = {}
+        self.environ = environ
+        for key, value in self.environ.iteritems():
+            key = tonat(key, 'latin1') # Headers are limited to ASCII anyway
+            if key.startswith('HTTP_'):
+                self[key[5:].replace('_','-')] = value
+
+    def __len__(self): return len(self.cache)
+    def keys(self): return self.cache.keys()
+    def __iter__(self): return iter(self.cache)
+    def __contains__(self, key): return key.title() in self.keys()
+    def __delitem__(self, key): del self.cache[key.title()]
+    def __getitem__(self, key): return self.cache[key.title()]
+    def __setitem__(self, key, value):
+        try:
+            self.cache[key.title()] = tonat(value, 'utf8')
+        except UnicodeError:
+            self.cache[key.title()] = tonat(value, 'latin1')
+
+    def raw(self, key, default=None):
+        ''' Return the raw WSGI header value for that key. '''
+        ekey = 'HTTP_%s' % key.replace('-','_').upper()
+        return self.environ.get(ekey, default)
+
+
+
+
+class AppStack(list):
+    """ A stack implementation. """
+
+    def __call__(self):
+        """ Return the current default app. """
+        return self[-1]
+
+    def push(self, value=None):
+        """ Add a new Bottle instance to the stack """
+        if not isinstance(value, Bottle):
+            value = Bottle()
+        self.append(value)
+        return value
+
+class WSGIFileWrapper(object):
+
+    def __init__(self, fp, buffer_size=1024*64):
+        self.fp, self.buffer_size = fp, buffer_size
+        for attr in ('fileno', 'close', 'read', 'readlines'):
+            if hasattr(fp, attr): setattr(self, attr, getattr(fp, attr))
+
+    def __iter__(self):
+        read, buff = self.fp.read, self.buffer_size
+        while True:
+            part = read(buff)
+            if not part: break
+            yield part
+
+
+
+
+
+
+###############################################################################
+# Application Helper ###########################################################
+###############################################################################
+
+def dict2json(d):
+    response.content_type = 'application/json'
+    return json_dumps(d)
+
+
+def abort(code=500, text='Unknown Error: Application stopped.'):
+    """ Aborts execution and causes a HTTP error. """
+    raise HTTPError(code, text)
+
+
+def redirect(url, code=303):
+    """ Aborts execution and causes a 303 redirect """
+    scriptname = request.environ.get('SCRIPT_NAME', '').rstrip('/') + '/'
+    location = urljoin(request.url, urljoin(scriptname, url))
+    raise HTTPResponse("", status=code, header=dict(Location=location))
+
+
+def send_file(*a, **k): #BC 0.6.4
+    """ Raises the output of static_file(). (deprecated) """
+    raise static_file(*a, **k)
+
+
+def static_file(filename, root, guessmime=True, mimetype=None, download=False):
+    """ Opens a file in a safe way and returns a HTTPError object with status
+        code 200, 305, 401 or 404. Sets Content-Type, Content-Length and
+        Last-Modified header. Obeys If-Modified-Since header and HEAD requests.
+    """
+    root = os.path.abspath(root) + os.sep
+    filename = os.path.abspath(os.path.join(root, filename.strip('/\\')))
+    header = dict()
+
+    if not filename.startswith(root):
+        return HTTPError(403, "Access denied.")
+    if not os.path.exists(filename) or not os.path.isfile(filename):
+        return HTTPError(404, "File does not exist.")
+    if not os.access(filename, os.R_OK):
+        return HTTPError(403, "You do not have permission to access this file.")
+
+    if not mimetype and guessmime:
+        header['Content-Type'] = mimetypes.guess_type(filename)[0]
+    else:
+        header['Content-Type'] = mimetype if mimetype else 'text/plain'
+
+    if download == True:
+        download = os.path.basename(filename)
+    if download:
+        header['Content-Disposition'] = 'attachment; filename="%s"' % download
+
+    stats = os.stat(filename)
+    lm = time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(stats.st_mtime))
+    header['Last-Modified'] = lm
+    ims = request.environ.get('HTTP_IF_MODIFIED_SINCE')
+    if ims:
+        ims = ims.split(";")[0].strip() # IE sends "<date>; length=146"
+        ims = parse_date(ims)
+        if ims is not None and ims >= int(stats.st_mtime):
+            header['Date'] = time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime())
+            return HTTPResponse(status=304, header=header)
+    header['Content-Length'] = stats.st_size
+    if request.method == 'HEAD':
+        return HTTPResponse('', header=header)
+    else:
+        return HTTPResponse(open(filename, 'rb'), header=header)
+
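+# Typical use inside a route handler (the root path is hypothetical):
+#
+#     @route('/static/:filename')
+#     def server_static(filename):
+#         return static_file(filename, root='/path/to/static/files')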
+
+
+
+
+
+###############################################################################
+# HTTP Utilities and MISC (TODO) ###############################################
+###############################################################################
+
+def debug(mode=True):
+    """ Change the debug level.
+    There is only one debug level supported at the moment."""
+    global DEBUG
+    DEBUG = bool(mode)
+
+
+def parse_date(ims):
+    """ Parse rfc1123, rfc850 and asctime timestamps and return UTC epoch. """
+    try:
+        ts = email.utils.parsedate_tz(ims)
+        return time.mktime(ts[:8] + (0,)) - (ts[9] or 0) - time.timezone
+    except (TypeError, ValueError, IndexError):
+        return None
+
+
+def parse_auth(header):
+    """ Parse rfc2617 HTTP authentication header string (basic) and return (user,pass) tuple or None"""
+    try:
+        method, data = header.split(None, 1)
+        if method.lower() == 'basic':
+            name, pwd = base64.b64decode(data).split(':', 1)
+            return name, pwd
+    except (KeyError, ValueError, TypeError):
+        return None
+
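+# Example: a basic-auth header value decodes to a (user, password) tuple;
+# 'dXNlcjpwYXNz' is base64 for 'user:pass':
+#
+#     parse_auth('Basic dXNlcjpwYXNz')  # -> ('user', 'pass')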
+
+def cookie_encode(data, key):
+    ''' Encode and sign a pickle-able object. Return a (byte) string '''
+    msg = base64.b64encode(pickle.dumps(data, -1))
+    sig = base64.b64encode(hmac.new(key, msg).digest())
+    return tob('!') + sig + tob('?') + msg
+
+
+def cookie_decode(data, key):
+    ''' Verify and decode an encoded string. Return an object or None.'''
+    data = tob(data)
+    if cookie_is_encoded(data):
+        sig, msg = data.split(tob('?'), 1)
+        if sig[1:] == base64.b64encode(hmac.new(key, msg).digest()):
+            return pickle.loads(base64.b64decode(msg))
+    return None
+
+
+def cookie_is_encoded(data):
+    ''' Return True if the argument looks like an encoded cookie.'''
+    return bool(data.startswith(tob('!')) and tob('?') in data)
+
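+# Round-trip sketch: cookie_encode() signs a pickled payload, and
+# cookie_decode() only restores it if the signature matches (the key
+# 'secret' is hypothetical):
+#
+#     data = cookie_encode(('name', [1, 2, 3]), 'secret')
+#     cookie_is_encoded(data)        # -> True
+#     cookie_decode(data, 'secret')  # -> ('name', [1, 2, 3])
+#     cookie_decode(data, 'wrong')   # -> None (signature mismatch)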
+
+def yieldroutes(func):
+    """ Return a generator for routes that match the signature (name, args) 
+    of the func parameter. This may yield more than one route if the function
+    takes optional keyword arguments. The output is best described by example::
+    
+        a()         -> '/a'
+        b(x, y)     -> '/b/:x/:y'
+        c(x, y=5)   -> '/c/:x' and '/c/:x/:y'
+        d(x=5, y=6) -> '/d' and '/d/:x' and '/d/:x/:y'
+    """
+    path = '/' + func.__name__.replace('__','/').lstrip('/')
+    spec = inspect.getargspec(func)
+    argc = len(spec[0]) - len(spec[3] or [])
+    path += ('/:%s' * argc) % tuple(spec[0][:argc])
+    yield path
+    for arg in spec[0][argc:]:
+        path += '/:%s' % arg
+        yield path
+
+def path_shift(script_name, path_info, shift=1):
+    ''' Shift path fragments from PATH_INFO to SCRIPT_NAME and vice versa.
+
+        :return: The modified paths.
+        :param script_name: The SCRIPT_NAME path.
+        :param path_info: The PATH_INFO path.
+        :param shift: The number of path fragments to shift. May be negative to
+          change the shift direction. (default: 1)
+    '''
+    if shift == 0: return script_name, path_info
+    pathlist = path_info.strip('/').split('/')
+    scriptlist = script_name.strip('/').split('/')
+    if pathlist and pathlist[0] == '': pathlist = []
+    if scriptlist and scriptlist[0] == '': scriptlist = []
+    if shift > 0 and shift <= len(pathlist):
+        moved = pathlist[:shift]
+        scriptlist = scriptlist + moved
+        pathlist = pathlist[shift:]
+    elif shift < 0 and shift >= -len(scriptlist):
+        moved = scriptlist[shift:]
+        pathlist = moved + pathlist
+        scriptlist = scriptlist[:shift]
+    else:
+        empty = 'SCRIPT_NAME' if shift < 0 else 'PATH_INFO'
+        raise AssertionError("Cannot shift. Nothing left from %s" % empty)
+    new_script_name = '/' + '/'.join(scriptlist)
+    new_path_info = '/' + '/'.join(pathlist)
+    if path_info.endswith('/') and pathlist: new_path_info += '/'
+    return new_script_name, new_path_info
+
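+# Worked examples:
+#
+#     path_shift('/a', '/b/c')      # -> ('/a/b', '/c')
+#     path_shift('/a/b', '/c', -1)  # -> ('/a', '/b/c')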
+
+
+# Decorators
+#TODO: Replace default_app() with app()
+
+def validate(**vkargs):
+    """
+    Validates and manipulates keyword arguments by user defined callables.
+    Handles ValueError and missing arguments by raising HTTPError(403).
+    """
+    def decorator(func):
+        def wrapper(**kargs):
+            for key, value in vkargs.iteritems():
+                if key not in kargs:
+                    abort(403, 'Missing parameter: %s' % key)
+                try:
+                    kargs[key] = value(kargs[key])
+                except ValueError:
+                    abort(403, 'Wrong parameter format for: %s' % key)
+            return func(**kargs)
+        return wrapper
+    return decorator
+
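+# Usage sketch (the route and converters are hypothetical): both parameters
+# are cast to int before the handler runs; bad values raise HTTPError(403):
+#
+#     @route('/add/:a/:b')
+#     @validate(a=int, b=int)
+#     def add(a, b):
+#         return str(a + b)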
+
+def make_default_app_wrapper(name):
+    ''' Return a callable that relays calls to the current default app. '''
+    @functools.wraps(getattr(Bottle, name))
+    def wrapper(*a, **ka):
+        return getattr(app(), name)(*a, **ka)
+    return wrapper
+
+for name in 'route get post put delete error mount hook'.split():
+    globals()[name] = make_default_app_wrapper(name)
+
+url = make_default_app_wrapper('get_url')
+
+
+def default():
+    depr("The default() decorator is deprecated. Use @error(404) instead.")
+    return error(404)
+
+
+
+
+
+
+###############################################################################
+# Server Adapter ###############################################################
+###############################################################################
+
+class ServerAdapter(object):
+    quiet = False
+    def __init__(self, host='127.0.0.1', port=8080, **kargs):
+        self.options = kargs
+        self.host = host
+        self.port = int(port)
+
+    def run(self, handler): # pragma: no cover
+        pass
+        
+    def __repr__(self):
+        args = ', '.join(['%s=%s'%(k,repr(v)) for k, v in self.options.items()])
+        return "%s(%s)" % (self.__class__.__name__, args)
+
+
+class CGIServer(ServerAdapter):
+    quiet = True
+    def run(self, handler): # pragma: no cover
+        from wsgiref.handlers import CGIHandler
+        CGIHandler().run(handler) # Just ignore host and port here
+
+
+class FlupFCGIServer(ServerAdapter):
+    def run(self, handler): # pragma: no cover
+        import flup.server.fcgi
+        flup.server.fcgi.WSGIServer(handler, bindAddress=(self.host, self.port)).run()
+
+
+class WSGIRefServer(ServerAdapter):
+    def run(self, handler): # pragma: no cover
+        from wsgiref.simple_server import make_server, WSGIRequestHandler
+        if self.quiet:
+            class QuietHandler(WSGIRequestHandler):
+                def log_request(*args, **kw): pass
+            self.options['handler_class'] = QuietHandler
+        srv = make_server(self.host, self.port, handler, **self.options)
+        srv.serve_forever()
+
+
+class CherryPyServer(ServerAdapter):
+    def run(self, handler): # pragma: no cover
+        from cherrypy import wsgiserver
+        server = wsgiserver.CherryPyWSGIServer((self.host, self.port), handler)
+        server.start()
+
+
+class PasteServer(ServerAdapter):
+    def run(self, handler): # pragma: no cover
+        from paste import httpserver
+        if not self.quiet:
+            from paste.translogger import TransLogger
+            handler = TransLogger(handler)
+        httpserver.serve(handler, host=self.host, port=str(self.port),
+                         **self.options)
+                         
+class MeinheldServer(ServerAdapter):
+    def run(self, handler):
+        from meinheld import server
+        server.listen((self.host, self.port))
+        server.run(handler)
+
+class FapwsServer(ServerAdapter):
+    """
+    Extremely fast webserver using libev.
+    See http://william-os4y.livejournal.com/
+    """
+    def run(self, handler): # pragma: no cover
+        import fapws._evwsgi as evwsgi
+        from fapws import base, config
+        port = self.port
+        if float(config.SERVER_IDENT[-2:]) > 0.4:
+            # fapws3 silently changed its API in 0.5
+            port = str(port)
+        evwsgi.start(self.host, port)
+        # fapws3 never releases the GIL. Complain upstream. I tried. No luck.
+        if 'BOTTLE_CHILD' in os.environ and not self.quiet:
+            print "WARNING: Auto-reloading does not work with Fapws3."
+            print "         (Fapws3 breaks python thread support)"
+        evwsgi.set_base_module(base)
+        def app(environ, start_response):
+            environ['wsgi.multiprocess'] = False
+            return handler(environ, start_response)
+        evwsgi.wsgi_cb(('', app))
+        evwsgi.run()
+
+
+class TornadoServer(ServerAdapter):
+    """ Untested. As described here:
+        http://github.com/facebook/tornado/blob/master/tornado/wsgi.py#L187 """
+    def run(self, handler): # pragma: no cover
+        import tornado.wsgi
+        import tornado.httpserver
+        import tornado.ioloop
+        container = tornado.wsgi.WSGIContainer(handler)
+        server = tornado.httpserver.HTTPServer(container)
+        server.listen(port=self.port)
+        tornado.ioloop.IOLoop.instance().start()
+
+
+class AppEngineServer(ServerAdapter):
+    """ Untested. """
+    quiet = True
+    def run(self, handler):
+        from google.appengine.ext.webapp import util
+        util.run_wsgi_app(handler)
+
+
+class TwistedServer(ServerAdapter):
+    """ Untested. """
+    def run(self, handler):
+        from twisted.web import server, wsgi
+        from twisted.python.threadpool import ThreadPool
+        from twisted.internet import reactor
+        thread_pool = ThreadPool()
+        thread_pool.start()
+        reactor.addSystemEventTrigger('after', 'shutdown', thread_pool.stop)
+        factory = server.Site(wsgi.WSGIResource(reactor, thread_pool, handler))
+        reactor.listenTCP(self.port, factory, interface=self.host)
+        reactor.run()
+
+
+class DieselServer(ServerAdapter):
+    """ Untested. """
+    def run(self, handler):
+        from diesel.protocols.wsgi import WSGIApplication
+        app = WSGIApplication(handler, port=self.port)
+        app.run()
+
+
+class GeventServer(ServerAdapter):
+    """ Untested. """
+    def run(self, handler):
+        from gevent import wsgi
+        #from gevent.hub import getcurrent
+        #self.set_context_ident(getcurrent, weakref=True) # see contextlocal
+        wsgi.WSGIServer((self.host, self.port), handler).serve_forever()
+
+
+class GunicornServer(ServerAdapter):
+    """ Untested. """
+    def run(self, handler):
+        from gunicorn.arbiter import Arbiter
+        from gunicorn.config import Config
+        handler.cfg = Config({'bind': "%s:%d" % (self.host, self.port), 'workers': 4})
+        arbiter = Arbiter(handler)
+        arbiter.run()
+
+
+class EventletServer(ServerAdapter):
+    """ Untested """
+    def run(self, handler):
+        from eventlet import wsgi, listen
+        wsgi.server(listen((self.host, self.port)), handler)
+
+
+class RocketServer(ServerAdapter):
+    """ Untested. As requested in issue 63
+        http://github.com/defnull/bottle/issues/#issue/63 """
+    def run(self, handler):
+        from rocket import Rocket
+        server = Rocket((self.host, self.port), 'wsgi', { 'wsgi_app' : handler })
+        server.start()
+
+
+class AutoServer(ServerAdapter):
+    """ Untested. """
+    adapters = [PasteServer, CherryPyServer, TwistedServer, WSGIRefServer]
+    def run(self, handler):
+        for sa in self.adapters:
+            try:
+                return sa(self.host, self.port, **self.options).run(handler)
+            except ImportError:
+                pass
+
+
+server_names = {
+    'cgi': CGIServer,
+    'flup': FlupFCGIServer,
+    'wsgiref': WSGIRefServer,
+    'cherrypy': CherryPyServer,
+    'paste': PasteServer,
+    'fapws3': FapwsServer,
+    'tornado': TornadoServer,
+    'gae': AppEngineServer,
+    'twisted': TwistedServer,
+    'diesel': DieselServer,
+    'meinheld': MeinheldServer,
+    'gunicorn': GunicornServer,
+    'eventlet': EventletServer,
+    'gevent': GeventServer,
+    'rocket': RocketServer,
+    'auto': AutoServer,
+}
+
+
+
+
+
+
+###############################################################################
+# Application Control ##########################################################
+###############################################################################
+
+def load_app(target):
+    """ Load a bottle application based on a target string and return the app
+        object.
+        
+        The target should be a valid python import path
+        (e.g. mypackage.mymodule); in that case, the module's default
+        application is returned.
+        If the target contains a colon (e.g. mypackage.mymodule:myapp) the
+        module variable specified after the colon is returned instead.
+    """
+    path, name = target.split(":", 1) if ':' in target else (target, None)
+    rv = None if name else app.push()
+    __import__(path)
+    module = sys.modules[path]
+    if rv and rv in app: app.remove(rv)
+    return rv if rv else getattr(module, name)
+
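+# Usage sketch (module and variable names are hypothetical):
+#
+#     myapp = load_app('mypackage.mymodule')        # module's default app
+#     myapp = load_app('mypackage.mymodule:myapp')  # explicit app object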
+
+def run(app=None, server='wsgiref', host='127.0.0.1', port=8080,
+        interval=1, reloader=False, quiet=False, **kargs):
+    """ Start a server instance. This method blocks until the server
+        terminates.
+
+        :param app: WSGI application or target string supported by
+               :func:`load_app`. (default: :func:`default_app`)
+        :param server: Server adapter to use. See :data:`server_names` dict
+               for valid names or pass a :class:`ServerAdapter` subclass.
+               (default: wsgiref)
+        :param host: Server address to bind to. Pass ``0.0.0.0`` to listen on
+               all interfaces including the external one. (default: 127.0.0.1)
+        :param port: Server port to bind to. Values below 1024 require root
+               privileges. (default: 8080)
+        :param reloader: Start auto-reloading server? (default: False)
+        :param interval: Auto-reloader interval in seconds (default: 1)
+        :param quiet: Suppress output to stdout and stderr? (default: False)
+        :param options: Options passed to the server adapter.
+     """
+    app = app or default_app()
+    if isinstance(app, basestring):
+        app = load_app(app)
+    if isinstance(server, basestring):
+        server = server_names.get(server)
+    if isinstance(server, type):
+        server = server(host=host, port=port, **kargs)
+    if not isinstance(server, ServerAdapter):
+        raise RuntimeError("Server must be a subclass of ServerAdapter")
+    server.quiet = server.quiet or quiet
+    if not server.quiet and not os.environ.get('BOTTLE_CHILD'):
+        print "Bottle server starting up (using %s)..." % repr(server)
+        print "Listening on http://%s:%d/" % (server.host, server.port)
+        print "Use Ctrl-C to quit."
+        print
+    try:
+        if reloader:
+            interval = min(interval, 1)
+            if os.environ.get('BOTTLE_CHILD'):
+                _reloader_child(server, app, interval)
+            else:
+                _reloader_observer(server, app, interval)
+        else:
+            server.run(app)
+    except KeyboardInterrupt:
+        pass
+    if not server.quiet and not os.environ.get('BOTTLE_CHILD'):
+        print "Shutting down..."
+
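+# Usage sketch: any name from the server_names dict selects the matching
+# adapter ('wsgiref' needs no extra packages):
+#
+#     run(server='wsgiref', host='0.0.0.0', port=8080)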
+
+class FileCheckerThread(threading.Thread):
+    ''' Thread that periodically checks for changed module files. '''
+
+    def __init__(self, lockfile, interval):
+        threading.Thread.__init__(self)
+        self.lockfile, self.interval = lockfile, interval
+        #1: lockfile too old; 2: lockfile missing
+        #3: module file changed; 5: external exit
+        self.status = 0
+
+    def run(self):
+        exists = os.path.exists
+        mtime = lambda path: os.stat(path).st_mtime
+        files = dict()
+        for module in sys.modules.values():
+            try:
+                path = inspect.getsourcefile(module)
+                if path and exists(path): files[path] = mtime(path)
+            except TypeError:
+                pass
+        while not self.status:
+            for path, lmtime in files.iteritems():
+                if not exists(path) or mtime(path) > lmtime:
+                    self.status = 3
+            if not exists(self.lockfile):
+                self.status = 2
+            elif mtime(self.lockfile) < time.time() - self.interval - 5:
+                self.status = 1
+            if not self.status:
+                time.sleep(self.interval)
+        if self.status != 5:
+            thread.interrupt_main()
+
+
+def _reloader_child(server, app, interval):
+    ''' Start the server and check for modified files in a background thread.
+        As soon as an update is detected, KeyboardInterrupt is thrown in
+        the main thread to exit the server loop. The process exits with
+        status code 3 to request a reload by the observer process. If the
+        lockfile is not modified within 2*interval seconds or goes missing,
+        we assume that the observer process died and exit with status code
+        1 or 2.
+    '''
+    lockfile = os.environ.get('BOTTLE_LOCKFILE')
+    bgcheck = FileCheckerThread(lockfile, interval)
+    try:
+        bgcheck.start()
+        server.run(app)
+    except KeyboardInterrupt:
+        pass
+    bgcheck.status, status = 5, bgcheck.status
+    bgcheck.join() # bgcheck.status == 5 --> silent exit
+    if status: sys.exit(status)
+
+
+def _reloader_observer(server, app, interval):
+    ''' Start a child process with identical commandline arguments and restart
+        it as long as it exits with status code 3. Also create a lockfile and
+        touch it (update mtime) every interval seconds.
+    '''
+    fd, lockfile = tempfile.mkstemp(prefix='bottle-reloader.', suffix='.lock')
+    os.close(fd) # We only need this file to exist. We never write to it
+    try:
+        while os.path.exists(lockfile):
+            args = [sys.executable] + sys.argv
+            environ = os.environ.copy()
+            environ['BOTTLE_CHILD'] = 'true'
+            environ['BOTTLE_LOCKFILE'] = lockfile
+            p = subprocess.Popen(args, env=environ)
+            while p.poll() is None: # Busy wait...
+                os.utime(lockfile, None) # I am alive!
+                time.sleep(interval)
+            if p.poll() != 3:
+                if os.path.exists(lockfile): os.unlink(lockfile)
+                sys.exit(p.poll())
+            elif not server.quiet:
+                print "Reloading server..."
+    except KeyboardInterrupt:
+        pass
+    if os.path.exists(lockfile): os.unlink(lockfile)
+
+
+
+
+
+
+###############################################################################
+# Template Adapters ############################################################
+###############################################################################
+
+class TemplateError(HTTPError):
+    def __init__(self, message):
+        HTTPError.__init__(self, 500, message)
+
+
+class BaseTemplate(object):
+    """ Base class and minimal API for template adapters """
+    extentions = ['tpl','html','thtml','stpl']
+    settings = {} #used in prepare()
+    defaults = {} #used in render()
+
+    def __init__(self, source=None, name=None, lookup=[], encoding='utf8', **settings):
+        """ Create a new template.
+        If the source parameter (str or buffer) is missing, the name argument
+        is used to guess a template filename. Subclasses can assume that
+        self.source and/or self.filename are set. Both are strings.
+        The lookup, encoding and settings parameters are stored as instance
+        variables.
+        The lookup parameter stores a list containing directory paths.
+        The encoding parameter should be used to decode byte strings or files.
+        The settings parameter contains a dict for engine-specific settings.
+        """
+        self.name = name
+        self.source = source.read() if hasattr(source, 'read') else source
+        self.filename = source.filename if hasattr(source, 'filename') else None
+        self.lookup = map(os.path.abspath, lookup)
+        self.encoding = encoding
+        self.settings = self.settings.copy() # Copy from class variable
+        self.settings.update(settings) # Apply settings passed to the constructor
+        if not self.source and self.name:
+            self.filename = self.search(self.name, self.lookup)
+            if not self.filename:
+                raise TemplateError('Template %s not found.' % repr(name))
+        if not self.source and not self.filename:
+            raise TemplateError('No template specified.')
+        self.prepare(**self.settings)
+
+    @classmethod
+    def search(cls, name, lookup=[]):
+        """ Search name in all directories specified in lookup.
+        First without, then with common extensions. Return first hit. """
+        if os.path.isfile(name): return name
+        for spath in lookup:
+            fname = os.path.join(spath, name)
+            if os.path.isfile(fname):
+                return fname
+            for ext in cls.extentions:
+                if os.path.isfile('%s.%s' % (fname, ext)):
+                    return '%s.%s' % (fname, ext)
+
+    @classmethod
+    def global_config(cls, key, *args):
+        ''' This reads or sets the global settings stored in class.settings. '''
+        if args:
+            cls.settings[key] = args[0]
+        else:
+            return cls.settings[key]
+
+    def prepare(self, **options):
+        """ Run preparations (parsing, caching, ...).
+        It should be possible to call this again to refresh a template or to
+        update settings.
+        """
+        raise NotImplementedError
+
+    def render(self, *args, **kwargs):
+        """ Render the template with the specified local variables and return
+        a single byte or unicode string. If it is a byte string, the encoding
+        must match self.encoding. This method must be thread-safe!
+        Local variables may be provided in dictionaries (*args)
+        or directly, as keywords (**kwargs).
+        """
+        raise NotImplementedError
+
+
+class MakoTemplate(BaseTemplate):
+    def prepare(self, **options):
+        from mako.template import Template
+        from mako.lookup import TemplateLookup
+        options.update({'input_encoding':self.encoding})
+        #TODO: This is a hack... http://github.com/defnull/bottle/issues#issue/8
+        mylookup = TemplateLookup(directories=['.']+self.lookup, **options)
+        if self.source:
+            self.tpl = Template(self.source, lookup=mylookup)
+        else: #mako cannot guess extensions. We can, but only at top level...
+            name = self.name
+            if not os.path.splitext(name)[1]:
+                name += os.path.splitext(self.filename)[1]
+            self.tpl = mylookup.get_template(name)
+
+    def render(self, *args, **kwargs):
+        for dictarg in args: kwargs.update(dictarg)
+        _defaults = self.defaults.copy()
+        _defaults.update(kwargs)
+        return self.tpl.render(**_defaults)
+
+
+class CheetahTemplate(BaseTemplate):
+    def prepare(self, **options):
+        from Cheetah.Template import Template
+        self.context = threading.local()
+        self.context.vars = {}
+        options['searchList'] = [self.context.vars]
+        if self.source:
+            self.tpl = Template(source=self.source, **options)
+        else:
+            self.tpl = Template(file=self.filename, **options)
+
+    def render(self, *args, **kwargs):
+        for dictarg in args: kwargs.update(dictarg)
+        self.context.vars.update(self.defaults)
+        self.context.vars.update(kwargs)
+        out = str(self.tpl)
+        self.context.vars.clear()
+        return [out]
+
+
+class Jinja2Template(BaseTemplate):
+    def prepare(self, filters=None, tests=None, **kwargs):
+        from jinja2 import Environment, FunctionLoader
+        if 'prefix' in kwargs: # TODO: to be removed after a while
+            raise RuntimeError('The keyword argument `prefix` has been removed. '
+                'Use the full jinja2 environment name line_statement_prefix instead.')
+        self.env = Environment(loader=FunctionLoader(self.loader), **kwargs)
+        if filters: self.env.filters.update(filters)
+        if tests: self.env.tests.update(tests)
+        if self.source:
+            self.tpl = self.env.from_string(self.source)
+        else:
+            self.tpl = self.env.get_template(self.filename)
+
+    def render(self, *args, **kwargs):
+        for dictarg in args: kwargs.update(dictarg)
+        _defaults = self.defaults.copy()
+        _defaults.update(kwargs)
+        return self.tpl.render(**_defaults).encode("utf-8")
+
+    def loader(self, name):
+        fname = self.search(name, self.lookup)
+        if fname:
+            with open(fname, "rb") as f:
+                return f.read().decode(self.encoding)
+
+
+class SimpleTemplate(BaseTemplate):
+    blocks = ('if','elif','else','try','except','finally','for','while','with','def','class')
+    dedent_blocks = ('elif', 'else', 'except', 'finally')
+
+    def prepare(self, escape_func=cgi.escape, noescape=False):
+        self.cache = {}
+        if self.source:
+            self.code = self.translate(self.source)
+            self.co = compile(self.code, '<string>', 'exec')
+        else:
+            self.code = self.translate(open(self.filename).read())
+            self.co = compile(self.code, self.filename, 'exec')
+        enc = self.encoding
+        self._str = lambda x: touni(x, enc)
+        self._escape = lambda x: escape_func(touni(x, enc))
+        if noescape:
+            self._str, self._escape = self._escape, self._str
+
+    def translate(self, template):
+        stack = [] # Current Code indentation
+        lineno = 0 # Current line of code
+        ptrbuffer = [] # Buffer for printable strings and token tuple instances
+        codebuffer = [] # Buffer for generated python code
+        multiline = dedent = oneline = False
+
+        def yield_tokens(line):
+            for i, part in enumerate(re.split(r'\{\{(.*?)\}\}', line)):
+                if i % 2:
+                    if part.startswith('!'): yield 'RAW', part[1:]
+                    else: yield 'CMD', part
+                else: yield 'TXT', part
+
+        def split_comment(codeline):
+            """ Removes comments from a line of code. """
+            line = codeline.splitlines()[0]
+            try:
+                tokens = list(tokenize.generate_tokens(iter(line).next))
+            except tokenize.TokenError:
+                return line.rsplit('#',1) if '#' in line else (line, '')
+            for token in tokens:
+                if token[0] == tokenize.COMMENT:
+                    start, end = token[2][1], token[3][1]
+                    return codeline[:start] + codeline[end:], codeline[start:end]
+            return line, ''
+
+        def flush(): # Flush the ptrbuffer
+            if not ptrbuffer: return
+            cline = ''
+            for line in ptrbuffer:
+                for token, value in line:
+                    if token == 'TXT': cline += repr(value)
+                    elif token == 'RAW': cline += '_str(%s)' % value
+                    elif token == 'CMD': cline += '_escape(%s)' % value
+                    cline +=  ', '
+                cline = cline[:-2] + '\\\n'
+            cline = cline[:-2]
+            if cline[:-1].endswith('\\\\\\\\\\n'):
+                cline = cline[:-7] + cline[-1] # 'nobr\\\\\n' --> 'nobr'
+            cline = '_printlist([' + cline + '])'
+            del ptrbuffer[:] # Do this before calling code() again
+            code(cline)
+
+        def code(stmt):
+            for line in stmt.splitlines():
+                codebuffer.append('  ' * len(stack) + line.strip())
+
+        for line in template.splitlines(True):
+            lineno += 1
+            line = line if isinstance(line, unicode)\
+                        else unicode(line, encoding=self.encoding)
+            if lineno <= 2:
+                m = re.search(r"%.*coding[:=]\s*([-\w\.]+)", line)
+                if m: self.encoding = m.group(1)
+                if m: line = line.replace('coding','coding (removed)')
+            if line.strip()[:2].count('%') == 1:
+                line = line.split('%',1)[1].lstrip() # Full line following the %
+                cline = split_comment(line)[0].strip()
+                cmd = re.split(r'[^a-zA-Z0-9_]', cline)[0]
+                flush() ## encoding (TODO: why?)
+                if cmd in self.blocks or multiline:
+                    cmd = multiline or cmd
+                    dedent = cmd in self.dedent_blocks # "else:"
+                    if dedent and not oneline and not multiline:
+                        cmd = stack.pop()
+                    code(line)
+                    oneline = not cline.endswith(':') # "if 1: pass"
+                    multiline = cmd if cline.endswith('\\') else False
+                    if not oneline and not multiline:
+                        stack.append(cmd)
+                elif cmd == 'end' and stack:
+                    code('#end(%s) %s' % (stack.pop(), line.strip()[3:]))
+                elif cmd == 'include':
+                    p = cline.split(None, 2)[1:]
+                    if len(p) == 2:
+                        code("_=_include(%s, _stdout, %s)" % (repr(p[0]), p[1]))
+                    elif p:
+                        code("_=_include(%s, _stdout)" % repr(p[0]))
+                    else: # Empty %include -> reverse of %rebase
+                        code("_printlist(_base)")
+                elif cmd == 'rebase':
+                    p = cline.split(None, 2)[1:]
+                    if len(p) == 2:
+                        code("globals()['_rebase']=(%s, dict(%s))" % (repr(p[0]), p[1]))
+                    elif p:
+                        code("globals()['_rebase']=(%s, {})" % repr(p[0]))
+                else:
+                    code(line)
+            else: # Line starting with text (not '%') or '%%' (escaped)
+                if line.strip().startswith('%%'):
+                    line = line.replace('%%', '%', 1)
+                ptrbuffer.append(yield_tokens(line))
+        flush()
+        return '\n'.join(codebuffer) + '\n'
+
+    def subtemplate(self, _name, _stdout, *args, **kwargs):
+        for dictarg in args: kwargs.update(dictarg)
+        if _name not in self.cache:
+            self.cache[_name] = self.__class__(name=_name, lookup=self.lookup)
+        return self.cache[_name].execute(_stdout, kwargs)
+
+    def execute(self, _stdout, *args, **kwargs):
+        for dictarg in args: kwargs.update(dictarg)
+        env = self.defaults.copy()
+        env.update({'_stdout': _stdout, '_printlist': _stdout.extend,
+               '_include': self.subtemplate, '_str': self._str,
+               '_escape': self._escape})
+        env.update(kwargs)
+        eval(self.co, env)
+        if '_rebase' in env:
+            subtpl, rargs = env['_rebase']
+            subtpl = self.__class__(name=subtpl, lookup=self.lookup)
+            rargs['_base'] = _stdout[:] #copy stdout
+            del _stdout[:] # clear stdout
+            return subtpl.execute(_stdout, rargs)
+        return env
+
+    def render(self, *args, **kwargs):
+        """ Render the template using keyword arguments as local variables. """
+        for dictarg in args: kwargs.update(dictarg)
+        stdout = []
+        self.execute(stdout, kwargs)
+        return ''.join(stdout)
+
+
+def template(*args, **kwargs):
+    '''
+    Get a rendered template as a string iterator.
+    You can use a name, a filename or a template string as first parameter.
+    Template rendering arguments can be passed as dictionaries
+    or directly (as keyword arguments).
+    '''
+    tpl = args[0] if args else None
+    template_adapter = kwargs.pop('template_adapter', SimpleTemplate)
+    if tpl not in TEMPLATES or DEBUG:
+        settings = kwargs.pop('template_settings', {})
+        lookup = kwargs.pop('template_lookup', TEMPLATE_PATH)
+        if isinstance(tpl, template_adapter):
+            TEMPLATES[tpl] = tpl
+            if settings: TEMPLATES[tpl].prepare(**settings)
+        elif "\n" in tpl or "{" in tpl or "%" in tpl or '$' in tpl:
+            TEMPLATES[tpl] = template_adapter(source=tpl, lookup=lookup, **settings)
+        else:
+            TEMPLATES[tpl] = template_adapter(name=tpl, lookup=lookup, **settings)
+    if not TEMPLATES[tpl]:
+        abort(500, 'Template (%s) not found' % tpl)
+    for dictarg in args[1:]: kwargs.update(dictarg)
+    return TEMPLATES[tpl].render(kwargs)
+
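+# Usage sketch: the first argument is either a template name (looked up in
+# TEMPLATE_PATH, e.g. './views/hello.tpl') or an inline template string:
+#
+#     template('hello', name='World')            # renders hello.tpl
+#     template('Hello {{name}}!', name='World')  # -> u'Hello World!'
+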
+mako_template = functools.partial(template, template_adapter=MakoTemplate)
+cheetah_template = functools.partial(template, template_adapter=CheetahTemplate)
+jinja2_template = functools.partial(template, template_adapter=Jinja2Template)
+
+def view(tpl_name, **defaults):
+    ''' Decorator: renders a template for a handler.
+        The handler can control its behavior like this:
+
+          - return a dict of template vars to fill out the template
+          - return something other than a dict and the view decorator will not
+            process the template, but return the handler result as is.
+            This includes returning a HTTPResponse(dict) to get,
+            for instance, JSON with autojson or other castfilters.
+    '''
+    def decorator(func):
+        @functools.wraps(func)
+        def wrapper(*args, **kwargs):
+            result = func(*args, **kwargs)
+            if isinstance(result, dict):
+                tplvars = defaults.copy()
+                tplvars.update(result)
+                return template(tpl_name, **tplvars)
+            return result
+        return wrapper
+    return decorator
+
+mako_view = functools.partial(view, template_adapter=MakoTemplate)
+cheetah_view = functools.partial(view, template_adapter=CheetahTemplate)
+jinja2_view = functools.partial(view, template_adapter=Jinja2Template)
+
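+# Usage sketch (template name is hypothetical): the dict returned by the
+# handler becomes the template's variables:
+#
+#     @route('/hello/:name')
+#     @view('hello_template')
+#     def hello(name):
+#         return dict(name=name)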
+
+
+
+
+
+###############################################################################
+# Constants and Globals ########################################################
+###############################################################################
+
+TEMPLATE_PATH = ['./', './views/']
+TEMPLATES = {}
+DEBUG = False
+MEMFILE_MAX = 1024*100
+HTTP_CODES = httplib.responses
+HTTP_CODES[418] = "I'm a teapot" # RFC 2324
+
+ERROR_PAGE_TEMPLATE = SimpleTemplate("""
+%try:
+    %from bottle import DEBUG, HTTP_CODES, request
+    %status_name = HTTP_CODES.get(e.status, 'Unknown').title()
+    <!DOCTYPE HTML PUBLIC "-//IETF//DTD HTML 2.0//EN">
+    <html>
+        <head>
+            <title>Error {{e.status}}: {{status_name}}</title>
+            <style type="text/css">
+              html {background-color: #eee; font-family: sans;}
+              body {background-color: #fff; border: 1px solid #ddd; padding: 15px; margin: 15px;}
+              pre {background-color: #eee; border: 1px solid #ddd; padding: 5px;}
+            </style>
+        </head>
+        <body>
+            <h1>Error {{e.status}}: {{status_name}}</h1>
+            <p>Sorry, the requested URL <tt>{{request.url}}</tt> caused an error:</p>
+            <pre>{{str(e.output)}}</pre>
+            %if DEBUG and e.exception:
+              <h2>Exception:</h2>
+              <pre>{{repr(e.exception)}}</pre>
+            %end
+            %if DEBUG and e.traceback:
+              <h2>Traceback:</h2>
+              <pre>{{e.traceback}}</pre>
+            %end
+        </body>
+    </html>
+%except ImportError:
+    <b>ImportError:</b> Could not generate the error page. Please add bottle to sys.path
+%end
+""")
+""" The HTML template used for error messages """
+
+request = Request()
+""" Whenever a page is requested, the :class:`Bottle` WSGI handler stores
+metadata about the current request into this instance of :class:`Request`.
+It is thread-safe and can be accessed from within handler functions. """
+
+response = Response()
+""" The :class:`Bottle` WSGI handler uses metadata assigned to this instance
+of :class:`Response` to generate the WSGI response. """
+
+local = threading.local()
+""" Thread-local namespace. Not used by Bottle, but could get handy """
+
+# Initialize app stack (create first empty Bottle app)
+# BC: 0.6.4 and needed for run()
+app = default_app = AppStack()
+app.push()


--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/yt/gui/reason/bottle_mods.py	Mon Mar 28 13:44:34 2011 -0600
@@ -0,0 +1,71 @@
+"""
+Modifications and extensions to Bottle, to make it slightly more useful for
+yt's purposes
+
+Author: Matthew Turk <matthewturk at gmail.com>
+Affiliation: NSF / Columbia
+Homepage: http://yt.enzotools.org/
+License:
+  Copyright (C) 2011 Matthew Turk.  All Rights Reserved.
+
+  This file is part of yt.
+
+  yt is free software; you can redistribute it and/or modify
+  it under the terms of the GNU General Public License as published by
+  the Free Software Foundation; either version 3 of the License, or
+  (at your option) any later version.
+
+  This program is distributed in the hope that it will be useful,
+  but WITHOUT ANY WARRANTY; without even the implied warranty of
+  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+  GNU General Public License for more details.
+
+  You should have received a copy of the GNU General Public License
+  along with this program.  If not, see <http://www.gnu.org/licenses/>.
+"""
+
+from .bottle import server_names, debug, route, run
+import uuid
+
+route_functions = {}
+
+def preroute(future_route, *args, **kwargs):
+    def router(func):
+        route_functions[future_route] = (args, kwargs, func)
+        return func
+    return router
+
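+# Usage sketch (path and handler are hypothetical): preroute() only records
+# the handler in route_functions; uuid_serve_functions() below registers the
+# real, token-prefixed routes with Bottle later:
+#
+#     @preroute("/dir", method="GET")
+#     def list_dir():
+#         return "..."
+#
+#     uuid_serve_functions(route_functions)
+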
+def uuid_serve_functions(pre_routed, open_browser=False):
+    debug(mode=True)
+    token = uuid.uuid1()
+    for r in pre_routed:
+        args, kwargs, f = pre_routed[r]
+        if r[0] == "/": r = r[1:]
+        rp = "/%s/%s" % (token, r)
+        print "Routing from %s => %s" % (rp, f.func_name)
+        route(rp, *args, **kwargs)(f)
+    print "Greetings! Your private token is %s ." % token
+    print
+    print "Please direct your browser to:"
+    print
+    print "     http://localhost:8080/%s/" % token
+    print
+    print
+    if open_browser:
+        # We do some fancy footwork so that we can open the browser while the
+        # server starts up.  I got this from some recipe whose URL escapes me.
+        # Thank you, to whoever wrote it!
+        def local_browse():
+            """Start a browser after waiting for half a second."""
+            import webbrowser, threading
+            def _local_browse():
+                webbrowser.open('http://localhost:%s/%s/' % (8080, token))
+            thread = threading.Timer(0.5, _local_browse)
+            thread.start()
+        local_browse()
+    # Right now we only really support the built-in wsgiref, but this may
+    # change if we start using Rocket.
+    server_type = server_names.get("wsgiref")
+    server = server_type(host='localhost', port=8080)
+    #repl.locals['server'] = server
+    run(server=server)


--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/yt/gui/reason/http_repl.py	Mon Mar 28 13:44:34 2011 -0600
@@ -0,0 +1,81 @@
+"""
+A read-eval-print-loop that is served up through Bottle and accepts its
+commands through HTTP
+
+Author: Matthew Turk <matthewturk at gmail.com>
+Affiliation: NSF / Columbia
+Homepage: http://yt.enzotools.org/
+License:
+  Copyright (C) 2011 Matthew Turk.  All Rights Reserved.
+
+  This file is part of yt.
+
+  yt is free software; you can redistribute it and/or modify
+  it under the terms of the GNU General Public License as published by
+  the Free Software Foundation; either version 3 of the License, or
+  (at your option) any later version.
+
+  This program is distributed in the hope that it will be useful,
+  but WITHOUT ANY WARRANTY; without even the implied warranty of
+  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+  GNU General Public License for more details.
+
+  You should have received a copy of the GNU General Public License
+  along with this program.  If not, see <http://www.gnu.org/licenses/>.
+"""
+
+import json
+import os
+
+from .bottle import request, response
+from .bottle_mods import preroute
+from .basic_repl import ProgrammaticREPL
+
+class HTTPREPL(ProgrammaticREPL):
+
+    def __init__(self, locals=None):
+        # First we do the standard initialization
+        ProgrammaticREPL.__init__(self, locals)
+        # Now, since we want to only preroute functions we know about, and
+        # since they have different arguments, and most of all because we only
+        # want to add them to the routing tables (which are a singleton for the
+        # entire interpreter state) we apply all the pre-routing now, rather
+        # than through metaclasses or other fancy decorating.
+        preroute_table = dict(index = ("/", "GET"),
+                              push = ("/push", "POST"),
+                              dir = ("/dir", "GET"),
+                              doc = ("/doc", "GET"))
+        for v, args in preroute_table.items():
+            preroute(args[0], method=args[1])(getattr(self, v))
+
+    def index(self):
+        """Return an HTTP-based Read-Eval-Print-Loop terminal."""
+        # For now this doesn't work!  We will need to move to a better method
+        # for this.
+        localDir = os.path.dirname(__file__)
+        return open(os.path.join(localDir, "httprepl.html")).read()
+        
+    def push(self):
+        """Push 'line' and return exec results as a bare response."""
+        line = request.POST['line']
+        result = ProgrammaticREPL.push(self, line)
+        new_values = self.locals.pop("new_values", "")
+        if result is None:
+            # More input lines needed.
+            response.status = 204
+        return json.dumps(dict(text=result, new_values=new_values))
+
+    def dir(self):
+        """Push 'line' and return result of eval on the final expr."""
+        line = request.GET['line']
+        result = ProgrammaticREPL.dir(self, line)
+        if not result:
+            response.status = 204
+            return
+        return repr(result)
+
+    def doc(self):
+        """Push 'line' and return result of getargspec on the final expr."""
+        line = request.GET['line']
+        result = ProgrammaticREPL.doc(self, line)
+        if not result:
+            response.status = 204
+        return result


--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/yt/gui/reason/setup.py	Mon Mar 28 13:44:34 2011 -0600
@@ -0,0 +1,10 @@
+#!/usr/bin/env python
+import setuptools
+import os, sys, os.path
+
+def configuration(parent_package='',top_path=None):
+    from numpy.distutils.misc_util import Configuration
+    config = Configuration('reason',parent_package,top_path)
+    config.make_config_py() # installs __config__.py
+    config.make_svn_version_py()
+    return config


--- a/yt/gui/reason_v2.py	Mon Mar 28 13:44:14 2011 -0600
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,479 +0,0 @@
-"""
-New version of Reason, using a TraitsUI-based approach
-
-Author: Matthew Turk <matthewturk at gmail.com>
-Affiliation: KIPAC/SLAC/Stanford
-Homepage: http://yt.enzotools.org/
-License:
-  Copyright (C) 2009 Matthew Turk.  All Rights Reserved.
-
-  This file is part of yt.
-
-  yt is free software; you can redistribute it and/or modify
-  it under the terms of the GNU General Public License as published by
-  the Free Software Foundation; either version 3 of the License, or
-  (at your option) any later version.
-
-  This program is distributed in the hope that it will be useful,
-  but WITHOUT ANY WARRANTY; without even the implied warranty of
-  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-  GNU General Public License for more details.
-
-  You should have received a copy of the GNU General Public License
-  along with this program.  If not, see <http://www.gnu.org/licenses/>.
-"""
-
-from yt.mods import *
-from yt.utilities.definitions import \
-    x_dict, \
-    y_dict
-#pf = EnzoStaticOutput("/Users/matthewturk/Research/data/galaxy1200.dir/galaxy1200")
-
-from enthought.traits.api import \
-    HasTraits, List, Instance, Str, Float, Any, Code, PythonValue, Int, CArray, \
-    Property, Enum, cached_property, DelegatesTo, Callable, Array, \
-    Button
-from enthought.traits.ui.api import \
-    Group, VGroup, HGroup, Tabbed, View, Item, ShellEditor, InstanceEditor, ListStrEditor, \
-    ListEditor, VSplit, VFlow, HSplit, VFold, ValueEditor, TreeEditor, TreeNode, RangeEditor, \
-    EnumEditor, Handler, Controller, DNDEditor
-from enthought.traits.ui.menu import \
-    Menu, Action, Separator, OKCancelButtons, OKButton
-from enthought.pyface.action.api import \
-    ActionController
-from enthought.tvtk.pyface.scene_editor import SceneEditor
-from enthought.tvtk.pyface.api import \
-    DecoratedScene
-from enthought.tvtk.pyface.scene_model import SceneModel
-
-from plot_editors import Figure, MPLFigureEditor, MPLVMPlotEditor, Axes
-
-from yt.visualization.plot_types import VMPlot, ProjectionPlot, SlicePlot
-
-import traceback
-from tvtk_interface import \
-    HierarchyImporter, YTScene
-
-class PlotCreationHandler(Controller):
-    main_window = Instance(HasTraits)
-    pnode = Instance(HasTraits)
-
-    format = Str
-    plot_type = Any
-    
-    def close(self, info, is_ok):
-        if not is_ok:
-            super(Controller, self).close(info, True)
-            return
-        spt = self.plot_type(plot_spec=self.model, pf=self.pnode.pf,
-                           name=self.format % (self.model.axis))
-        self.pnode.data_objects.append(spt)
-        self.main_window.plot_frame_tabs.append(spt)
-        spt.plot
-
-class VTKSceneCreationHandler(PlotCreationHandler):
-    importer = Instance(HierarchyImporter)
-
-    def close(self, info, is_ok):
-        if is_ok: 
-            yt_scene = YTScene(importer=self.importer,
-                scene=SceneModel())
-            spt = VTKDataObject(name = "VTK: %s" % self.pnode.pf,
-                    scene=yt_scene.scene,
-                    yt_scene=yt_scene)
-            self.pnode.data_objects.append(spt)
-            self.main_window.plot_frame_tabs.append(spt)
-        super(Controller, self).close(info, True)
-        return True
-
-
-class DataObject(HasTraits):
-    name = Str
-
-class VTKDataObject(DataObject):
-    yt_scene = Instance(YTScene)
-    scene = DelegatesTo("yt_scene")
-    add_contours = Button
-    add_isocontour = Button
-    add_x_plane = Button
-    add_y_plane = Button
-    add_z_plane = Button
-    edit_camera = Button
-    edit_operators = Button
-    edit_pipeline = Button
-    center_on_max = Button
-    operators = DelegatesTo("yt_scene")
-    traits_view = View(
-            Item("scene", editor = 
-        SceneEditor(scene_class=DecoratedScene),
-                    resizable=True, show_label=False),
-            HGroup(Item("add_contours", show_label=False),
-                   Item("add_isocontour", show_label=False),
-                   Item("add_x_plane", show_label=False),
-                   Item("add_y_plane", show_label=False),
-                   Item("add_z_plane", show_label=False),
-                   Item("edit_camera", show_label=False),
-                   Item("edit_operators", show_label=False),
-                   Item("edit_pipeline", show_label=False),
-                   Item("center_on_max", show_label=False),
-                ),
-            )
-
-    operators_edit = View(
-        Item("operators", style='custom', show_label=False,
-             editor=ListEditor(editor=InstanceEditor(),
-                               use_notebook=True),
-              name="Edit Operators"),
-        height=500.0, width=500.0, resizable=True)
-    
-    def _edit_camera_fired(self):
-        self.yt_scene.camera_path.edit_traits()
-
-    def _edit_operators_fired(self):
-        self.edit_traits(view='operators_edit')
-
-    def _edit_pipeline_fired(self):
-        from enthought.tvtk.pipeline.browser import PipelineBrowser
-        pb = PipelineBrowser(self.scene)
-        pb.show()
-
-    def _add_contours_fired(self):
-        self.yt_scene.add_contour()
-
-    def _add_isocontour_fired(self):
-        self.yt_scene.add_isocontour()
-
-    def _add_x_plane_fired(self):
-        self.yt_scene.add_x_plane()
-
-    def _add_y_plane_fired(self):
-        self.yt_scene.add_y_plane()
-
-    def _add_z_plane_fired(self):
-        self.yt_scene.add_z_plane()
-
-    def _center_on_max_fired(self):
-        self.yt_scene.do_center_on_max()
-
-class ParameterFile(HasTraits):
-    pf = Instance(EnzoStaticOutput)
-    data_objects = List(Instance(DataObject))
-    name = Str
-
-    def _name_default(self):
-        return str(self.pf)
-
-    def do_slice(self):
-        cons_view = View(
-                Item('axis'), 
-                Item('center'), 
-                Item('field', editor=EnumEditor(name='field_list')),
-                buttons=OKCancelButtons, title="Slicer: %s" % self.pf)
-        ps = SlicePlotSpec(pf=self.pf)
-        hand = PlotCreationHandler(main_window=mw, pnode=self, model=ps,
-                                   plot_type=SlicePlotTab, format="Slice: %s")
-        ps.edit_traits(cons_view, handler=hand)
-
-    def do_proj(self):
-        cons_view = View(
-                Item('axis'), 
-                Item('field', editor=EnumEditor(name='field_list')),
-                Item('weight_field', editor=EnumEditor(name='none_field_list')),
-                buttons=OKCancelButtons, title="Projector: %s" % self.pf)
-        ps = ProjPlotSpec(pf=self.pf)
-        hand = PlotCreationHandler(main_window=mw, pnode=self, model=ps,
-                                   plot_type=ProjPlotTab, format="Proj: %s")
-        ps.edit_traits(cons_view, handler=hand)
-
-    def do_vtk(self):
-        from tvtk_interface import HierarchyImporter, \
-            HierarchyImportHandler
-        importer = HierarchyImporter(pf=self.pf, max_level=self.pf.h.max_level)
-        importer.edit_traits(handler = VTKSceneCreationHandler(
-            main_window=mw, pnode=self, importer = importer))
-
-class ParameterFileCollection(HasTraits):
-    parameter_files = List(Instance(ParameterFile))
-    name = Str
-    collection = Any
-
-    def _parameter_files_default(self):
-        my_list = []
-        for f in self.collection:
-            try:
-                pf = EnzoStaticOutput(f)
-                my_list.append(
-                    ParameterFile(pf=pf, 
-                            data_objects = []))
-            except IOError: pass
-        return my_list
-
-    def _name_default(self):
-        return str(self.collection)
-
-class ParameterFileCollectionList(HasTraits):
-    parameter_file_collections = List(Instance(ParameterFileCollection))
-
-    def _parameter_file_collections_default(self):
-        return [ParameterFileCollection(collection=c)
-                for c in fido.GrabCollections()]
-
-class DataObjectList(HasTraits):
-    data_objects = List(Str)
-
-    traits_view = View(
-              Item('data_objects', show_label=False,
-                   editor=ListStrEditor())
-               )
-
-    def _data_objects_default(self):
-        return ['a','b','c']
-
-class PlotFrameTab(DataObject):
-    figure = Instance(Figure)
-
-class VMPlotSpec(HasTraits):
-    pf = Instance(EnzoStaticOutput)
-    field = Str('Density')
-    field_list = Property(depends_on = 'pf')
-
-    center = Array(shape=(3,), dtype='float64')
-    axis = Enum(0,1,2)
-
-    @cached_property
-    def _get_field_list(self):
-        fl = self.pf.h.field_list
-        df = self.pf.h.derived_field_list
-        fl.sort(); df.sort()
-        return fl + df
-
-    def _center_default(self):
-        return self.pf.h.find_max("Density")[1]
-
-class SlicePlotSpec(VMPlotSpec):
-    pass
-
-class ProjPlotSpec(VMPlotSpec):
-    weight_field = Str("None")
-    none_field_list = Property(depends_on = 'field_list')
-
-    @cached_property
-    def _get_none_field_list(self):
-        return ["None"] + self.field_list
-
-class VMPlotTab(PlotFrameTab):
-    pf = Instance(EnzoStaticOutput)
-    figure = Instance(Figure, args=())
-    field = DelegatesTo('plot_spec')
-    field_list = DelegatesTo('plot_spec')
-    plot = Instance(VMPlot)
-    axes = Instance(Axes)
-    disp_width = Float(1.0)
-    unit = Str('unitary')
-    min_width = Property(Float, depends_on=['pf','unit'])
-    max_width = Property(Float, depends_on=['pf','unit'])
-    unit_list = Property(depends_on = 'pf')
-    smallest_dx = Property(depends_on = 'pf')
-
-    traits_view = View(VGroup(
-            HGroup(Item('figure', editor=MPLVMPlotEditor(),
-                     show_label=False)),
-            HGroup(Item('disp_width',
-                     editor=RangeEditor(format="%0.2e",
-                        low_name='min_width', high_name='max_width',
-                        mode='logslider', enter_set=True),
-                     show_label=False, width=400.0),
-                   Item('unit',
-                      editor=EnumEditor(name='unit_list')),),
-            HGroup(Item('field',
-                      editor=EnumEditor(name='field_list')),
-                )),
-             resizable=True)
-
-    def __init__(self, **traits):
-        super(VMPlotTab, self).__init__(**traits)
-        self.axes = self.figure.add_subplot(111, aspect='equal')
-
-    def _field_changed(self, old, new):
-        self.plot.switch_z(new)
-        self._redraw()
-
-    @cached_property
-    def _get_min_width(self):
-        return 50.0*self.smallest_dx*self.pf[self.unit]
-
-    @cached_property
-    def _get_max_width(self):
-        return self.pf['unitary']*self.pf[self.unit]
-
-    @cached_property
-    def _get_smallest_dx(self):
-        return self.pf.h.get_smallest_dx()
-
-    @cached_property
-    def _get_unit_list(self):
-        return self.pf.units.keys()
-
-    def _unit_changed(self, old, new):
-        self.disp_width = self.disp_width * self.pf[new]/self.pf[old]
-
-    def _disp_width_changed(self, old, new):
-        self.plot.set_width(new, self.unit)
-        self._redraw()
-
-    def _redraw(self):
-        self.figure.canvas.draw()
-
-    def recenter(self, event):
-        xp, yp = event.xdata, event.ydata
-        dx = abs(self.plot.xlim[0] - self.plot.xlim[1])/self.plot.pix[0]
-        dy = abs(self.plot.ylim[0] - self.plot.ylim[1])/self.plot.pix[1]
-        x = (dx * xp) + self.plot.xlim[0]
-        y = (dy * yp) + self.plot.ylim[0]
-        xi = x_dict[self.axis]
-        yi = y_dict[self.axis]
-        cc = self.center[:]
-        cc[xi] = x; cc[yi] = y
-        self.plot.data.center = cc[:]
-        self.plot.data.set_field_parameter('center', cc.copy())
-        self.center = cc
-
-class SlicePlotTab(VMPlotTab):
-    plot_spec = Instance(SlicePlotSpec)
-
-    axis = DelegatesTo('plot_spec')
-    center = DelegatesTo('plot_spec')
-    
-    plot = Instance(SlicePlot)
-
-    def _plot_default(self):
-        coord = self.center[self.axis]
-        sl = self.pf.h.slice(self.axis, coord, center=self.center[:])
-        sp = SlicePlot(sl, self.field, self.figure, self.axes)
-        self.figure.canvas.draw()
-        return sp
-
-    def _center_changed(self, old, new):
-        #traceback.print_stack()
-        if na.all(na.abs(old - new) == 0.0): return
-        print na.abs(old-new)
-        print "Re-slicing", old, new
-        pp = self.center
-        self.plot.data.reslice(pp[self.axis])
-        self.plot._refresh_display_width()
-        self.figure.canvas.draw()
-
-class ProjPlotTab(VMPlotTab):
-    plot_spec = Instance(ProjPlotSpec)
-
-    axis = DelegatesTo('plot_spec')
-    center = DelegatesTo('plot_spec')
-    weight_field = DelegatesTo('plot_spec')
-
-    plot = Instance(ProjectionPlot)
-
-    def _plot_default(self):
-        self.field = self.field[:]
-        self.weight_field = self.weight_field[:]
-        wf = self.weight_field
-        if str(wf) == "None": wf = None
-        proj = self.pf.h.proj(self.axis, self.field, wf,
-                        center=self.center[:])
-        pp = ProjectionPlot(proj, self.field, self.figure, self.axes)
-        self.figure.canvas.draw()
-        return pp
-
-    def _center_changed(self, old, new):
-        self.plot._refresh_display_width()
-
-class SphereWrapper(DataObject):
-    radius = Float
-    unit = Str
-
-class MainWindow(HasTraits):
-    parameter_file_collections = Instance(ParameterFileCollectionList)
-    parameter_files = Instance(ParameterFileCollection)
-    plot_frame_tabs = List(Instance(DataObject))
-    open_parameterfile = Button
-    shell = PythonValue
-
-    def _shell_default(self):
-        return globals()
-    notebook_editor = ListEditor(editor=InstanceEditor(editable=True),
-                                 use_notebook=True)
-
-    traits_view = View(VSplit(
-                    HSplit(VGroup(
-                       Item('parameter_file_collections', 
-                            width=120.0, height=500.0,
-                            show_label=False,
-                            editor = TreeEditor(editable=False,
-                    nodes=[
-                        TreeNode(node_for=[ParameterFileCollectionList],
-                                 children='parameter_file_collections',
-                                 label="=Data Collections"),
-                        TreeNode(node_for=[ParameterFileCollection],
-                                 children='parameter_files',
-                                 label="name",
-                                 view=View()),
-                        TreeNode(node_for=[ParameterFile],
-                                 children='data_objects',
-                                 label="name",
-                                 menu = Menu(Action(name='Slice',
-                                                    action='object.do_slice'),
-                                             Action(name='Project',
-                                                    action='object.do_proj'),
-                                             Action(name='VTK',
-                                                    action='object.do_vtk')),
-                                 view=View()),
-                        TreeNode(node_for=[DataObject],
-                                 children='',
-                                 label="name"),
-                                ], show_icons=False),),
-                        Item('open_parameterfile', show_label=False)),
-                       Item('plot_frame_tabs', style='custom',
-                            editor = notebook_editor,
-                            show_label=False, height=500.0, width=500.0),
-                    ),
-                    HGroup(
-                       #Item('shell', editor=ShellEditor(share=True),
-                            #show_label=False, height=120.0),
-                    ),
-                ),
-               resizable=True, width=800.0, height=660.0,
-               title="reason v2 [prototype]")
-
-    def _open_parameterfile_fired(self):
-        print "OPENING"
-
-    def _parameter_file_collections_default(self):
-        return ParameterFileCollectionList()
-
-class YTScript(HasTraits):
-    code = Code
-    traits_view = View(Item('code', show_label=False),
-                       height=0.8, width=0.8, resizable=True,
-                       buttons=OKCancelButtons)
-
-class ObjectViewer(HasTraits):
-    to_view=Any
-    traits_view = View(
-            Item('to_view', editor=ValueEditor(), show_label=False),
-                     resizable=True, height=0.8, width=0.8)
-
-def view_object(obj):
-    ObjectViewer(to_view=obj).edit_traits()
-
-def run_script():
-    my_script = YTScript()
-    my_script.edit_traits()
-    return my_script
-
-class event_mock(object):
-    inaxes = True
-    button = 3
-
-dol = DataObjectList()
-mw = MainWindow(plot_frame_tabs = [])
-mw.edit_traits()
-#mw.edit_traits()


--- a/yt/gui/setup.py	Mon Mar 28 13:44:14 2011 -0600
+++ b/yt/gui/setup.py	Mon Mar 28 13:44:34 2011 -0600
@@ -5,6 +5,9 @@
 def configuration(parent_package='',top_path=None):
     from numpy.distutils.misc_util import Configuration
     config = Configuration('gui',parent_package,top_path)
+    config.add_subpackage('opengl_widgets')
+    config.add_subpackage('traited_explorer')
+    config.add_subpackage('reason')
     config.make_config_py() # installs __config__.py
     config.make_svn_version_py()
     return config
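
The three add_subpackage calls only take effect because each subdirectory now
carries the small configuration() boilerplate added in this changeset;
numpy.distutils recurses through them at build time.  For context, a minimal
sketch of a top-level driver under the standard numpy.distutils idiom
(illustrative only; yt's actual top-level setup.py carries more machinery):

#!/usr/bin/env python
from numpy.distutils.misc_util import Configuration
from numpy.distutils.core import setup

def configuration(parent_package='', top_path=None):
    config = Configuration(None, parent_package, top_path)
    # Recurses into yt/setup.py, then yt/gui/setup.py, which in turn
    # pulls in opengl_widgets, traited_explorer and reason.
    config.add_subpackage('yt')
    return config

if __name__ == '__main__':
    setup(configuration=configuration)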


--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/yt/gui/traited_explorer/plot_editors.py	Mon Mar 28 13:44:34 2011 -0600
@@ -0,0 +1,128 @@
+"""
+Figure editors for the Traits GUI
+
+Author: Matthew Turk <matthewturk at gmail.com>
+Affiliation: KIPAC/SLAC/Stanford
+Homepage: http://yt.enzotools.org/
+License:
+  Copyright (C) 2009 Matthew Turk.  All Rights Reserved.
+
+  This file is part of yt.
+
+  yt is free software; you can redistribute it and/or modify
+  it under the terms of the GNU General Public License as published by
+  the Free Software Foundation; either version 3 of the License, or
+  (at your option) any later version.
+
+  This program is distributed in the hope that it will be useful,
+  but WITHOUT ANY WARRANTY; without even the implied warranty of
+  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+  GNU General Public License for more details.
+
+  You should have received a copy of the GNU General Public License
+  along with this program.  If not, see <http://www.gnu.org/licenses/>.
+"""
+
+import sys, matplotlib
+# We want matplotlib to use a Qt4 backend
+matplotlib.use('QT4Agg')
+from matplotlib.backends.backend_qt4agg import FigureCanvasQTAgg as FigureCanvas
+from matplotlib.figure import Figure
+from matplotlib.axes import Axes
+
+from enthought.traits.api import Any, Instance
+from enthought.traits.ui.qt4.editor import Editor
+from enthought.traits.ui.qt4.basic_editor_factory import BasicEditorFactory
+
+from enthought.pyface.action.api import ActionController
+
+from enthought.traits.ui.menu import \
+    Menu, Action, Separator, OKCancelButtons, OKButton
+
+from matplotlib.backend_bases import Event as MPLEvent
+
+class _MPLFigureEditor(Editor):
+    """ Snagged from Gael's tutorial """
+
+    scrollable  = True
+    mpl_control = Instance(FigureCanvas)
+
+    def init(self, parent):
+        self.control = self._create_canvas(parent)
+        self.set_tooltip()
+
+    def update_editor(self):
+        pass
+
+    def _create_canvas(self, parent):
+        """ Create the MPL canvas. """
+        # The panel lets us add additional controls.  FIXME: this block is
+        panel = wx.Panel(parent, -1)
+        sizer = wx.BoxSizer(wx.VERTICAL)
+        panel.SetSizer(sizer)
+        # matplotlib commands to create a canvas
+        self.mpl_control = FigureCanvas(panel, -1, self.value)
+        sizer.Add(self.mpl_control, 1, wx.LEFT | wx.TOP | wx.GROW | wx.SHAPED)
+        self.value.canvas.SetMinSize((10,8))
+        return panel
+
+class MPLFigureEditor(BasicEditorFactory):
+    klass = _MPLFigureEditor
+
+class MPLAction(Action):
+    event = Instance(MPLEvent)
+
+class _MPLVMPlotEditor(_MPLFigureEditor, ActionController):
+
+    def _create_canvas(self, parent):
+        panel = _MPLFigureEditor._create_canvas(self, parent)
+        self.mpl_control.mpl_connect("button_press_event", self.on_click)
+        return panel
+
+    def on_click(self, event):
+        if not event.inaxes: return
+        if event.button == 3:
+            my_menu = Menu(MPLAction(name="Recenter", action="object.recenter",
+                                     event=event),
+                           MPLAction(name="Yo!", action="object.do_something",
+                                     event=event))
+            wxmenu = my_menu.create_menu(self.mpl_control, self)
+            self.mpl_control.PopupMenuXY(wxmenu) # wx-only API; see FIXME above
+
+    def perform ( self, action ):
+        """
+        This is largely taken/modified from the TreeEditor _perform method.
+        """
+        object            = self.object
+        method_name       = action.action
+        info              = self.ui.info
+        handler           = self.ui.handler
+        event             = action.event
+
+        if method_name.find( '.' ) >= 0:
+            if method_name.find( '(' ) < 0:
+                method_name += '(event)'
+            try:
+                eval( method_name, globals(),
+                      { 'object':  object,
+                        'editor':  self,
+                        'info':    info,
+                        'event':   event,
+                        'handler': handler } )
+            except Exception:
+                # fixme: Should the exception be logged somewhere?
+                print sys.exc_info()
+                
+            return
+
+        method = getattr( handler, method_name, None )
+        if method is not None:
+            method( info, object )
+            return
+
+        if action.on_perform is not None:
+            action.on_perform( object )
+
+class MPLVMPlotEditor(BasicEditorFactory):
+    klass = _MPLVMPlotEditor
+
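
The perform() override above is the dispatch glue: a TraitsUI Action whose name
contains a dot ("object.recenter") is rewritten into a call expression and
eval()ed against a hand-built namespace, which is how a right-click ends up
invoking VMPlotTab.recenter(event).  The same trick isolated from the TraitsUI
machinery (Target and FakeEvent are invented for illustration):

class Target(object):
    def recenter(self, event):
        return "recenter at (%s, %s)" % (event.xdata, event.ydata)

class FakeEvent(object):
    xdata, ydata = 0.3, 0.7

def perform(obj, method_name, event):
    # Mirror _MPLVMPlotEditor.perform: turn a dotted name into a call
    # expression and evaluate it in a controlled namespace.
    if '.' in method_name:
        if '(' not in method_name:
            method_name += '(event)'
        return eval(method_name, globals(),
                    {'object': obj, 'event': event})
    raise AttributeError(method_name)

print perform(Target(), "object.recenter", FakeEvent())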


--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/yt/gui/traited_explorer/setup.py	Mon Mar 28 13:44:34 2011 -0600
@@ -0,0 +1,10 @@
+#!/usr/bin/env python
+import setuptools
+import os, sys, os.path
+
+def configuration(parent_package='',top_path=None):
+    from numpy.distutils.misc_util import Configuration
+    config = Configuration('traited_explorer',parent_package,top_path)
+    config.make_config_py() # installs __config__.py
+    config.make_svn_version_py()
+    return config


--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/yt/gui/traited_explorer/traited_explorer.py	Mon Mar 28 13:44:34 2011 -0600
@@ -0,0 +1,479 @@
+"""
+New version of Reason, using a TraitsUI-based approach
+
+Author: Matthew Turk <matthewturk at gmail.com>
+Affiliation: KIPAC/SLAC/Stanford
+Homepage: http://yt.enzotools.org/
+License:
+  Copyright (C) 2009 Matthew Turk.  All Rights Reserved.
+
+  This file is part of yt.
+
+  yt is free software; you can redistribute it and/or modify
+  it under the terms of the GNU General Public License as published by
+  the Free Software Foundation; either version 3 of the License, or
+  (at your option) any later version.
+
+  This program is distributed in the hope that it will be useful,
+  but WITHOUT ANY WARRANTY; without even the implied warranty of
+  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+  GNU General Public License for more details.
+
+  You should have received a copy of the GNU General Public License
+  along with this program.  If not, see <http://www.gnu.org/licenses/>.
+"""
+
+from yt.mods import *
+from yt.utilities.definitions import \
+    x_dict, \
+    y_dict
+#pf = EnzoStaticOutput("/Users/matthewturk/Research/data/galaxy1200.dir/galaxy1200")
+
+from enthought.traits.api import \
+    HasTraits, List, Instance, Str, Float, Any, Code, PythonValue, Int, CArray, \
+    Property, Enum, cached_property, DelegatesTo, Callable, Array, \
+    Button
+from enthought.traits.ui.api import \
+    Group, VGroup, HGroup, Tabbed, View, Item, ShellEditor, InstanceEditor, ListStrEditor, \
+    ListEditor, VSplit, VFlow, HSplit, VFold, ValueEditor, TreeEditor, TreeNode, RangeEditor, \
+    EnumEditor, Handler, Controller, DNDEditor
+from enthought.traits.ui.menu import \
+    Menu, Action, Separator, OKCancelButtons, OKButton
+from enthought.pyface.action.api import \
+    ActionController
+from enthought.tvtk.pyface.scene_editor import SceneEditor
+from enthought.tvtk.pyface.api import \
+    DecoratedScene
+from enthought.tvtk.pyface.scene_model import SceneModel
+
+from plot_editors import Figure, MPLFigureEditor, MPLVMPlotEditor, Axes
+
+from yt.visualization.plot_types import VMPlot, ProjectionPlot, SlicePlot
+
+import traceback
+from tvtk_interface import \
+    HierarchyImporter, YTScene
+
+class PlotCreationHandler(Controller):
+    main_window = Instance(HasTraits)
+    pnode = Instance(HasTraits)
+
+    format = Str
+    plot_type = Any
+    
+    def close(self, info, is_ok):
+        if not is_ok:
+            super(Controller, self).close(info, True)
+            return
+        spt = self.plot_type(plot_spec=self.model, pf=self.pnode.pf,
+                           name=self.format % (self.model.axis))
+        self.pnode.data_objects.append(spt)
+        self.main_window.plot_frame_tabs.append(spt)
+        spt.plot # accessing .plot forces creation of the default plot
+
+class VTKSceneCreationHandler(PlotCreationHandler):
+    importer = Instance(HierarchyImporter)
+
+    def close(self, info, is_ok):
+        if is_ok: 
+            yt_scene = YTScene(importer=self.importer,
+                scene=SceneModel())
+            spt = VTKDataObject(name = "VTK: %s" % self.pnode.pf,
+                    scene=yt_scene.scene,
+                    yt_scene=yt_scene)
+            self.pnode.data_objects.append(spt)
+            self.main_window.plot_frame_tabs.append(spt)
+        super(Controller, self).close(info, True)
+        return True
+
+
+class DataObject(HasTraits):
+    name = Str
+
+class VTKDataObject(DataObject):
+    yt_scene = Instance(YTScene)
+    scene = DelegatesTo("yt_scene")
+    add_contours = Button
+    add_isocontour = Button
+    add_x_plane = Button
+    add_y_plane = Button
+    add_z_plane = Button
+    edit_camera = Button
+    edit_operators = Button
+    edit_pipeline = Button
+    center_on_max = Button
+    operators = DelegatesTo("yt_scene")
+    traits_view = View(
+            Item("scene", editor = 
+        SceneEditor(scene_class=DecoratedScene),
+                    resizable=True, show_label=False),
+            HGroup(Item("add_contours", show_label=False),
+                   Item("add_isocontour", show_label=False),
+                   Item("add_x_plane", show_label=False),
+                   Item("add_y_plane", show_label=False),
+                   Item("add_z_plane", show_label=False),
+                   Item("edit_camera", show_label=False),
+                   Item("edit_operators", show_label=False),
+                   Item("edit_pipeline", show_label=False),
+                   Item("center_on_max", show_label=False),
+                ),
+            )
+
+    operators_edit = View(
+        Item("operators", style='custom', show_label=False,
+             editor=ListEditor(editor=InstanceEditor(),
+                               use_notebook=True),
+              name="Edit Operators"),
+        height=500.0, width=500.0, resizable=True)
+    
+    def _edit_camera_fired(self):
+        self.yt_scene.camera_path.edit_traits()
+
+    def _edit_operators_fired(self):
+        self.edit_traits(view='operators_edit')
+
+    def _edit_pipeline_fired(self):
+        from enthought.tvtk.pipeline.browser import PipelineBrowser
+        pb = PipelineBrowser(self.scene)
+        pb.show()
+
+    def _add_contours_fired(self):
+        self.yt_scene.add_contour()
+
+    def _add_isocontour_fired(self):
+        self.yt_scene.add_isocontour()
+
+    def _add_x_plane_fired(self):
+        self.yt_scene.add_x_plane()
+
+    def _add_y_plane_fired(self):
+        self.yt_scene.add_y_plane()
+
+    def _add_z_plane_fired(self):
+        self.yt_scene.add_z_plane()
+
+    def _center_on_max_fired(self):
+        self.yt_scene.do_center_on_max()
+
+class ParameterFile(HasTraits):
+    pf = Instance(EnzoStaticOutput)
+    data_objects = List(Instance(DataObject))
+    name = Str
+
+    def _name_default(self):
+        return str(self.pf)
+
+    def do_slice(self):
+        cons_view = View(
+                Item('axis'), 
+                Item('center'), 
+                Item('field', editor=EnumEditor(name='field_list')),
+                buttons=OKCancelButtons, title="Slicer: %s" % self.pf)
+        ps = SlicePlotSpec(pf=self.pf)
+        hand = PlotCreationHandler(main_window=mw, pnode=self, model=ps,
+                                   plot_type=SlicePlotTab, format="Slice: %s")
+        ps.edit_traits(cons_view, handler=hand)
+
+    def do_proj(self):
+        cons_view = View(
+                Item('axis'), 
+                Item('field', editor=EnumEditor(name='field_list')),
+                Item('weight_field', editor=EnumEditor(name='none_field_list')),
+                buttons=OKCancelButtons, title="Projector: %s" % self.pf)
+        ps = ProjPlotSpec(pf=self.pf)
+        hand = PlotCreationHandler(main_window=mw, pnode=self, model=ps,
+                                   plot_type=ProjPlotTab, format="Proj: %s")
+        ps.edit_traits(cons_view, handler=hand)
+
+    def do_vtk(self):
+        from tvtk_interface import HierarchyImporter, \
+            HierarchyImportHandler
+        importer = HierarchyImporter(pf=self.pf, max_level=self.pf.h.max_level)
+        importer.edit_traits(handler = VTKSceneCreationHandler(
+            main_window=mw, pnode=self, importer = importer))
+
+class ParameterFileCollection(HasTraits):
+    parameter_files = List(Instance(ParameterFile))
+    name = Str
+    collection = Any
+
+    def _parameter_files_default(self):
+        my_list = []
+        for f in self.collection:
+            try:
+                pf = EnzoStaticOutput(f)
+                my_list.append(
+                    ParameterFile(pf=pf, 
+                            data_objects = []))
+            except IOError: pass
+        return my_list
+
+    def _name_default(self):
+        return str(self.collection)
+
+class ParameterFileCollectionList(HasTraits):
+    parameter_file_collections = List(Instance(ParameterFileCollection))
+
+    def _parameter_file_collections_default(self):
+        return [ParameterFileCollection(collection=c)
+                for c in fido.GrabCollections()]
+
+class DataObjectList(HasTraits):
+    data_objects = List(Str)
+
+    traits_view = View(
+              Item('data_objects', show_label=False,
+                   editor=ListStrEditor())
+               )
+
+    def _data_objects_default(self):
+        return ['a','b','c']
+
+class PlotFrameTab(DataObject):
+    figure = Instance(Figure)
+
+class VMPlotSpec(HasTraits):
+    pf = Instance(EnzoStaticOutput)
+    field = Str('Density')
+    field_list = Property(depends_on = 'pf')
+
+    center = Array(shape=(3,), dtype='float64')
+    axis = Enum(0,1,2)
+
+    @cached_property
+    def _get_field_list(self):
+        fl = self.pf.h.field_list
+        df = self.pf.h.derived_field_list
+        fl.sort(); df.sort()
+        return fl + df
+
+    def _center_default(self):
+        return self.pf.h.find_max("Density")[1]
+
+class SlicePlotSpec(VMPlotSpec):
+    pass
+
+class ProjPlotSpec(VMPlotSpec):
+    weight_field = Str("None")
+    none_field_list = Property(depends_on = 'field_list')
+
+    @cached_property
+    def _get_none_field_list(self):
+        return ["None"] + self.field_list
+
+class VMPlotTab(PlotFrameTab):
+    pf = Instance(EnzoStaticOutput)
+    figure = Instance(Figure, args=())
+    field = DelegatesTo('plot_spec')
+    field_list = DelegatesTo('plot_spec')
+    plot = Instance(VMPlot)
+    axes = Instance(Axes)
+    disp_width = Float(1.0)
+    unit = Str('unitary')
+    min_width = Property(Float, depends_on=['pf','unit'])
+    max_width = Property(Float, depends_on=['pf','unit'])
+    unit_list = Property(depends_on = 'pf')
+    smallest_dx = Property(depends_on = 'pf')
+
+    traits_view = View(VGroup(
+            HGroup(Item('figure', editor=MPLVMPlotEditor(),
+                     show_label=False)),
+            HGroup(Item('disp_width',
+                     editor=RangeEditor(format="%0.2e",
+                        low_name='min_width', high_name='max_width',
+                        mode='logslider', enter_set=True),
+                     show_label=False, width=400.0),
+                   Item('unit',
+                      editor=EnumEditor(name='unit_list')),),
+            HGroup(Item('field',
+                      editor=EnumEditor(name='field_list')),
+                )),
+             resizable=True)
+
+    def __init__(self, **traits):
+        super(VMPlotTab, self).__init__(**traits)
+        self.axes = self.figure.add_subplot(111, aspect='equal')
+
+    def _field_changed(self, old, new):
+        self.plot.switch_z(new)
+        self._redraw()
+
+    @cached_property
+    def _get_min_width(self):
+        return 50.0*self.smallest_dx*self.pf[self.unit]
+
+    @cached_property
+    def _get_max_width(self):
+        return self.pf['unitary']*self.pf[self.unit]
+
+    @cached_property
+    def _get_smallest_dx(self):
+        return self.pf.h.get_smallest_dx()
+
+    @cached_property
+    def _get_unit_list(self):
+        return self.pf.units.keys()
+
+    def _unit_changed(self, old, new):
+        self.disp_width = self.disp_width * self.pf[new]/self.pf[old]
+
+    def _disp_width_changed(self, old, new):
+        self.plot.set_width(new, self.unit)
+        self._redraw()
+
+    def _redraw(self):
+        self.figure.canvas.draw()
+
+    def recenter(self, event):
+        xp, yp = event.xdata, event.ydata
+        dx = abs(self.plot.xlim[0] - self.plot.xlim[1])/self.plot.pix[0]
+        dy = abs(self.plot.ylim[0] - self.plot.ylim[1])/self.plot.pix[1]
+        x = (dx * xp) + self.plot.xlim[0]
+        y = (dy * yp) + self.plot.ylim[0]
+        xi = x_dict[self.axis]
+        yi = y_dict[self.axis]
+        cc = self.center[:]
+        cc[xi] = x; cc[yi] = y
+        self.plot.data.center = cc[:]
+        self.plot.data.set_field_parameter('center', cc.copy())
+        self.center = cc
+
+class SlicePlotTab(VMPlotTab):
+    plot_spec = Instance(SlicePlotSpec)
+
+    axis = DelegatesTo('plot_spec')
+    center = DelegatesTo('plot_spec')
+    
+    plot = Instance(SlicePlot)
+
+    def _plot_default(self):
+        coord = self.center[self.axis]
+        sl = self.pf.h.slice(self.axis, coord, center=self.center[:])
+        sp = SlicePlot(sl, self.field, self.figure, self.axes)
+        self.figure.canvas.draw()
+        return sp
+
+    def _center_changed(self, old, new):
+        #traceback.print_stack()
+        if na.all(na.abs(old - new) == 0.0): return
+        print na.abs(old-new)
+        print "Re-slicing", old, new
+        pp = self.center
+        self.plot.data.reslice(pp[self.axis])
+        self.plot._refresh_display_width()
+        self.figure.canvas.draw()
+
+class ProjPlotTab(VMPlotTab):
+    plot_spec = Instance(ProjPlotSpec)
+
+    axis = DelegatesTo('plot_spec')
+    center = DelegatesTo('plot_spec')
+    weight_field = DelegatesTo('plot_spec')
+
+    plot = Instance(ProjectionPlot)
+
+    def _plot_default(self):
+        self.field = self.field[:]
+        self.weight_field = self.weight_field[:]
+        wf = self.weight_field
+        if str(wf) == "None": wf = None
+        proj = self.pf.h.proj(self.axis, self.field, wf,
+                        center=self.center[:])
+        pp = ProjectionPlot(proj, self.field, self.figure, self.axes)
+        self.figure.canvas.draw()
+        return pp
+
+    def _center_changed(self, old, new):
+        self.plot._refresh_display_width()
+
+class SphereWrapper(DataObject):
+    radius = Float
+    unit = Str
+
+class MainWindow(HasTraits):
+    parameter_file_collections = Instance(ParameterFileCollectionList)
+    parameter_files = Instance(ParameterFileCollection)
+    plot_frame_tabs = List(Instance(DataObject))
+    open_parameterfile = Button
+    shell = PythonValue
+
+    def _shell_default(self):
+        return globals()
+    notebook_editor = ListEditor(editor=InstanceEditor(editable=True),
+                                 use_notebook=True)
+
+    traits_view = View(VSplit(
+                    HSplit(VGroup(
+                       Item('parameter_file_collections', 
+                            width=120.0, height=500.0,
+                            show_label=False,
+                            editor = TreeEditor(editable=False,
+                    nodes=[
+                        TreeNode(node_for=[ParameterFileCollectionList],
+                                 children='parameter_file_collections',
+                                 label="=Data Collections"),
+                        TreeNode(node_for=[ParameterFileCollection],
+                                 children='parameter_files',
+                                 label="name",
+                                 view=View()),
+                        TreeNode(node_for=[ParameterFile],
+                                 children='data_objects',
+                                 label="name",
+                                 menu = Menu(Action(name='Slice',
+                                                    action='object.do_slice'),
+                                             Action(name='Project',
+                                                    action='object.do_proj'),
+                                             Action(name='VTK',
+                                                    action='object.do_vtk')),
+                                 view=View()),
+                        TreeNode(node_for=[DataObject],
+                                 children='',
+                                 label="name"),
+                                ], show_icons=False),),
+                        Item('open_parameterfile', show_label=False)),
+                       Item('plot_frame_tabs', style='custom',
+                            editor = notebook_editor,
+                            show_label=False, height=500.0, width=500.0),
+                    ),
+                    HGroup(
+                       #Item('shell', editor=ShellEditor(share=True),
+                            #show_label=False, height=120.0),
+                    ),
+                ),
+               resizable=True, width=800.0, height=660.0,
+               title="reason v2 [prototype]")
+
+    def _open_parameterfile_fired(self):
+        print "OPENING"
+
+    def _parameter_file_collections_default(self):
+        return ParameterFileCollectionList()
+
+class YTScript(HasTraits):
+    code = Code
+    traits_view = View(Item('code', show_label=False),
+                       height=0.8, width=0.8, resizable=True,
+                       buttons=OKCancelButtons)
+
+class ObjectViewer(HasTraits):
+    to_view=Any
+    traits_view = View(
+            Item('to_view', editor=ValueEditor(), show_label=False),
+                     resizable=True, height=0.8, width=0.8)
+
+def view_object(obj):
+    ObjectViewer(to_view=obj).edit_traits()
+
+def run_script():
+    my_script = YTScript()
+    my_script.edit_traits()
+    return my_script
+
+class event_mock(object):
+    inaxes = True
+    button = 3
+
+dol = DataObjectList()
+mw = MainWindow(plot_frame_tabs = [])
+mw.edit_traits()
+#mw.edit_traits()
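
Nearly all of the interactivity in this file rests on two Traits conventions:
statically named change handlers (_<trait>_changed fires on assignment, as in
_disp_width_changed) and cached Property traits recomputed from their
depends_on list (as in min_width and max_width on VMPlotTab).  A minimal
sketch of the pair, assuming the same enthought.traits.api imported above
(WidthControl is invented for illustration):

from enthought.traits.api import \
    HasTraits, Float, Str, Property, cached_property

class WidthControl(HasTraits):
    disp_width = Float(1.0)
    unit = Str('unitary')
    # Invalidated and lazily recomputed when either dependency changes.
    label = Property(depends_on=['disp_width', 'unit'])

    @cached_property
    def _get_label(self):
        return "%0.2e %s" % (self.disp_width, self.unit)

    def _disp_width_changed(self, old, new):
        # Called automatically on assignment, like VMPlotTab's handler.
        print "width changed: %s -> %s" % (old, new)

w = WidthControl()
w.disp_width = 2.5   # fires _disp_width_changed
print w.label        # 2.50e+00 unitary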


--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/yt/gui/traited_explorer/tvtk_interface.py	Mon Mar 28 13:44:34 2011 -0600
@@ -0,0 +1,692 @@
+"""
+This is the preliminary interface to VTK.  Note that as of VTK 5.2, it still
+requires a patchset prepared here:
+http://yt.enzotools.org/files/vtk_composite_data.zip
+
+Author: Matthew Turk <matthewturk at gmail.com>
+Affiliation: KIPAC/SLAC/Stanford
+Homepage: http://yt.enzotools.org/
+License:
+  Copyright (C) 2007-2009 Matthew Turk.  All Rights Reserved.
+
+  This file is part of yt.
+
+  yt is free software; you can redistribute it and/or modify
+  it under the terms of the GNU General Public License as published by
+  the Free Software Foundation; either version 3 of the License, or
+  (at your option) any later version.
+
+  This program is distributed in the hope that it will be useful,
+  but WITHOUT ANY WARRANTY; without even the implied warranty of
+  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+  GNU General Public License for more details.
+
+  You should have received a copy of the GNU General Public License
+  along with this program.  If not, see <http://www.gnu.org/licenses/>.
+"""
+
+from enthought.tvtk.tools import ivtk
+from enthought.tvtk.api import tvtk 
+from enthought.traits.api import \
+    Float, HasTraits, Instance, Range, Any, Delegate, Tuple, File, Int, Str, \
+    CArray, List, Button, Bool, Property, cached_property
+from enthought.traits.ui.api import View, Item, HGroup, VGroup, TableEditor, \
+    Handler, Controller, RangeEditor, EnumEditor, InstanceEditor
+from enthought.traits.ui.menu import \
+    Menu, Action, Separator, OKCancelButtons, OKButton
+from enthought.traits.ui.table_column import ObjectColumn
+from enthought.tvtk.pyface.api import DecoratedScene
+
+import enthought.pyface.api as pyface
+
+#from yt.reason import *
+import sys
+import numpy as na
+import time, pickle, os, os.path
+from yt.funcs import *
+from yt.analysis_modules.hierarchy_subset.api import \
+        ExtractedHierarchy, ExtractedParameterFile
+
+#from enthought.tvtk.pyface.ui.wx.wxVTKRenderWindowInteractor \
+     #import wxVTKRenderWindowInteractor
+
+from enthought.mayavi.core.lut_manager import LUTManager
+
+#wxVTKRenderWindowInteractor.USE_STEREO = 1
+
+class TVTKMapperWidget(HasTraits):
+    alpha = Float(1.0)
+    post_call = Any
+    lut_manager = Instance(LUTManager)
+
+    def _alpha_changed(self, old, new):
+        self.lut_manager.lut.alpha_range = (new, new)
+        self.post_call()
+
+class MappingPlane(TVTKMapperWidget):
+    plane = Instance(tvtk.Plane)
+    _coord_redit = RangeEditor(format="%0.2e",
+                              low_name='vmin', high_name='vmax',
+                              auto_set=False, enter_set=True)
+    auto_set = Bool(False)
+    traits_view = View(Item('coord', editor=_coord_redit),
+                       Item('auto_set'),
+                       Item('alpha', editor=RangeEditor(
+                              low=0.0, high=1.0,
+                              enter_set=True, auto_set=False)),
+                       Item('lut_manager', show_label=False,
+                            editor=InstanceEditor(), style='custom'))
+    vmin = Float
+    vmax = Float
+
+    def _auto_set_changed(self, old, new):
+        if new is True:
+            self._coord_redit.auto_set = True
+            self._coord_redit.enter_set = False
+        else:
+            self._coord_redit.auto_set = False
+            self._coord_redit.enter_set = True
+
+    def __init__(self, vmin, vmax, vdefault, **traits):
+        HasTraits.__init__(self, **traits)
+        self.vmin = vmin
+        self.vmax = vmax
+        trait = Range(float(vmin), float(vmax), value=vdefault)
+        self.add_trait("coord", trait)
+        self.coord = vdefault
+
+    def _coord_changed(self, old, new):
+        orig = self.plane.origin[:]
+        orig[self.axis] = new
+        self.plane.origin = orig
+        self.post_call()
+
+class MappingMarchingCubes(TVTKMapperWidget):
+    operator = Instance(tvtk.MarchingCubes)
+    mapper = Instance(tvtk.HierarchicalPolyDataMapper)
+    vmin = Float
+    vmax = Float
+    auto_set = Bool(False)
+    _val_redit = RangeEditor(format="%0.2f",
+                             low_name='vmin', high_name='vmax',
+                             auto_set=False, enter_set=True)
+    traits_view = View(Item('value', editor=_val_redit),
+                       Item('auto_set'),
+                       Item('alpha', editor=RangeEditor(
+                            low=0.0, high=1.0,
+                            enter_set=True, auto_set=False,)),
+                       Item('lut_manager', show_label=False,
+                            editor=InstanceEditor(), style='custom'))
+
+    def __init__(self, vmin, vmax, vdefault, **traits):
+        HasTraits.__init__(self, **traits)
+        self.vmin = vmin
+        self.vmax = vmax
+        trait = Range(float(vmin), float(vmax), value=vdefault)
+        self.add_trait("value", trait)
+        self.value = vdefault
+
+    def _auto_set_changed(self, old, new):
+        if new is True:
+            self._val_redit.auto_set = True
+            self._val_redit.enter_set = False
+        else:
+            self._val_redit.auto_set = False
+            self._val_redit.enter_set = True
+
+    def _value_changed(self, old, new):
+        self.operator.set_value(0, new)
+        self.post_call()
+
+class MappingIsoContour(MappingMarchingCubes):
+    operator = Instance(tvtk.ContourFilter)
+
+class CameraPosition(HasTraits):
+    position = CArray(shape=(3,), dtype='float64')
+    focal_point = CArray(shape=(3,), dtype='float64')
+    view_up = CArray(shape=(3,), dtype='float64')
+    clipping_range = CArray(shape=(2,), dtype='float64')
+    distance = Float
+    num_steps = Int(10)
+    orientation_wxyz = CArray(shape=(4,), dtype='float64')
+
+class CameraControl(HasTraits):
+    # Traits
+    positions = List(CameraPosition)
+    yt_scene = Instance('YTScene')
+    center = Delegate('yt_scene')
+    scene = Delegate('yt_scene')
+    camera = Instance(tvtk.OpenGLCamera)
+    reset_position = Instance(CameraPosition)
+    fps = Float(25.0)
+    export_filename = 'frames'
+    periodic = Bool
+
+    # UI elements
+    snapshot = Button()
+    play = Button()
+    export_frames = Button()
+    reset_path = Button()
+    recenter = Button()
+    save_path = Button()
+    load_path = Button()
+    export_path = Button()
+
+    table_def = TableEditor(
+        columns = [ ObjectColumn(name='position'),
+                    ObjectColumn(name='focal_point'),
+                    ObjectColumn(name='view_up'),
+                    ObjectColumn(name='clipping_range'),
+                    ObjectColumn(name='num_steps') ],
+        reorderable=True, deletable=True,
+        sortable=True, sort_model=True,
+        show_toolbar=True,
+        selection_mode='row',
+        selected = 'reset_position'
+                )
+
+    default_view = View(
+                VGroup(
+                  HGroup(
+                    Item('camera', show_label=False),
+                    Item('recenter', show_label=False),
+                    label='Camera'),
+                  HGroup(
+                    Item('snapshot', show_label=False),
+                    Item('play', show_label=False),
+                    Item('export_frames',show_label=False),
+                    Item('reset_path', show_label=False),
+                    Item('save_path', show_label=False),
+                    Item('load_path', show_label=False),
+                    Item('export_path', show_label=False),
+                    Item('export_filename'),
+                    Item('periodic'),
+                    Item('fps'),
+                    label='Playback'),
+                  VGroup(
+                    Item('positions', show_label=False,
+                        editor=table_def),
+                    label='Camera Path'),
+                 ),
+                resizable=True, title="Camera Path Editor",
+                       )
+
+    def _reset_position_changed(self, old, new):
+        if new is None: return
+        cam = self.scene.camera
+        cam.position = new.position
+        cam.focal_point = new.focal_point
+        cam.view_up = new.view_up
+        cam.clipping_range = new.clipping_range
+        self.scene.render()
+
+    def __init__(self, **traits):
+        HasTraits.__init__(self, **traits)
+
+    def take_snapshot(self):
+        cam = self.scene.camera
+        self.positions.append(CameraPosition(
+                position=cam.position,
+                focal_point=cam.focal_point,
+                view_up=cam.view_up,
+                clipping_range=cam.clipping_range,
+                distance=cam.distance,
+                orientation_wxyz=cam.orientation_wxyz))
+
+    def _export_path_fired(self): 
+        dlg = pyface.FileDialog(
+            action='save as',
+            wildcard="*.cpath",
+        )
+        if dlg.open() == pyface.OK:
+            print "Saving:", dlg.path
+            self.export_camera_path(dlg.path)
+
+    def export_camera_path(self, fn):
+        to_dump = dict(positions=[], focal_points=[],
+                       view_ups=[], clipping_ranges=[],
+                       distances=[], orientation_wxyzs=[])
+        def _write(cam):
+            to_dump['positions'].append(cam.position)
+            to_dump['focal_points'].append(cam.focal_point)
+            to_dump['view_ups'].append(cam.view_up)
+            to_dump['clipping_ranges'].append(cam.clipping_range)
+            to_dump['distances'].append(cam.distance)
+            to_dump['orientation_wxyzs'].append(cam.orientation_wxyz)
+        self.step_through(0.0, callback=_write)
+        pickle.dump(to_dump, open(fn, "wb"))
+
+    def _save_path_fired(self): 
+        dlg = pyface.FileDialog(
+            action='save as',
+            wildcard="*.cpath",
+        )
+        if dlg.open() == pyface.OK:
+            print "Saving:", dlg.path
+            self.dump_camera_path(dlg.path)
+
+    def dump_camera_path(self, fn):
+        to_dump = dict(positions=[], focal_points=[],
+                       view_ups=[], clipping_ranges=[],
+                       distances=[], orientation_wxyzs=[],
+                       num_stepss=[])
+        for p in self.positions:
+            to_dump['positions'].append(p.position)
+            to_dump['focal_points'].append(p.focal_point)
+            to_dump['view_ups'].append(p.view_up)
+            to_dump['clipping_ranges'].append(p.clipping_range)
+            to_dump['distances'].append(p.distance)
+            to_dump['num_stepss'].append(p.num_steps) # stupid s
+            to_dump['orientation_wxyzs'].append(p.orientation_wxyz)
+        pickle.dump(to_dump, open(fn, "wb"))
+
+    def _load_path_fired(self):
+        dlg = pyface.FileDialog(
+            action='open',
+            wildcard="*.cpath",
+        )
+        if dlg.open() == pyface.OK:
+            print "Loading:", dlg.path
+            self.load_camera_path(dlg.path)
+
+    def load_camera_path(self, fn):
+        to_use = pickle.load(open(fn, "rb"))
+        self.positions = []
+        for i in range(len(to_use['positions'])):
+            dd = {}
+            for kw in to_use:
+                # Strip the s
+                dd[kw[:-1]] = to_use[kw][i]
+            self.positions.append(
+                CameraPosition(**dd))
+
+    def _recenter_fired(self):
+        self.camera.focal_point = self.center
+        self.scene.render()
+
+    def _snapshot_fired(self):
+        self.take_snapshot()
+
+    def _play_fired(self):
+        self.step_through()
+
+    def _export_frames_fired(self):
+        self.step_through(save_frames=True)
+
+    def _reset_path_fired(self):
+        self.positions = []
+
+    def step_through(self, pause = 1.0, callback=None, save_frames=False):
+        cam = self.scene.camera
+        frame_counter=0
+        if self.periodic:
+            cyclic_pos = self.positions + [self.positions[0]]
+        else:
+            cyclic_pos = self.positions
+        for i in range(len(cyclic_pos)-1):
+            pos1 = cyclic_pos[i]
+            pos2 = cyclic_pos[i+1]
+            r = pos1.num_steps
+            for p in range(pos1.num_steps):
+                po = _interpolate(pos1.position, pos2.position, p, r)
+                fp = _interpolate(pos1.focal_point, pos2.focal_point, p, r)
+                vu = _interpolate(pos1.view_up, pos2.view_up, p, r)
+                cr = _interpolate(pos1.clipping_range, pos2.clipping_range, p, r)
+                _set_cpos(cam, po, fp, vu, cr)
+                self.scene.render()
+                if callback is not None: callback(cam)
+                if save_frames:
+                    self.scene.save("%s_%0.5d.png" % (self.export_filename,frame_counter))
+                else:
+                    time.sleep(pause * 1.0/self.fps)
+                frame_counter += 1
+
+def _interpolate(q1, q2, p, r):
+    return q1 + p*(q2 - q1)/float(r)
+
+def _set_cpos(cam, po, fp, vu, cr):
+    cam.position = po
+    cam.focal_point = fp
+    cam.view_up = vu
+    cam.clipping_range = cr
+
+class HierarchyImporter(HasTraits):
+    pf = Any
+    min_grid_level = Int(0)
+    max_level = Int(1)
+    number_of_levels = Range(0, 13)
+    max_import_levels = Property(depends_on='min_grid_level')
+    field = Str("Density")
+    field_list = List
+    center_on_max = Bool(True)
+    center = CArray(shape = (3,), dtype = 'float64')
+    cache = Bool(True)
+    smoothed = Bool(True)
+    show_grids = Bool(True)
+
+    def _field_list_default(self):
+        fl = self.pf.h.field_list
+        df = self.pf.h.derived_field_list
+        fl.sort(); df.sort()
+        return fl + df
+    
+    default_view = View(Item('min_grid_level',
+                              editor=RangeEditor(low=0,
+                                                 high_name='max_level')),
+                        Item('number_of_levels', 
+                              editor=RangeEditor(low=1,
+                                                 high_name='max_import_levels')),
+                        Item('field', editor=EnumEditor(name='field_list')),
+                        Item('center_on_max'),
+                        Item('center', enabled_when='not object.center_on_max'),
+                        Item('smoothed'),
+                        Item('cache', label='Pre-load data'),
+                        Item('show_grids'),
+                        buttons=OKCancelButtons)
+
+    def _center_default(self):
+        return [0.5,0.5,0.5]
+
+    @cached_property
+    def _get_max_import_levels(self):
+        return min(13, self.pf.h.max_level - self.min_grid_level + 1)
+
+class HierarchyImportHandler(Controller):
+    importer = Instance(HierarchyImporter)
+    
+
+    def close(self, info, is_ok):
+        if is_ok: 
+            # Constructing the YTScene is done purely for its side effects.
+            yt_scene = YTScene(importer=self.importer)
+        super(Controller, self).close(info, True)
+        return
+
+
+class YTScene(HasTraits):
+
+    # Traits
+    importer = Instance(HierarchyImporter)
+    pf = Delegate("importer")
+    min_grid_level = Delegate("importer")
+    number_of_levels = Delegate("importer")
+    field = Delegate("importer")
+    center = CArray(shape = (3,), dtype = 'float64')
+    center_on_max = Delegate("importer")
+    smoothed = Delegate("importer")
+    cache = Delegate("importer")
+    show_grids = Delegate("importer")
+
+    camera_path = Instance(CameraControl)
+    #window = Instance(ivtk.IVTKWithCrustAndBrowser)
+    #python_shell = Delegate('window')
+    #scene = Delegate('window')
+    scene = Instance(HasTraits)
+    operators = List(HasTraits)
+
+    # State variables
+    _grid_boundaries_actor = None
+
+    # Views
+    def _window_default(self):
+        # Should experiment with passing in a pipeline browser
+        # that has two root objects -- one for TVTKBases, i.e. the render
+        # window, and one that accepts our objects
+        return ivtk.IVTKWithCrustAndBrowser(size=(800,600), stereo=1)
+
+    def _camera_path_default(self):
+        return CameraControl(yt_scene=self, camera=self.scene.camera)
+
+    def __init__(self, **traits):
+        HasTraits.__init__(self, **traits)
+        max_level = min(self.pf.h.max_level,
+                        self.min_grid_level + self.number_of_levels - 1)
+        self.extracted_pf = ExtractedParameterFile(self.pf,
+                             self.min_grid_level, max_level, offset=None)
+        self.extracted_hierarchy = self.extracted_pf.h
+        self._hdata_set = tvtk.HierarchicalBoxDataSet()
+        self._ugs = []
+        self._grids = []
+        self._min_val = 1e60
+        self._max_val = -1e60
+        gid = 0
+        if self.cache:
+            for grid_set in self.extracted_hierarchy.get_levels():
+                for grid in grid_set:
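+                    # Touching grid[self.field] reads the field into memory up front.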
+                    grid[self.field]
+        for l, grid_set in enumerate(self.extracted_hierarchy.get_levels()):
+            gid = self._add_level(grid_set, l, gid)
+        if self.show_grids:
+            self.toggle_grid_boundaries()
+            
+    def _center_default(self):
+        return self.extracted_hierarchy._convert_coords(
+                [0.5, 0.5, 0.5])
+
+    def do_center_on_max(self):
+        self.center = self.extracted_hierarchy._convert_coords(
+            self.pf.h.find_max("Density")[1])
+        self.scene.camera.focal_point = self.center
+
+    def _add_level(self, grid_set, level, gid):
+        for grid in grid_set:
+            self._hdata_set.set_refinement_ratio(level, 2)
+            gid = self._add_grid(grid, gid, level)
+        return gid
+
+    def _add_grid(self, grid, gid, level=0):
+        mylog.debug("Adding grid %s on level %s (%s)",
+                    grid.id, level, grid.Level)
+        if grid in self._grids: return gid  # already added; leave gid unchanged
+        self._grids.append(grid)
+
+        scalars = grid.get_vertex_centered_data(self.field, smoothed=self.smoothed)
+
+        left_index = grid.get_global_startindex()
+        origin = grid.LeftEdge
+        dds = grid.dds
+        right_index = left_index + scalars.shape - 1
+        ug = tvtk.UniformGrid(origin=origin, spacing=dds,
+                              dimensions=grid.ActiveDimensions+1)
+        if self.field not in self.pf.field_info or \
+            self.pf.field_info[self.field].take_log:
+            scalars = na.log10(scalars)
+        ug.point_data.scalars = scalars.transpose().ravel()
+        ug.point_data.scalars.name = self.field
+        if grid.Level != self.min_grid_level + self.number_of_levels - 1:
+            ug.cell_visibility_array = grid.child_mask.transpose().ravel()
+        else:
+            ug.cell_visibility_array = na.ones(
+                    grid.ActiveDimensions, dtype='int').ravel()
+        self._ugs.append((grid,ug))
+        self._hdata_set.set_data_set(level, gid, left_index, right_index, ug)
+
+        self._min_val = min(self._min_val, scalars.min())
+        self._max_val = max(self._max_val, scalars.max())
+
+        gid += 1
+        return gid
+
+    def _add_data_to_ug(self, field):
+        for g, ug in self._ugs:
+            scalars_temp = g.get_vertex_centered_data(field, smoothed=self.smoothed)
+            ii = ug.point_data.add_array(scalars_temp.transpose().ravel())
+            ug.point_data.get_array(ii).name = field
+
+    def zoom(self, dist, unit='1'):
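+        # Move the camera toward the focal point by dist, given in 'unit'.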
+        vec = self.scene.camera.focal_point - \
+              self.scene.camera.position
+        self.scene.camera.position += \
+            vec * dist/self._grids[0].pf[unit]
+        self.scene.render()
+
+    def toggle_grid_boundaries(self):
+        if self._grid_boundaries_actor is None:
+            # We don't need to track this stuff right now.
+            ocf = tvtk.OutlineCornerFilter(
+                    executive=tvtk.CompositeDataPipeline(),
+                    corner_factor = 0.5)
+            ocf.input = self._hdata_set
+            ocm = tvtk.HierarchicalPolyDataMapper(
+                input_connection = ocf.output_port)
+            self._grid_boundaries_actor = tvtk.Actor(mapper = ocm)
+            self.scene.add_actor(self._grid_boundaries_actor)
+        else:
+            self._grid_boundaries_actor.visibility = \
+            (not self._grid_boundaries_actor.visibility)
+
+    def _add_sphere(self, origin=(0.0,0.0,0.0), normal=(0,1,0)):
+        sphere = tvtk.Sphere(center=origin, radius=0.25)
+        cutter = tvtk.Cutter(executive = tvtk.CompositeDataPipeline(),
+                             cut_function = sphere)
+        cutter.input = self._hdata_set
+        lut_manager = LUTManager(data_name=self.field, scene=self.scene)
+        smap = tvtk.HierarchicalPolyDataMapper(
+                        scalar_range=(self._min_val, self._max_val),
+                        lookup_table=lut_manager.lut,
+                        input_connection = cutter.output_port)
+        sactor = tvtk.Actor(mapper=smap)
+        self.scene.add_actors(sactor)
+        return sphere, lut_manager
+
+    def _add_plane(self, origin=(0.0,0.0,0.0), normal=(0,1,0)):
+        plane = tvtk.Plane(origin=origin, normal=normal)
+        cutter = tvtk.Cutter(executive = tvtk.CompositeDataPipeline(),
+                             cut_function = plane)
+        cutter.input = self._hdata_set
+        lut_manager = LUTManager(data_name=self.field, scene=self.scene)
+        smap = tvtk.HierarchicalPolyDataMapper(
+                        scalar_range=(self._min_val, self._max_val),
+                        lookup_table=lut_manager.lut,
+                        input_connection = cutter.output_port)
+        sactor = tvtk.Actor(mapper=smap)
+        self.scene.add_actors(sactor)
+        return plane, lut_manager
+
+    def add_plane(self, origin=(0.0,0.0,0.0), normal=(0,1,0)):
+        self.operators.append(self._add_plane(origin, normal))
+        return self.operators[-1]
+
+    def _add_axis_plane(self, axis):
+        normal = [0,0,0]
+        normal[axis] = 1
+        np, lut_manager = self._add_plane(self.center, normal=normal)
+        LE = self.extracted_hierarchy.min_left_edge
+        RE = self.extracted_hierarchy.max_right_edge
+        self.operators.append(MappingPlane(
+                vmin=LE[axis], vmax=RE[axis],
+                vdefault = self.center[axis],
+                post_call = self.scene.render,
+                plane = np, axis=axis, coord=0.0,
+                lut_manager = lut_manager,
+                scene=self.scene))
+
+    def add_x_plane(self):
+        self._add_axis_plane(0)
+        return self.operators[-1]
+
+    def add_y_plane(self):
+        self._add_axis_plane(1)
+        return self.operators[-1]
+
+    def add_z_plane(self):
+        self._add_axis_plane(2)
+        return self.operators[-1]
+
+    def add_contour(self, val=None):
+        if val is None: 
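+            # NaN != NaN, so this catches a _min_val that went NaN
+            # (e.g. log10 of non-positive data).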
+            if self._min_val != self._min_val:
+                self._min_val = 1.0
+            val = (self._max_val+self._min_val) * 0.5
+        cubes = tvtk.MarchingCubes(
+                    executive = tvtk.CompositeDataPipeline())
+        cubes.input = self._hdata_set
+        cubes.set_value(0, val)
+        lut_manager = LUTManager(data_name=self.field, scene=self.scene)
+        cube_mapper = tvtk.HierarchicalPolyDataMapper(
+                                input_connection = cubes.output_port,
+                                lookup_table=lut_manager.lut)
+        cube_mapper.color_mode = 'map_scalars'
+        cube_mapper.scalar_range = (self._min_val, self._max_val)
+        cube_actor = tvtk.Actor(mapper=cube_mapper)
+        self.scene.add_actors(cube_actor)
+        self.operators.append(MappingMarchingCubes(operator=cubes,
+                    vmin=self._min_val, vmax=self._max_val,
+                    vdefault=val,
+                    mapper = cube_mapper,
+                    post_call = self.scene.render,
+                    lut_manager = lut_manager,
+                    scene=self.scene))
+        return self.operators[-1]
+
+    def add_isocontour(self, val=None):
+        if val is None: val = (self._max_val+self._min_val) * 0.5
+        isocontour = tvtk.ContourFilter(
+                    executive = tvtk.CompositeDataPipeline())
+        isocontour.input = self._hdata_set
+        isocontour.generate_values(1, (val, val))
+        lut_manager = LUTManager(data_name=self.field, scene=self.scene)
+        isocontour_normals = tvtk.PolyDataNormals(
+            executive=tvtk.CompositeDataPipeline())
+        isocontour_normals.input_connection = isocontour.output_port
+        iso_mapper = tvtk.HierarchicalPolyDataMapper(
+                                input_connection = isocontour_normals.output_port,
+                                lookup_table=lut_manager.lut)
+        iso_mapper.scalar_range = (self._min_val, self._max_val)
+        iso_actor = tvtk.Actor(mapper=iso_mapper)
+        self.scene.add_actors(iso_actor)
+        self.operators.append(MappingIsoContour(operator=isocontour,
+                    vmin=self._min_val, vmax=self._max_val,
+                    vdefault=val,
+                    mapper = iso_mapper,
+                    post_call = self.scene.render,
+                    lut_manager = lut_manager,
+                    scene=self.scene))
+        return self.operators[-1]
+
+    def display_points(self):
+        dd = self.pf.h.all_data()
+        points = tvtk.Points()
+        good = (dd["creation_time"] > 0.0)
+        points.data = na.array([ dd["particle_position_%s" % ax][good] for ax in 'xyz' ]).transpose()
+        mass = na.log10(dd["ParticleAge"][good])
+        self.conn = tvtk.CellArray()
+        for i in xrange(mass.shape[0]):
+            self.conn.insert_next_cell(1)
+            self.conn.insert_cell_point(i)
+        self.points = points
+        self.pd = tvtk.PolyData(points = self.points, verts = self.conn)
+        self.pd.point_data.scalars = mass
+        lut = tvtk.LookupTable()
+        self.pdm = tvtk.PolyDataMapper(input = self.pd,
+                                       lookup_table = lut)
+        self.pdm.scalar_range = (mass.min(), mass.max())
+        self.pdm.scalar_mode = 'use_point_data'
+        self.point_actor = tvtk.Actor(mapper = self.pdm)
+        self.scene.add_actor(self.point_actor)
+
+def get_all_parents(grid):
+    # Recursively collect every ancestor of this grid, without duplicates.
+    # (The original appended sub-lists, which broke for three or more levels.)
+    parents = []
+    for parent in grid.Parents:
+        parents.append(parent)
+        parents.extend(get_all_parents(parent))
+    return list(set(parents))
+
+def run_vtk():
+    gui = pyface.GUI()
+    importer = HierarchyImporter()
+    importer.edit_traits(handler = HierarchyImportHandler(
+            importer = importer))
+    #ehds.edit_traits()
+    gui.start_event_loop()
+
+
+if __name__=="__main__":
+    print "This code probably won't work.  But if you want to give it a try,"
+    print "you need:"
+    print
+    print "VTK (CVS)"
+    print "Mayavi2 (from Enthought)"
+    print
+    print "If you have 'em, give it a try!"
+    print
+    run_vtk()
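
For anyone who wants to try the module above, here is a minimal driving
sketch.  It is an illustrative assumption rather than part of the
changeset: it presumes a working VTK + Mayavi2 stack and a dataset path
that yt's load() accepts.  (Note that run_vtk() as written builds a
HierarchyImporter with no pf, so one is supplied explicitly here.)

    # Hypothetical usage sketch -- the dataset path is an example.
    import enthought.pyface.api as pyface
    from yt.mods import load

    gui = pyface.GUI()
    pf = load("my_data/my_output")     # illustrative dataset path
    importer = HierarchyImporter(pf=pf, max_level=pf.h.max_level)
    importer.edit_traits(handler=HierarchyImportHandler(importer=importer))
    gui.start_event_loop()             # clicking OK builds a YTScene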


--- a/yt/gui/tvtk_interface.py	Mon Mar 28 13:44:14 2011 -0600
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,692 +0,0 @@
-"""
-This is the preliminary interface to VTK.  Note that as of VTK 5.2, it still
-requires a patchset prepared here:
-http://yt.enzotools.org/files/vtk_composite_data.zip
-
-Author: Matthew Turk <matthewturk at gmail.com>
-Affiliation: KIPAC/SLAC/Stanford
-Homepage: http://yt.enzotools.org/
-License:
-  Copyright (C) 2007-2009 Matthew Turk.  All Rights Reserved.
-
-  This file is part of yt.
-
-  yt is free software; you can redistribute it and/or modify
-  it under the terms of the GNU General Public License as published by
-  the Free Software Foundation; either version 3 of the License, or
-  (at your option) any later version.
-
-  This program is distributed in the hope that it will be useful,
-  but WITHOUT ANY WARRANTY; without even the implied warranty of
-  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-  GNU General Public License for more details.
-
-  You should have received a copy of the GNU General Public License
-  along with this program.  If not, see <http://www.gnu.org/licenses/>.
-"""
-
-from enthought.tvtk.tools import ivtk
-from enthought.tvtk.api import tvtk 
-from enthought.traits.api import \
-    Float, HasTraits, Instance, Range, Any, Delegate, Tuple, File, Int, Str, \
-    CArray, List, Button, Bool, Property, cached_property
-from enthought.traits.ui.api import View, Item, HGroup, VGroup, TableEditor, \
-    Handler, Controller, RangeEditor, EnumEditor, InstanceEditor
-from enthought.traits.ui.menu import \
-    Menu, Action, Separator, OKCancelButtons, OKButton
-from enthought.traits.ui.table_column import ObjectColumn
-from enthought.tvtk.pyface.api import DecoratedScene
-
-import enthought.pyface.api as pyface
-
-#from yt.reason import *
-import sys
-import numpy as na
-import time, pickle, os, os.path
-from yt.funcs import *
-from yt.analysis_modules.hierarchy_subset.api import \
-        ExtractedHierarchy, ExtractedParameterFile
-
-#from enthought.tvtk.pyface.ui.wx.wxVTKRenderWindowInteractor \
-     #import wxVTKRenderWindowInteractor
-
-from enthought.mayavi.core.lut_manager import LUTManager
-
-#wxVTKRenderWindowInteractor.USE_STEREO = 1
-
-class TVTKMapperWidget(HasTraits):
-    alpha = Float(1.0)
-    post_call = Any
-    lut_manager = Instance(LUTManager)
-
-    def _alpha_changed(self, old, new):
-        self.lut_manager.lut.alpha_range = (new, new)
-        self.post_call()
-
-class MappingPlane(TVTKMapperWidget):
-    plane = Instance(tvtk.Plane)
-    _coord_redit = editor=RangeEditor(format="%0.2e",
-                              low_name='vmin', high_name='vmax',
-                              auto_set=False, enter_set=True)
-    auto_set = Bool(False)
-    traits_view = View(Item('coord', editor=_coord_redit),
-                       Item('auto_set'),
-                       Item('alpha', editor=RangeEditor(
-                              low=0.0, high=1.0,
-                              enter_set=True, auto_set=False)),
-                       Item('lut_manager', show_label=False,
-                            editor=InstanceEditor(), style='custom'))
-    vmin = Float
-    vmax = Float
-
-    def _auto_set_changed(self, old, new):
-        if new is True:
-            self._coord_redit.auto_set = True
-            self._coord_redit.enter_set = False
-        else:
-            self._coord_redit.auto_set = False
-            self._coord_redit.enter_set = True
-
-    def __init__(self, vmin, vmax, vdefault, **traits):
-        HasTraits.__init__(self, **traits)
-        self.vmin = vmin
-        self.vmax = vmax
-        trait = Range(float(vmin), float(vmax), value=vdefault)
-        self.add_trait("coord", trait)
-        self.coord = vdefault
-
-    def _coord_changed(self, old, new):
-        orig = self.plane.origin[:]
-        orig[self.axis] = new
-        self.plane.origin = orig
-        self.post_call()
-
-class MappingMarchingCubes(TVTKMapperWidget):
-    operator = Instance(tvtk.MarchingCubes)
-    mapper = Instance(tvtk.HierarchicalPolyDataMapper)
-    vmin = Float
-    vmax = Float
-    auto_set = Bool(False)
-    _val_redit = RangeEditor(format="%0.2f",
-                             low_name='vmin', high_name='vmax',
-                             auto_set=False, enter_set=True)
-    traits_view = View(Item('value', editor=_val_redit),
-                       Item('auto_set'),
-                       Item('alpha', editor=RangeEditor(
-                            low=0.0, high=1.0,
-                            enter_set=True, auto_set=False,)),
-                       Item('lut_manager', show_label=False,
-                            editor=InstanceEditor(), style='custom'))
-
-    def __init__(self, vmin, vmax, vdefault, **traits):
-        HasTraits.__init__(self, **traits)
-        self.vmin = vmin
-        self.vmax = vmax
-        trait = Range(float(vmin), float(vmax), value=vdefault)
-        self.add_trait("value", trait)
-        self.value = vdefault
-
-    def _auto_set_changed(self, old, new):
-        if new is True:
-            self._val_redit.auto_set = True
-            self._val_redit.enter_set = False
-        else:
-            self._val_redit.auto_set = False
-            self._val_redit.enter_set = True
-
-    def _value_changed(self, old, new):
-        self.operator.set_value(0, new)
-        self.post_call()
-
-class MappingIsoContour(MappingMarchingCubes):
-    operator = Instance(tvtk.ContourFilter)
-
-class CameraPosition(HasTraits):
-    position = CArray(shape=(3,), dtype='float64')
-    focal_point = CArray(shape=(3,), dtype='float64')
-    view_up = CArray(shape=(3,), dtype='float64')
-    clipping_range = CArray(shape=(2,), dtype='float64')
-    distance = Float
-    num_steps = Int(10)
-    orientation_wxyz = CArray(shape=(4,), dtype='float64')
-
-class CameraControl(HasTraits):
-    # Traits
-    positions = List(CameraPosition)
-    yt_scene = Instance('YTScene')
-    center = Delegate('yt_scene')
-    scene = Delegate('yt_scene')
-    camera = Instance(tvtk.OpenGLCamera)
-    reset_position = Instance(CameraPosition)
-    fps = Float(25.0)
-    export_filename = 'frames'
-    periodic = Bool
-
-    # UI elements
-    snapshot = Button()
-    play = Button()
-    export_frames = Button()
-    reset_path = Button()
-    recenter = Button()
-    save_path = Button()
-    load_path = Button()
-    export_path = Button()
-
-    table_def = TableEditor(
-        columns = [ ObjectColumn(name='position'),
-                    ObjectColumn(name='focal_point'),
-                    ObjectColumn(name='view_up'),
-                    ObjectColumn(name='clipping_range'),
-                    ObjectColumn(name='num_steps') ],
-        reorderable=True, deletable=True,
-        sortable=True, sort_model=True,
-        show_toolbar=True,
-        selection_mode='row',
-        selected = 'reset_position'
-                )
-
-    default_view = View(
-                VGroup(
-                  HGroup(
-                    Item('camera', show_label=False),
-                    Item('recenter', show_label=False),
-                    label='Camera'),
-                  HGroup(
-                    Item('snapshot', show_label=False),
-                    Item('play', show_label=False),
-                    Item('export_frames',show_label=False),
-                    Item('reset_path', show_label=False),
-                    Item('save_path', show_label=False),
-                    Item('load_path', show_label=False),
-                    Item('export_path', show_label=False),
-                    Item('export_filename'),
-                    Item('periodic'),
-                    Item('fps'),
-                    label='Playback'),
-                  VGroup(
-                    Item('positions', show_label=False,
-                        editor=table_def),
-                    label='Camera Path'),
-                 ),
-                resizable=True, title="Camera Path Editor",
-                       )
-
-    def _reset_position_changed(self, old, new):
-        if new is None: return
-        cam = self.scene.camera
-        cam.position = new.position
-        cam.focal_point = new.focal_point
-        cam.view_up = new.view_up
-        cam.clipping_range = new.clipping_range
-        self.scene.render()
-
-    def __init__(self, **traits):
-        HasTraits.__init__(self, **traits)
-
-    def take_snapshot(self):
-        cam = self.scene.camera
-        self.positions.append(CameraPosition(
-                position=cam.position,
-                focal_point=cam.focal_point,
-                view_up=cam.view_up,
-                clipping_range=cam.clipping_range,
-                distance=cam.distance,
-                orientation_wxyz=cam.orientation_wxyz))
-
-    def _export_path_fired(self): 
-        dlg = pyface.FileDialog(
-            action='save as',
-            wildcard="*.cpath",
-        )
-        if dlg.open() == pyface.OK:
-            print "Saving:", dlg.path
-            self.export_camera_path(dlg.path)
-
-    def export_camera_path(self, fn):
-        to_dump = dict(positions=[], focal_points=[],
-                       view_ups=[], clipping_ranges=[],
-                       distances=[], orientation_wxyzs=[])
-        def _write(cam):
-            to_dump['positions'].append(cam.position)
-            to_dump['focal_points'].append(cam.focal_point)
-            to_dump['view_ups'].append(cam.view_up)
-            to_dump['clipping_ranges'].append(cam.clipping_range)
-            to_dump['distances'].append(cam.distance)
-            to_dump['orientation_wxyzs'].append(cam.orientation_wxyz)
-        self.step_through(0.0, callback=_write)
-        pickle.dump(to_dump, open(fn, "wb"))
-
-    def _save_path_fired(self): 
-        dlg = pyface.FileDialog(
-            action='save as',
-            wildcard="*.cpath",
-        )
-        if dlg.open() == pyface.OK:
-            print "Saving:", dlg.path
-            self.dump_camera_path(dlg.path)
-
-    def dump_camera_path(self, fn):
-        to_dump = dict(positions=[], focal_points=[],
-                       view_ups=[], clipping_ranges=[],
-                       distances=[], orientation_wxyzs=[],
-                       num_stepss=[])
-        for p in self.positions:
-            to_dump['positions'].append(p.position)
-            to_dump['focal_points'].append(p.focal_point)
-            to_dump['view_ups'].append(p.view_up)
-            to_dump['clipping_ranges'].append(p.clipping_range)
-            to_dump['distances'].append(p.distance)
-            to_dump['num_stepss'].append(p.num_steps) # stupid s
-            to_dump['orientation_wxyzs'].append(p.orientation_wxyz)
-        pickle.dump(to_dump, open(fn, "wb"))
-
-    def _load_path_fired(self):
-        dlg = pyface.FileDialog(
-            action='open',
-            wildcard="*.cpath",
-        )
-        if dlg.open() == pyface.OK:
-            print "Loading:", dlg.path
-            self.load_camera_path(dlg.path)
-
-    def load_camera_path(self, fn):
-        to_use = pickle.load(open(fn, "rb"))
-        self.positions = []
-        for i in range(len(to_use['positions'])):
-            dd = {}
-            for kw in to_use:
-                # Strip the s
-                dd[kw[:-1]] = to_use[kw][i]
-            self.positions.append(
-                CameraPosition(**dd))
-
-    def _recenter_fired(self):
-        self.camera.focal_point = self.center
-        self.scene.render()
-
-    def _snapshot_fired(self):
-        self.take_snapshot()
-
-    def _play_fired(self):
-        self.step_through()
-
-    def _export_frames_fired(self):
-        self.step_through(save_frames=True)
-
-    def _reset_path_fired(self):
-        self.positions = []
-
-    def step_through(self, pause = 1.0, callback=None, save_frames=False):
-        cam = self.scene.camera
-        frame_counter=0
-        if self.periodic:
-            cyclic_pos = self.positions + [self.positions[0]]
-        else:
-            cyclic_pos = self.positions
-        for i in range(len(cyclic_pos)-1):
-            pos1 = cyclic_pos[i]
-            pos2 = cyclic_pos[i+1]
-            r = pos1.num_steps
-            for p in range(pos1.num_steps):
-                po = _interpolate(pos1.position, pos2.position, p, r)
-                fp = _interpolate(pos1.focal_point, pos2.focal_point, p, r)
-                vu = _interpolate(pos1.view_up, pos2.view_up, p, r)
-                cr = _interpolate(pos1.clipping_range, pos2.clipping_range, p, r)
-                _set_cpos(cam, po, fp, vu, cr)
-                self.scene.render()
-                if callback is not None: callback(cam)
-                if save_frames:
-                    self.scene.save("%s_%0.5d.png" % (self.export_filename,frame_counter))
-                else:
-                    time.sleep(pause * 1.0/self.fps)
-                frame_counter += 1
-
-def _interpolate(q1, q2, p, r):
-    return q1 + p*(q2 - q1)/float(r)
-
-def _set_cpos(cam, po, fp, vu, cr):
-    cam.position = po
-    cam.focal_point = fp
-    cam.view_up = vu
-    cam.clipping_range = cr
-
-class HierarchyImporter(HasTraits):
-    pf = Any
-    min_grid_level = Int(0)
-    max_level = Int(1)
-    number_of_levels = Range(0, 13)
-    max_import_levels = Property(depends_on='min_grid_level')
-    field = Str("Density")
-    field_list = List
-    center_on_max = Bool(True)
-    center = CArray(shape = (3,), dtype = 'float64')
-    cache = Bool(True)
-    smoothed = Bool(True)
-    show_grids = Bool(True)
-
-    def _field_list_default(self):
-        fl = self.pf.h.field_list
-        df = self.pf.h.derived_field_list
-        fl.sort(); df.sort()
-        return fl + df
-    
-    default_view = View(Item('min_grid_level',
-                              editor=RangeEditor(low=0,
-                                                 high_name='max_level')),
-                        Item('number_of_levels', 
-                              editor=RangeEditor(low=1,
-                                                 high_name='max_import_levels')),
-                        Item('field', editor=EnumEditor(name='field_list')),
-                        Item('center_on_max'),
-                        Item('center', enabled_when='not object.center_on_max'),
-                        Item('smoothed'),
-                        Item('cache', label='Pre-load data'),
-                        Item('show_grids'),
-                        buttons=OKCancelButtons)
-
-    def _center_default(self):
-        return [0.5,0.5,0.5]
-
-    @cached_property
-    def _get_max_import_levels(self):
-        return min(13, self.pf.h.max_level - self.min_grid_level + 1)
-
-class HierarchyImportHandler(Controller):
-    importer = Instance(HierarchyImporter)
-    
-
-    def close(self, info, is_ok):
-        if is_ok: 
-            yt_scene = YTScene(
-                importer=self.importer)
-        super(Controller, self).close(info, True)
-        return
-
-
-class YTScene(HasTraits):
-
-    # Traits
-    importer = Instance(HierarchyImporter)
-    pf = Delegate("importer")
-    min_grid_level = Delegate("importer")
-    number_of_levels = Delegate("importer")
-    field = Delegate("importer")
-    center = CArray(shape = (3,), dtype = 'float64')
-    center_on_max = Delegate("importer")
-    smoothed = Delegate("importer")
-    cache = Delegate("importer")
-    show_grids = Delegate("importer")
-
-    camera_path = Instance(CameraControl)
-    #window = Instance(ivtk.IVTKWithCrustAndBrowser)
-    #python_shell = Delegate('window')
-    #scene = Delegate('window')
-    scene = Instance(HasTraits)
-    operators = List(HasTraits)
-
-    # State variables
-    _grid_boundaries_actor = None
-
-    # Views
-    def _window_default(self):
-        # Should experiment with passing in a pipeline browser
-        # that has two root objects -- one for TVTKBases, i.e. the render
-        # window, and one that accepts our objects
-        return ivtk.IVTKWithCrustAndBrowser(size=(800,600), stereo=1)
-
-    def _camera_path_default(self):
-        return CameraControl(yt_scene=self, camera=self.scene.camera)
-
-    def __init__(self, **traits):
-        HasTraits.__init__(self, **traits)
-        max_level = min(self.pf.h.max_level,
-                        self.min_grid_level + self.number_of_levels - 1)
-        self.extracted_pf = ExtractedParameterFile(self.pf,
-                             self.min_grid_level, max_level, offset=None)
-        self.extracted_hierarchy = self.extracted_pf.h
-        self._hdata_set = tvtk.HierarchicalBoxDataSet()
-        self._ugs = []
-        self._grids = []
-        self._min_val = 1e60
-        self._max_val = -1e60
-        gid = 0
-        if self.cache:
-            for grid_set in self.extracted_hierarchy.get_levels():
-                for grid in grid_set:
-                    grid[self.field]
-        for l, grid_set in enumerate(self.extracted_hierarchy.get_levels()):
-            gid = self._add_level(grid_set, l, gid)
-        if self.show_grids:
-            self.toggle_grid_boundaries()
-            
-    def _center_default(self):
-        return self.extracted_hierarchy._convert_coords(
-                [0.5, 0.5, 0.5])
-
-    def do_center_on_max(self):
-        self.center = self.extracted_hierarchy._convert_coords(
-            self.pf.h.find_max("Density")[1])
-        self.scene.camera.focal_point = self.center
-
-    def _add_level(self, grid_set, level, gid):
-        for grid in grid_set:
-            self._hdata_set.set_refinement_ratio(level, 2)
-            gid = self._add_grid(grid, gid, level)
-        return gid
-
-    def _add_grid(self, grid, gid, level=0):
-        mylog.debug("Adding grid %s on level %s (%s)",
-                    grid.id, level, grid.Level)
-        if grid in self._grids: return
-        self._grids.append(grid)
-
-        scalars = grid.get_vertex_centered_data(self.field, smoothed=self.smoothed)
-
-        left_index = grid.get_global_startindex()
-        origin = grid.LeftEdge
-        dds = grid.dds
-        right_index = left_index + scalars.shape - 1
-        ug = tvtk.UniformGrid(origin=origin, spacing=dds,
-                              dimensions=grid.ActiveDimensions+1)
-        if self.field not in self.pf.field_info or \
-            self.pf.field_info[self.field].take_log:
-            scalars = na.log10(scalars)
-        ug.point_data.scalars = scalars.transpose().ravel()
-        ug.point_data.scalars.name = self.field
-        if grid.Level != self.min_grid_level + self.number_of_levels - 1:
-            ug.cell_visibility_array = grid.child_mask.transpose().ravel()
-        else:
-            ug.cell_visibility_array = na.ones(
-                    grid.ActiveDimensions, dtype='int').ravel()
-        self._ugs.append((grid,ug))
-        self._hdata_set.set_data_set(level, gid, left_index, right_index, ug)
-
-        self._min_val = min(self._min_val, scalars.min())
-        self._max_val = max(self._max_val, scalars.max())
-
-        gid += 1
-        return gid
-
-    def _add_data_to_ug(self, field):
-        for g, ug in self._ugs:
-            scalars_temp = grid.get_vertex_centered_data(field, smoothed=self.smoothed)
-            ii = ug.point_data.add_array(scalars_temp.transpose().ravel())
-            ug.point_data.get_array(ii).name = field
-
-    def zoom(self, dist, unit='1'):
-        vec = self.scene.camera.focal_point - \
-              self.scene.camera.position
-        self.scene.camera.position += \
-            vec * dist/self._grids[0].pf[unit]
-        self.scene.render()
-
-    def toggle_grid_boundaries(self):
-        if self._grid_boundaries_actor is None:
-            # We don't need to track this stuff right now.
-            ocf = tvtk.OutlineCornerFilter(
-                    executive=tvtk.CompositeDataPipeline(),
-                    corner_factor = 0.5)
-            ocf.input = self._hdata_set
-            ocm = tvtk.HierarchicalPolyDataMapper(
-                input_connection = ocf.output_port)
-            self._grid_boundaries_actor = tvtk.Actor(mapper = ocm)
-            self.scene.add_actor(self._grid_boundaries_actor)
-        else:
-            self._grid_boundaries_actor.visibility = \
-            (not self._grid_boundaries_actor.visibility)
-
-    def _add_sphere(self, origin=(0.0,0.0,0.0), normal=(0,1,0)):
-        sphere = tvtk.Sphere(center=origin, radius=0.25)
-        cutter = tvtk.Cutter(executive = tvtk.CompositeDataPipeline(),
-                             cut_function = sphere)
-        cutter.input = self._hdata_set
-        lut_manager = LUTManager(data_name=self.field, scene=self.scene)
-        smap = tvtk.HierarchicalPolyDataMapper(
-                        scalar_range=(self._min_val, self._max_val),
-                        lookup_table=lut_manager.lut,
-                        input_connection = cutter.output_port)
-        sactor = tvtk.Actor(mapper=smap)
-        self.scene.add_actors(sactor)
-        return sphere, lut_manager
-
-    def _add_plane(self, origin=(0.0,0.0,0.0), normal=(0,1,0)):
-        plane = tvtk.Plane(origin=origin, normal=normal)
-        cutter = tvtk.Cutter(executive = tvtk.CompositeDataPipeline(),
-                             cut_function = plane)
-        cutter.input = self._hdata_set
-        lut_manager = LUTManager(data_name=self.field, scene=self.scene)
-        smap = tvtk.HierarchicalPolyDataMapper(
-                        scalar_range=(self._min_val, self._max_val),
-                        lookup_table=lut_manager.lut,
-                        input_connection = cutter.output_port)
-        sactor = tvtk.Actor(mapper=smap)
-        self.scene.add_actors(sactor)
-        return plane, lut_manager
-
-    def add_plane(self, origin=(0.0,0.0,0.0), normal=(0,1,0)):
-        self.operators.append(self._add_plane(origin, normal))
-        return self.operators[-1]
-
-    def _add_axis_plane(self, axis):
-        normal = [0,0,0]
-        normal[axis] = 1
-        np, lut_manager = self._add_plane(self.center, normal=normal)
-        LE = self.extracted_hierarchy.min_left_edge
-        RE = self.extracted_hierarchy.max_right_edge
-        self.operators.append(MappingPlane(
-                vmin=LE[axis], vmax=RE[axis],
-                vdefault = self.center[axis],
-                post_call = self.scene.render,
-                plane = np, axis=axis, coord=0.0,
-                lut_manager = lut_manager,
-                scene=self.scene))
-
-    def add_x_plane(self):
-        self._add_axis_plane(0)
-        return self.operators[-1]
-
-    def add_y_plane(self):
-        self._add_axis_plane(1)
-        return self.operators[-1]
-
-    def add_z_plane(self):
-        self._add_axis_plane(2)
-        return self.operators[-1]
-
-    def add_contour(self, val=None):
-        if val is None: 
-            if self._min_val != self._min_val:
-                self._min_val = 1.0
-            val = (self._max_val+self._min_val) * 0.5
-        cubes = tvtk.MarchingCubes(
-                    executive = tvtk.CompositeDataPipeline())
-        cubes.input = self._hdata_set
-        cubes.set_value(0, val)
-        lut_manager = LUTManager(data_name=self.field, scene=self.scene)
-        cube_mapper = tvtk.HierarchicalPolyDataMapper(
-                                input_connection = cubes.output_port,
-                                lookup_table=lut_manager.lut)
-        cube_mapper.color_mode = 'map_scalars'
-        cube_mapper.scalar_range = (self._min_val, self._max_val)
-        cube_actor = tvtk.Actor(mapper=cube_mapper)
-        self.scene.add_actors(cube_actor)
-        self.operators.append(MappingMarchingCubes(operator=cubes,
-                    vmin=self._min_val, vmax=self._max_val,
-                    vdefault=val,
-                    mapper = cube_mapper,
-                    post_call = self.scene.render,
-                    lut_manager = lut_manager,
-                    scene=self.scene))
-        return self.operators[-1]
-
-    def add_isocontour(self, val=None):
-        if val is None: val = (self._max_val+self._min_val) * 0.5
-        isocontour = tvtk.ContourFilter(
-                    executive = tvtk.CompositeDataPipeline())
-        isocontour.input = self._hdata_set
-        isocontour.generate_values(1, (val, val))
-        lut_manager = LUTManager(data_name=self.field, scene=self.scene)
-        isocontour_normals = tvtk.PolyDataNormals(
-            executive=tvtk.CompositeDataPipeline())
-        isocontour_normals.input_connection = isocontour.output_port
-        iso_mapper = tvtk.HierarchicalPolyDataMapper(
-                                input_connection = isocontour_normals.output_port,
-                                lookup_table=lut_manager.lut)
-        iso_mapper.scalar_range = (self._min_val, self._max_val)
-        iso_actor = tvtk.Actor(mapper=iso_mapper)
-        self.scene.add_actors(iso_actor)
-        self.operators.append(MappingIsoContour(operator=isocontour,
-                    vmin=self._min_val, vmax=self._max_val,
-                    vdefault=val,
-                    mapper = iso_mapper,
-                    post_call = self.scene.render,
-                    lut_manager = lut_manager,
-                    scene=self.scene))
-        return self.operators[-1]
-
-    def display_points(self):
-        dd = self.pf.h.all_data()
-        points = tvtk.Points()
-        good = (dd["creation_time"] > 0.0)
-        points.data = na.array([ dd["particle_position_%s" % ax][good] for ax in 'xyz' ]).transpose()
-        mass = na.log10(dd["ParticleAge"][good])
-        self.conn = tvtk.CellArray()
-        for i in xrange(mass.shape[0]):
-            self.conn.insert_next_cell(1)
-            self.conn.insert_cell_point(i)
-        self.points = points
-        self.pd = tvtk.PolyData(points = self.points, verts = self.conn)
-        self.pd.point_data.scalars = mass
-        lut = tvtk.LookupTable()
-        self.pdm = tvtk.PolyDataMapper(input = self.pd,
-                                       lookup_table = lut)
-        self.pdm.scalar_range = (mass.min(), mass.max())
-        self.pdm.scalar_mode = 'use_point_data'
-        self.point_actor = tvtk.Actor(mapper = self.pdm)
-        self.scene.add_actor(self.point_actor)
-
-def get_all_parents(grid):
-    parents = []
-    if len(grid.Parents) == 0: return grid
-    for parent in grid.Parents: parents.append(get_all_parents(parent))
-    return list(set(parents))
-
-def run_vtk():
-    gui = pyface.GUI()
-    importer = HierarchyImporter()
-    importer.edit_traits(handler = HierarchyImportHandler(
-            importer = importer))
-    #ehds.edit_traits()
-    gui.start_event_loop()
-
-
-if __name__=="__main__":
-    print "This code probably won't work.  But if you want to give it a try,"
-    print "you need:"
-    print
-    print "VTK (CVS)"
-    print "Mayavi2 (from Enthought)"
-    print
-    print "If you have 'em, give it a try!"
-    print
-    run_vtk()


--- a/yt/mods.py	Mon Mar 28 13:44:14 2011 -0600
+++ b/yt/mods.py	Mon Mar 28 13:44:34 2011 -0600
@@ -47,7 +47,7 @@
     derived_field, add_field, FieldInfo, \
     ValidateParameter, ValidateDataField, ValidateProperty, \
     ValidateSpatial, ValidateGridType, \
-    EnzoTimeSeries, TimeSeriesData, AnalysisTask, analysis_task
+    TimeSeriesData, AnalysisTask, analysis_task
 
 from yt.data_objects.derived_quantities import \
     add_quantity, quantity_info
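
The practical upshot of this hunk: EnzoTimeSeries is no longer re-exported
from yt.mods, so scripts must lean on the names that remain.  Only the
import line below is taken from this diff; everything else is up to the
user's script:

    # Still available after this change:
    from yt.mods import TimeSeriesData, AnalysisTask, analysis_task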


--- a/yt/utilities/_amr_utils/VolumeIntegrator.pyx	Mon Mar 28 13:44:14 2011 -0600
+++ b/yt/utilities/_amr_utils/VolumeIntegrator.pyx	Mon Mar 28 13:44:34 2011 -0600
@@ -63,6 +63,7 @@
     double fmod(double x, double y)
     double log2(double x)
     long int lrint(double x)
+    double fabs(double x)
 
 cdef extern from "FixedInterpolator.h":
     np.float64_t fast_interpolate(int ds[3], int ci[3], np.float64_t dp[3],
@@ -596,6 +597,7 @@
             if tdelta[i] < 0: tdelta[i] *= -1
         # We have to jumpstart our calculation
         enter_t = intersect_t
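+        # hit counts the cells sampled along the ray; it must start at zero.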
+        hit = 0
         while 1:
             # dims here is one less than the dimensions of the data,
             # but we are tracing on the grid, not on the data...
@@ -634,7 +636,7 @@
                     cur_ind[2] += step[2]
                     enter_t = tmax[2]
                     tmax[2] += tdelta[2]
-            if enter_t > 1.0: break
+            if enter_t >= 1.0: break
         if return_t != NULL: return_t[0] = exit_t
         return hit
 
@@ -913,14 +915,14 @@
     cdef AdaptiveRayPacket **lpacket_pointers
 
     def __cinit__(self, center, rays_per_cell, initial_nside,
-                  np.float64_t normalization, brick_list, int max_nside = 8096):
+                  np.float64_t normalization, brick_list, int max_nside = 8192):
         cdef int i
         self.max_nside = max_nside
         self.center[0] = center[0]
         self.center[1] = center[1]
         self.center[2] = center[2]
         self.rays_per_cell = rays_per_cell
-        cdef AdaptiveRayPacket *ray = self.first
+        cdef AdaptiveRayPacket *ray
         cdef AdaptiveRayPacket *last = NULL
         cdef PartitionedGrid pg
         cdef double v_dir[3]
@@ -942,7 +944,7 @@
             ray.prev = last
             ray.ipix = i
             ray.nside = initial_nside
-            ray.t = 0.0 # Start in the first brick
+            ray.t = 0.0 # We assume we are not on a brick boundary
             healpix_interface.pix2vec_nest(initial_nside, i, v_dir)
             ray.v_dir[0] = v_dir[0] * normalization
             ray.v_dir[1] = v_dir[1] * normalization
@@ -1006,29 +1008,34 @@
         cdef AdaptiveRayPacket *ray = self.packet_pointers[pgi]
         cdef AdaptiveRayPacket *next
         cdef int *grid_neighbors = self.find_neighbors(pgi, pg.dds[0], ledges, redges)
+        cdef np.float64_t enter_t, dt, offpos[3]
+        cdef int found_a_home, hit
+        #print "Grid: ", pgi, "has", grid_neighbors[0], "neighbors"
         while ray != NULL:
             # Note that we may end up splitting a ray such that it ends up
-            # outside the brick!
+            # outside the brick!  This will likely cause them to get lost.
             #print count
             count +=1
-            #if count > 10+self.nrays or ray.cgi != pgi:
-            #    raise RuntimeError
             # We don't need to check for intersection anymore, as we are the
             # Ruler of the planet Omicron Persei 8
             #if self.intersects(ray, pg):
             ray = self.refine_ray(ray, domega, pg.dds[0],
                                   pg.left_edge, pg.right_edge)
-            pg.integrate_ray(self.center, ray.v_dir, ray.value,
-                             tf, &ray.t)
+            enter_t = ray.t
+            hit = pg.integrate_ray(self.center, ray.v_dir, ray.value, tf, &ray.t)
+            if hit == 0: dt = 0.0
+            else: dt = (ray.t - enter_t)/hit
             for i in range(3):
-                ray.pos[i] = ray.v_dir[i] * (ray.t + 1e-8) + self.center[i]
+                ray.pos[i] = ray.v_dir[i] * ray.t + self.center[i]
+                offpos[i] = ray.pos[i] + ray.v_dir[i] * 1e-5*dt
             # We set 'next' after the refinement has occurred
             next = ray.brick_next
+            found_a_home = 0
             for j in range(grid_neighbors[0]):
                 i = grid_neighbors[j+1]
-                if ((ledges[i, 0] <= ray.pos[0] <= redges[i, 0]) and
-                    (ledges[i, 1] <= ray.pos[1] <= redges[i, 1]) and
-                    (ledges[i, 2] <= ray.pos[2] <= redges[i, 2])):
+                if ((ledges[i, 0] <= offpos[0] <= redges[i, 0]) and
+                    (ledges[i, 1] <= offpos[1] <= redges[i, 1]) and
+                    (ledges[i, 2] <= offpos[2] <= redges[i, 2])):
                     if self.lpacket_pointers[i] == NULL:
                         self.packet_pointers[i] = \
                         self.lpacket_pointers[i] = ray
@@ -1038,6 +1045,7 @@
                         self.lpacket_pointers[i] = ray
                         ray.brick_next = NULL
                     #ray.cgi = i
+                    found_a_home = 1
                     break
             ray = next
         free(grid_neighbors)
@@ -1062,18 +1070,18 @@
         gre[2] = redges[this_grid, 2] + dds
         for i in range(this_grid+1, ledges.shape[0]):
             # Check for overlap
-            if ((gle[0] < redges[i, 0] and gre[0] > ledges[i, 0]) and
-                (gle[1] < redges[i, 1] and gre[1] > ledges[i, 1]) and
-                (gle[2] < redges[i, 2] and gre[2] > ledges[i, 2])):
+            if ((gle[0] <= redges[i, 0] and gre[0] >= ledges[i, 0]) and
+                (gle[1] <= redges[i, 1] and gre[1] >= ledges[i, 1]) and
+                (gle[2] <= redges[i, 2] and gre[2] >= ledges[i, 2])):
                 count += 1
-        cdef int *tr = <int *> malloc(sizeof(int) * count + 1)
+        cdef int *tr = <int *> malloc(sizeof(int) * (count + 1))
         tr[0] = count
         count = 0
         for i in range(this_grid+1, ledges.shape[0]):
             # Check for overlap
-            if ((gle[0] < redges[i, 0] and gre[0] > ledges[i, 0]) and
-                (gle[1] < redges[i, 1] and gre[1] > ledges[i, 1]) and
-                (gle[2] < redges[i, 2] and gre[2] > ledges[i, 2])):
+            if ((gle[0] <= redges[i, 0] and gre[0] >= ledges[i, 0]) and
+                (gle[1] <= redges[i, 1] and gre[1] >= ledges[i, 1]) and
+                (gle[2] <= redges[i, 2] and gre[2] >= ledges[i, 2])):
                 tr[count + 1] = i
                 count += 1
         return tr
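
Two of the fixes above are easy to miss.  The malloc change is pure
operator precedence: "sizeof(int) * count + 1" reserves count ints plus a
single byte, while "sizeof(int) * (count + 1)" reserves the intended
count + 1 ints -- the extra slot holds the neighbor count stored in tr[0].
The comparison change makes bricks whose edges merely touch count as
neighbors.  A sketch of the now-inclusive overlap test in plain Python,
with illustrative names:

    # Inclusive axis-aligned box overlap, mirroring find_neighbors above.
    def overlaps(gle, gre, ledge, redge):
        return all(gle[i] <= redge[i] and gre[i] >= ledge[i]
                   for i in range(3))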


--- a/yt/utilities/command_line.py	Mon Mar 28 13:44:14 2011 -0600
+++ b/yt/utilities/command_line.py	Mon Mar 28 13:44:34 2011 -0600
@@ -224,42 +224,7 @@
             func(self, subcmd, opts, arg)
     return arg_iterate
 
-def _get_vcs_type(path):
-    if os.path.exists(os.path.join(path, ".hg")):
-        return "hg"
-    if os.path.exists(os.path.join(path, ".svn")):
-        return "svn"
-    return None
-
-def _get_svn_version(path):
-    p = subprocess.Popen(["svn", "info", path], stdout = subprocess.PIPE,
-                                               stderr = subprocess.STDOUT)
-    stdout, stderr = p.communicate()
-    return stdout
-
-def _update_svn(path):
-    f = open(os.path.join(path, "yt_updater.log"), "a")
-    f.write("\n\nUPDATE PROCESS: %s\n\n" % (time.asctime()))
-    p = subprocess.Popen(["svn", "up", path], stdout = subprocess.PIPE,
-                                              stderr = subprocess.STDOUT)
-    stdout, stderr = p.communicate()
-    f.write(stdout)
-    f.write("\n\n")
-    if p.returncode:
-        print "BROKEN: See %s" % (os.path.join(path, "yt_updater.log"))
-        sys.exit(1)
-    f.write("Rebuilding modules\n\n")
-    p = subprocess.Popen([sys.executable, "setup.py", "build_ext", "-i"], cwd=path,
-                        stdout = subprocess.PIPE, stderr = subprocess.STDOUT)
-    stdout, stderr = p.communicate()
-    f.write(stdout)
-    f.write("\n\n")
-    if p.returncode:
-        print "BROKEN: See %s" % (os.path.join(path, "yt_updater.log"))
-        sys.exit(1)
-    f.write("Successful!\n")
-
-def _update_hg(path):
+def _update_hg(path, skip_rebuild = False):
     from mercurial import hg, ui, commands
     f = open(os.path.join(path, "yt_updater.log"), "a")
     u = ui.ui()
@@ -280,10 +245,11 @@
         print "    $ cd %s" % (path)
         print "    $ hg up"
         print "    $ %s setup.py develop" % (sys.executable)
-        sys.exit(1)
+        return 1
     print "Updating the repository"
     f.write("Updating the repository\n\n")
     commands.update(u, repo, check=True)
+    if skip_rebuild: return
     f.write("Rebuilding modules\n\n")
     p = subprocess.Popen([sys.executable, "setup.py", "build_ext", "-i"], cwd=path,
                         stdout = subprocess.PIPE, stderr = subprocess.STDOUT)
@@ -294,6 +260,7 @@
         print "BROKEN: See %s" % (os.path.join(path, "yt_updater.log"))
         sys.exit(1)
     f.write("Successful!\n")
+    print "Updated successfully."
 
 def _get_hg_version(path):
     from mercurial import hg, ui, commands
@@ -303,11 +270,6 @@
     commands.identify(u, repo)
     return u.popbuffer()
 
-_vcs_identifier = dict(svn = _get_svn_version,
-                        hg = _get_hg_version)
-_vcs_updater = dict(svn = _update_svn,
-                     hg = _update_hg)
-
 class YTCommands(cmdln.Cmdln):
     name="yt"
 
@@ -342,20 +304,32 @@
         print
         print "yt module located at:"
         print "    %s" % (path)
+        update_supp = False
+        if "YT_DEST" in os.environ:
+            spath = os.path.join(
+                     os.environ["YT_DEST"], "src", "yt-supplemental")
+            if os.path.isdir(spath):
+                print "The supplemental repositories are located at:"
+                print "    %s" % (spath)
+                update_supp = True
+        vstring = None
         if "site-packages" not in path:
-            vc_type = _get_vcs_type(path)
-            vstring = _vcs_identifier[vc_type](path)
+            vstring = _get_hg_version(path)
             print
             print "The current version of the code is:"
             print
             print "---"
-            print vstring
+            print vstring.strip()
             print "---"
             print
             print "This installation CAN be automatically updated."
             if opts.update_source:
                 _update_hg(path)
+        elif opts.update_source:
+            print
+            print "You have to update this installation yourself."
+            print
         if vstring is not None and opts.outputfile is not None:
             open(opts.outputfile, "w").write(vstring)
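
Since _update_hg now returns 1 on a dirty working tree instead of calling
sys.exit(1), callers can recover.  A sketch of a tolerant call site (the
surrounding code is assumed, not part of this diff):

    # Hypothetical caller: skip the rebuild and tolerate local changes.
    if _update_hg(path, skip_rebuild=True):
        print "Update skipped; resolve local changes and re-run."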
 
@@ -591,7 +565,349 @@
                             weight="CellMassMsun")
         ph.modify["line"](pr.data["Density"], pr.data["Temperature"])
         pc.save()
-    
+
+    @cmdln.option("-d", "--desc", action="store",
+                  default = None, dest="desc",
+                  help="Description for this pasteboard entry")
+    def do_pasteboard(self, subcmd, opts, arg):
+        """
+        Place a file into the user's pasteboard
+        """
+        if opts.desc is None:
+            raise RuntimeError("A description is required; pass one with -d/--desc.")
+        from yt.utilities.pasteboard import PostInventory
+        pp = PostInventory()
+        pp.add_post(arg, desc=opts.desc)
+
+    @cmdln.option("-o", "--output", action="store",
+                  default = None, dest="output_fn",
+                  help="File to output to; else, print.")
+    def do_pastegrab(self, subcmd, opts, username, paste_id):
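+        """
+        Retrieve a file from another user's pasteboard
+        """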
+        from yt.utilities.pasteboard import retrieve_pastefile
+        retrieve_pastefile(username, paste_id, opts.output_fn)
+
+    def do_bootstrap_dev(self, subcmd, opts):
+        """
+        Bootstrap a yt development environment
+        """
+        from mercurial import hg, ui, commands
+        import imp
+        import getpass
+        import json
+        uu = ui.ui()
+        print
+        print "Hi there!  Welcome to the yt development bootstrap tool."
+        print
+        print "This should get you started with mercurial as well as a few"
+        print "other handy things, like a pasteboard of your very own."
+        print
+        # We have to do a couple things.
+        # First, we check that YT_DEST is set.
+        if "YT_DEST" not in os.environ:
+            print
+            print "*** You must set the environment variable YT_DEST ***"
+            print "*** to point to the installation location!        ***"
+            print
+            sys.exit(1)
+        supp_path = os.path.join(os.environ["YT_DEST"], "src",
+                                 "yt-supplemental")
+        # Now we check that the supplemental repository is checked out.
+        if not os.path.isdir(supp_path):
+            print
+            print "*** The yt-supplemental repository is not checked ***"
+            print "*** out.  I can do this for you, but because this ***"
+            print "*** is a delicate act, I require you to respond   ***"
+            print "*** to the prompt with the word 'yes'.            ***"
+            print
+            response = raw_input("Do you want me to try to check it out? ")
+            if response != "yes":
+                print
+                print "Okay, I understand.  You can check it out yourself."
+                print "This command will do it:"
+                print
+                print "$ hg clone http://hg.enzotools.org/yt-supplemental/ ",
+                print "%s" % (supp_path)
+                print
+                sys.exit(1)
+            rv = commands.clone(uu,
+                    "http://hg.enzotools.org/yt-supplemental/", supp_path)
+            if rv:
+                print "Something has gone wrong.  Quitting."
+                sys.exit(1)
+        # Now we think we have our supplemental repository.
+        print
+        print "I have found the yt-supplemental repository at %s" % (supp_path)
+        print
+        print "Let's load up and check what we need to do to get up and"
+        print "running."
+        print
+        print "There are three stages:"
+        print
+        print " 1. Setting up your ~/.hgrc to have a username."
+        print " 2. Setting up your bitbucket user account and the hgbb"
+        print "    extension."
+        print " 3. Setting up a new pasteboard repository."
+        print
+        firstname = lastname = email_address = bbusername = repo_list = None
+        # Now we try to import the cedit extension.
+        try:
+            result = imp.find_module("cedit", [supp_path])
+        except ImportError:
+            print "I was unable to find the 'cedit' module in %s" % (supp_path)
+            print "This may be due to a broken checkout."
+            print "Sorry, but I'm going to bail."
+            sys.exit(1)
+        cedit = imp.load_module("cedit", *result)
+        try:
+            result = imp.find_module("hgbb", [supp_path + "/hgbb"])
+        except ImportError:
+            print "I was unable to find the 'hgbb' module in %s" % (supp_path)
+            print "This may be due to a broken checkout."
+            print "Sorry, but I'm going to bail."
+            sys.exit(1)
+        hgbb = imp.load_module("hgbb", *result)
+        if uu.config("ui","username",None) is None:
+            print "You don't have a username specified in your ~/.hgrc."
+            print "Let's set this up.  If you would like to quit at any time,"
+            print "hit Ctrl-C."
+            print
+            firstname = raw_input("What is your first name? ")
+            lastname = raw_input("What is your last name? ")
+            email_address = raw_input("What is your email address? ")
+            print
+            print "Thanks.  I will now add a username of this form to your"
+            print "~/.hgrc file:"
+            print
+            print "    %s %s <%s>" % (firstname, lastname, email_address)
+            print
+            loki = raw_input("Press enter to go on, Ctrl-C to exit.")
+            print
+            cedit.setuser(uu, name="%s %s" % (firstname, lastname),
+                              email="%s" % (email_address),
+                              local=False, username=False)
+            print
+        else:
+            print "Looks like you already have a username!"
+            print "We'll skip that step, then."
+            print
+        print "Now we'll set up BitBucket user.  If you would like to do this"
+        print "yourself, please visit:"
+        print " https://bitbucket.org/account/signup/?plan=5_users"
+        print "for a free account."
+        print
+        loki = raw_input("Do you have a BitBucket.org user already? [yes/no]")
+        if loki.strip().upper() == "YES":
+            bbusername = raw_input("Okay, cool.  What is your username?  ").strip()
+            # Now we get information about the username.
+            if firstname is None or lastname is None:
+                rv = hgbb._bb_apicall(uu, "users/%s" % bbusername, None, False)
+                rv = json.loads(rv)
+                firstname = rv['user']["first_name"]
+                lastname = rv['user']["last_name"]
+                repo_list = rv['repositories']
+                print "Retrieved your info:"
+                print "  username:   %s" % (bbusername)
+                print "  first name: %s" % (firstname)
+                print "  last name:  %s" % (lastname)
+        elif loki.strip().upper() == "NO":
+            print "Okay, we can set you up with one.  It's probably better for"
+            print "it to be all lowercase letter."
+            print
+            bbusername = raw_input("What is your desired username? ").strip()
+            if firstname is None:
+                firstname = raw_input("What's your first name? ").strip()
+            if lastname is None:
+                lastname = raw_input("What's your last name? ").strip()
+            if email_address is None:
+                email_address = raw_input("What's your email address? ").strip()
+            print
+            print "Okay, I'll see if I can create a user with this information:"
+            print "  username:   %s" % (bbusername)
+            print "  first name: %s" % (firstname)
+            print "  last name:  %s" % (lastname)
+            print "  email:      %s" % (email_address)
+            print
+            print "Now, I'm going to ask for a password.  This password will"
+            print "be transmitted over HTTPS (not HTTP) and will not be stored"
+            print "in any local file.  But, it will be stored in memory for"
+            print "the duration of the user-creation process."
+            print
+            while 1:
+                password1 = getpass.getpass("Password? ")
+                password2 = getpass.getpass("Confirm? ")
+                if password1 == password2: break
+                print "Sorry, they didn't match!  Let's try again."
+                print
+            rv = hgbb._bb_apicall(uu, "newuser",
+                                    dict(username=bbusername,
+                                         password=password1,
+                                         email=email_address,
+                                         first_name = firstname,
+                                         last_name = lastname),
+                                   False)
+            del password1, password2
+            if str(json.loads(rv)['username']) == bbusername:
+                print "Successful!  You probably just got an email asking you"
+                print "to confirm this."
+            else:
+                print "Okay, something is wrong.  Quitting!"
+                sys.exit(1)
+        else:
+            print "Not really sure what you replied with.  Quitting!"
+            sys.exit(1)
+        # We're now going to do some modification of the hgrc.
+        # We need an hgrc first.
+        hgrc_path = [cedit.config.defaultpath("user", uu)]
+        hgrc_path = cedit.config.verifypaths(hgrc_path)
+        # Now we set up the hgbb extension
+        if uu.config("extensions","config",None) is None:
+            # cedit is a module, but hgbb is a file.  So we call dirname here.
+            cedit_path = os.path.dirname(cedit.__file__)
+            print "Now we're going to turn on the cedit extension in:"
+            print "    ", hgrc_path
+            print "This will enable you to edit your configuration from the"
+            print "command line.  Mainly, this is useful to use the command"
+            print "'addsource', which will let you add a new source to a given"
+            print "mercurial repository -- like a fork, or your own fork."
+            print
+            print "This constitutes adding the path to the cedit extension,"
+            print "which will look like this:"
+            print
+            print "   [extensions]"
+            print "   config=%s" % cedit_path
+            print
+            loki = raw_input("Press enter to go on, Ctrl-C to exit.")
+            cedit.config.setoption(uu, hgrc_path, "extensions.config=%s" % cedit_path)
+        if uu.config("extensions","hgbb",None) is None:
+            hgbb_path = hgbb.__file__
+            if hgbb_path.endswith(".pyc"): hgbb_path = hgbb_path[:-1]
+            print "Now we're going to turn on the hgbb extension in:"
+            print "    ", hgrc_path
+            print "This will enable you to access BitBucket more easily, as well"
+            print "as create repositories from the command line."
+            print
+            print "This constitutes adding the path to the hgbb extension,"
+            print "which will look like this:"
+            print
+            print "   [extensions]"
+            print "   hgbb=%s" % hgbb_path
+            print
+            loki = raw_input("Press enter to go on, Ctrl-C to exit.")
+            cedit.config.setoption(uu, hgrc_path, "extensions.hgbb=%s" % hgbb_path)
+        if uu.config("bb","username", None) is None:
+            print "We'll now set up your username for BitBucket."
+            print "We will add this:"
+            print
+            print "   [bb]"
+            print "   username = %s" % (bbusername)
+            print
+            loki = raw_input("Press enter to go on, Ctrl-C to exit.")
+            cedit.config.setoption(uu, hgrc_path, "bb.username=%s" % bbusername)
+        # We now reload the UI's config file so that it catches the [bb]
+        # section changes.
+        uu.readconfig(hgrc_path[0])
+        # Now the only thing remaining to do is to set up the pasteboard
+        # repository.
+        # This is, unfortunately, the most difficult.
+        print
+        print "We are now going to set up a pasteboard. This is a mechanism"
+        print "for versioned posting of snippets, collaboration and"
+        print "discussion."
+        print
+        # Let's get the full list of repositories
+        pasteboard_name = "%s.bitbucket.org" % (bbusername.lower())
+        if repo_list is None:
+            rv = hgbb._bb_apicall(uu, "users/%s" % bbusername, None, False)
+            rv = json.loads(rv)
+            repo_list = rv['repositories']
+        create = True
+        for repo in repo_list:
+            if repo['name'] == pasteboard_name:
+                create = False
+        if create:
+            # Now we first create the repository, but we
+            # will only use the creation API, not the bbcreate command.
+            print
+            print "I am now going to create the repository:"
+            print "    ", pasteboard_name
+            print "on BitBucket.org.  This will set up the domain"
+            print "     http://%s" % (pasteboard_name)
+            print "which will point to the current contents of the repo."
+            print
+            loki = raw_input("Press enter to go on, Ctrl-C to exit.")
+            data = dict(name=pasteboard_name)
+            hgbb._bb_apicall(uu, 'repositories', data)
+        # Now we clone
+        pasteboard_path = os.path.join(os.environ["YT_DEST"], "src",
+                                       pasteboard_name)
+        if os.path.isdir(pasteboard_path):
+            print "Found an existing clone of the pasteboard repo:"
+            print "    ", pasteboard_path
+        else:
+            print
+            print "I will now clone a copy of your pasteboard repo."
+            print
+            loki = raw_input("Press enter to go on, Ctrl-C to exit.")
+            commands.clone(uu, "https://%s@bitbucket.org/%s/%s" % (
+                             bbusername, bbusername, pasteboard_name),
+                           pasteboard_path)
+            pbtemplate_path = os.path.join(supp_path, "pasteboard_template")
+            pb_hgrc_path = os.path.join(pasteboard_path, ".hg", "hgrc")
+            cedit.config.setoption(uu, [pb_hgrc_path],
+                                   "paths.pasteboard = " + pbtemplate_path)
+            if create:
+                # We have to pull in the changesets from the pasteboard.
+                pb_repo = hg.repository(uu, pasteboard_path)
+                commands.pull(uu, pb_repo,
+                              os.path.join(supp_path, "pasteboard_template"))
+        if ytcfg.get("yt","pasteboard_repo") != pasteboard_path:
+            print
+            print "Now setting the pasteboard_repo option in"
+            print "~/.yt/config to point to %s" % (pasteboard_path)
+            print
+            loki = raw_input("Press enter to go on, Ctrl-C to exit.")
+            dotyt_path = os.path.expanduser("~/.yt")
+            if not os.path.isdir(dotyt_path):
+                print "There's no directory:"
+                print "    ", dotyt_path
+                print "I will now create it."
+                print
+                loki = raw_input("Press enter to go on, Ctrl-C to exit.")
+                os.mkdir(dotyt_path)
+            ytcfg_path = os.path.expanduser("~/.yt/config")
+            cedit.config.setoption(uu, [ytcfg_path],
+                        "yt.pasteboard_repo=%s" % (pasteboard_path))
+        try:
+            import pygments
+            install_pygments = False
+        except ImportError:
+            install_pygments = True
+        if install_pygments:
+            print "You are missing the Pygments package.  Installing."
+            import pip
+            rv = pip.main(["install", "pygments"])
+            if rv == 1:
+                print "Unable to install Pygments.  Please report this bug to yt-users."
+                sys.exit(1)
+        try:
+            import lxml
+            install_lxml = False
+        except ImportError:
+            install_lxml = True
+        if install_lxml:
+            print "You are missing the lxml package.  Installing."
+            import pip
+            rv = pip.main(["install", "lxml"])
+            if rv == 1:
+                print "Unable to install lxml.  Please report this bug to yt-users."
+                sys.exit(1)
+        print
+        print "All done!"
+        print
+        print "You're now set up to use the 'yt pasteboard' command"
+        print "as well as develop using Mercurial and BitBucket."
+        print
+        print "Good luck!"
+
 def run_main():
     for co in ["--parallel", "--paste"]:
         if co in sys.argv: del sys.argv[sys.argv.index(co)]


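For reference, once the walkthrough above has run to completion, the
resulting ~/.hgrc should look roughly like the following.  The name,
email, username, and paths here are illustrative placeholders, not
values from the changeset:

    [ui]
    username = Jane Doe <jane@example.com>

    [extensions]
    config=/path/to/yt/utilities/cedit
    hgbb=/path/to/yt/utilities/hgbb.py

    [bb]
    username = janedoe
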
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/yt/utilities/parallel_tools/task_queue.py	Mon Mar 28 13:44:34 2011 -0600
@@ -0,0 +1,149 @@
+"""
+A task queue for distributing work to worker agents
+
+Author: Matthew Turk <matthewturk at gmail.com>
+Affiliation: NSF / Columbia
+Homepage: http://yt.enzotools.org/
+License:
+  Copyright (C) 2011 Matthew Turk.  All Rights Reserved.
+
+  This file is part of yt.
+
+  yt is free software; you can redistribute it and/or modify
+  it under the terms of the GNU General Public License as published by
+  the Free Software Foundation; either version 3 of the License, or
+  (at your option) any later version.
+
+  This program is distributed in the hope that it will be useful,
+  but WITHOUT ANY WARRANTY; without even the implied warranty of
+  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+  GNU General Public License for more details.
+
+  You should have received a copy of the GNU General Public License
+  along with this program.  If not, see <http://www.gnu.org/licenses/>.
+"""
+
+import time
+import threading
+from yt.funcs import *
+
+# The idea here is that we have a set of tasks, which we want to distribute.
+# We'll try to make this forward-compatible.  To do so, we want to support the
+# idea that there's a single, global set of tasks, as well as consumers that
+# receive tasks from the main controller.  These consumers then pass them out
+# to executors.
+#
+# The middle level, the "Consumer," is only really distinct from the executor
+# in the case that there is an MPI subcommunicator.  The reason for the
+# separation is so that the controller only communicates with a single member
+# of each subcommunicator, which then passes that info back out.
+
+def locked(func):
+    @wraps(func)
+    def exclusive(self, *args, **kwargs):
+        with self.lock:
+            return func(self, *args, **kwargs)
+    return exclusive
+
+class YTTaskCommunicator(object):
+    # This should carefully be checked for a race condition, particularly in
+    # the wait() function
+    def __init__(self, interval = 2.0):
+        self.interval = interval
+        self.task_id = None
+        self.waiting = False
+        self.lock = threading.Lock()
+
+    @locked
+    def send_task(self, task_id):
+        self.task_id = task_id
+
+    @locked
+    def query(self):
+        return (self.waiting and self.task_id is None)
+
+    def wait(self):
+        self.waiting = True
+        while self.task_id is None:
+            time.sleep(self.interval)
+        with self.lock:
+            self.waiting = False
+            new_task_id = self.task_id
+            self.task_id = None
+        return new_task_id
+
+class YTTaskQueueController(threading.Thread):
+    # There's only one of these for every instance of yt -- whether that
+    # instance is spread across processors or not.
+    # We assume that this will exist in the process space of a consumer, so it
+    # will be threading based.
+    def __init__(self, tasks, interval = 2.0, communicators = None):
+        self.assignments = []
+        self.interval = interval
+        # Communicators can be anything but they have to implement a mechanism
+        # for saying, "I'm ready" and "I'm done"
+        self.tasks = tasks
+        self.communicators = communicators
+        threading.Thread.__init__(self)
+
+    def run(self):
+        # Now we bootstrap
+        for i,c in enumerate(self.communicators):
+            if i == len(self.tasks): break
+            self.assignments.append(i)
+            c.send_task(i)
+        while len(self.assignments) < len(self.tasks):
+            time.sleep(self.interval)
+            for i,c in enumerate(self.communicators):
+                if not c.query(): continue
+                print "Sending assignment %s to %s" % (
+                    len(self.assignments), i)
+                c.send_task(len(self.assignments))
+                self.assignments.append(i)
+                if len(self.assignments) >= len(self.tasks): break
+        terminated = 0
+        while terminated != len(self.communicators):
+            time.sleep(self.interval)
+            for i,c in enumerate(self.communicators):
+                if not c.query(): continue
+                c.send_task(-1)
+                terminated += 1
+                print "Terminated %s" % (i)
+
+class YTTaskQueueConsumer(object):
+    # One of these will exist per individual MPI task or one per MPI
+    # subcommunicator, depending on the level of parallelism.  They serve to
+    # talk to the YTTaskQueueController on one side and possibly several
+    # YTTaskExecutors on the other.
+    #
+    # One potential setup for this, when using MPI, would be to have the
+    # Executors each have one of these, but only the head process of that
+    # subcommunicator possess an external communicator.  Then in next_task,
+    # if the external communicator exists, one would probe that; otherwise,
+    # accept a broadcast from the internal communicator's 0th task.
+    def __init__(self, external_communicator, internal_communicator):
+        self.external_communicator = external_communicator
+        self.internal_communicator = internal_communicator
+
+    def next_task(self):
+        next_task = self.external_communicator.wait()
+        #self.internal_communicator.notify(next_task)
+        return next_task
+
+class YTTaskExecutor(object):
+    _count = 0
+    # One of these will exist per computational actor
+    def __init__(self, tasks, communicator):
+        self.communicator = communicator
+        self.tasks = tasks
+        self.name = "Runner%03s" % (self.__class__._count)
+        self.__class__._count += 1
+
+    def run(self):
+        # Note that right now this only works for a 1:1 mapping of
+        # YTTaskQueueConsumer to YTTaskExecutor
+        next_task = None
+        while 1:
+            next_task = self.communicator.next_task()
+            if next_task == -1: break
+            print "Executing on %s" % (self.name),
+            self.tasks[next_task]()
+        print "Concluded on %s" % (self.name)


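To make the moving parts above concrete, here is a minimal,
single-process sketch of wiring them together.  It assumes the classes
are importable from yt.utilities.parallel_tools.task_queue as defined
in this changeset; the tasks are toy callables, and since there is no
MPI subcommunicator the consumer's internal communicator is just None:

    import threading, time
    from yt.utilities.parallel_tools.task_queue import \
        YTTaskCommunicator, YTTaskQueueController, \
        YTTaskQueueConsumer, YTTaskExecutor

    # Four toy tasks; anything callable with no arguments works.
    tasks = [lambda i=i: time.sleep(0.1) for i in range(4)]
    # One communicator per worker; the controller polls these.
    comms = [YTTaskCommunicator(interval=0.1) for n in range(2)]
    controller = YTTaskQueueController(tasks, interval=0.1,
                                       communicators=comms)
    controller.start()  # a Thread: hands out task indices, then -1s
    workers = []
    for c in comms:
        consumer = YTTaskQueueConsumer(c, None)
        executor = YTTaskExecutor(tasks, consumer)
        t = threading.Thread(target=executor.run)
        t.start()
        workers.append(t)
    for t in workers: t.join()
    controller.join()
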
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/yt/utilities/pasteboard.py	Mon Mar 28 13:44:34 2011 -0600
@@ -0,0 +1,166 @@
+"""
+Author: Matthew Turk <matthewturk at gmail.com>
+Affiliation: NSF / Columbia
+Homepage: http://yt.enzotools.org/
+License:
+  Copyright (C) 2011 Matthew Turk.  All Rights Reserved.
+
+  This file is part of yt.
+
+  yt is free software; you can redistribute it and/or modify
+  it under the terms of the GNU General Public License as published by
+  the Free Software Foundation; either version 3 of the License, or
+  (at your option) any later version.
+
+  This program is distributed in the hope that it will be useful,
+  but WITHOUT ANY WARRANTY; without even the implied warranty of
+  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+  GNU General Public License for more details.
+
+  You should have received a copy of the GNU General Public License
+  along with this program.  If not, see <http://www.gnu.org/licenses/>.
+"""
+
+from mercurial import ui, repo, commands, hg
+import json
+import os
+import time
+import uuid
+import urllib
+
+from yt.config import ytcfg
+
+def _get_last_mod(filectx):
+    rev = filectx.filectx(filectx.filerev())
+    return rev
+
+class PostInventory(object):
+    def __init__(self, uu = None, repo_fn = None):
+        if uu is None: uu = ui.ui()
+        if repo_fn is None: repo_fn = ytcfg.get("yt","pasteboard_repo")
+        if repo_fn == '':
+            raise KeyError("~/.yt/config:[yt]pasteboard_repo")
+        self.repo_fn = repo_fn
+        self.bbrepo = hg.repository(uu, repo_fn)
+        config_fn = os.path.join(repo_fn, ".hg", "hgrc")
+        uu.readconfig(config_fn)
+        commands.pull(uu, self.bbrepo)
+        commands.update(uu, self.bbrepo, clean=True)
+        if not os.path.exists(os.path.join(repo_fn, "posts")):
+            os.makedirs(os.path.join(repo_fn, "posts"))
+        if not os.path.exists(os.path.join(repo_fn, "html")):
+            os.makedirs(os.path.join(repo_fn, "html"))
+        self.uu = uu
+
+    def regenerate_posts(self):
+        self.posts = []
+        for file in self.bbrepo["tip"]:
+            if file.startswith("posts/") and file.count("/") == 1 \
+               and not file.endswith(".desc"):
+                filectx = self.bbrepo["tip"][file]
+                last_mod = _get_last_mod(filectx).date()
+                self.posts.append((last_mod[0] + last_mod[1], file))
+        self.posts.sort()
+        self.posts = self.posts[::-1]
+
+    def add_post(self, filename, desc = None,
+                 uu = None, highlight = True, push = True):
+        # We assume the post filename exists in the current space
+        self.regenerate_posts()
+        if uu is None: uu = self.uu
+        prefix = uuid.uuid4()
+        name = "%s-%s" % (prefix, os.path.basename(filename))
+        name_noext = name.replace(".","-")
+        hfn = "html/%s.html" % (name_noext)
+        pfn = "posts/%s" % (name)
+        abs_pfn = os.path.join(self.repo_fn, pfn)
+        abs_hfn = os.path.join(self.repo_fn, hfn)
+        if desc is not None:
+            open(abs_pfn + ".desc", "w").write(desc)
+        self.posts.insert(0, (int(time.time()), "posts/%s" % name))
+        if not os.path.exists(abs_pfn):
+            open(abs_pfn,"w").write(open(filename).read())
+        inv_fname = self.update_inventory()
+        if highlight and not name.endswith(".html"):
+            from pygments.cmdline import main as pygmain
+            rv = pygmain(["pygmentize", "-o", abs_hfn,
+                          "-O", "full", abs_pfn])
+        if not highlight or rv:
+            content = open(abs_pfn).read()
+            open(abs_hfn, "w").write(
+                "<HTML><BODY><PRE>" + content + "</PRE></BODY></HTML>")
+        to_manage = [abs_pfn, abs_hfn]
+        if desc is not None: to_manage.append(abs_pfn + ".desc")
+        commands.add(uu, self.bbrepo, *to_manage)
+        commands.commit(uu, self.bbrepo, *(to_manage + [inv_fname]),
+                        message="Adding %s" % name)
+        if push: commands.push(uu, self.bbrepo)
+
+    def update_inventory(self):
+        tip = self.bbrepo["tip"]
+        vals = []
+        for t, pfn in self.posts:
+            dfn = pfn + ".desc"
+            if dfn in tip:
+                d = tip[dfn].data()
+                last_mod = _get_last_mod(tip[dfn])
+                last_hash = last_mod.hex()
+                uname = last_mod.user()
+            elif pfn not in tip:
+                abs_pfn = os.path.join(self.repo_fn, pfn)
+                uname = self.uu.config("ui","username")
+                if os.path.exists(abs_pfn + ".desc"):
+                    d = open(abs_pfn + ".desc").read()
+                else:
+                    d = open(abs_pfn).read()
+                last_hash = "tip"
+            else:
+                d = tip[pfn].data()
+                last_mod = _get_last_mod(tip[pfn])
+                last_hash = last_mod.hex()
+                uname = last_mod.user()
+            if len(d) > 80: d = d[:77] + "..."
+            name_noext = pfn[6:].replace(".","-")
+            vals.append(dict(modified = time.ctime(t),
+                             modtime = t,
+                             lastmod_hash = last_hash,
+                             fullname = pfn,
+                             htmlname = "html/%s.html" % name_noext,
+                             name = pfn[43:], # 6 for "posts/", 36 for the UUID, 1 for the dash
+                             username = uname,
+                             descr = d)) 
+        fn = os.path.join(self.repo_fn, "inventory.json")
+        f = open(fn, "w")
+        f.write("var inventory_data = ")
+        json.dump(vals, f, indent = 1)
+        f.write(";")
+        return fn
+
+def retrieve_pastefile(username, paste_id, output_fn = None):
+    # First we get the username's inventory.json
+    s = urllib.urlopen("http://%s.bitbucket.org/inventory.json" % (username))
+    data = s.read()
+    # This is an ugly, ugly hack for my lack of understanding of how best to
+    # handle this JSON stuff.
+    data = data[data.find("=")+1:data.rfind(";")] 
+    #import pdb;pdb.set_trace()
+    inv = json.loads(data)
+    k = None
+    if len(paste_id) == 36:
+        # Then this is a UUID
+        for k in inv:
+            if k['fullname'][6:42] == paste_id: break
+    elif len(paste_id) == 10:
+        pp = int(paste_id)
+        for k in inv:
+            if k['modtime'] == pp: break
+    if k is None: raise KeyError(paste_id)
+    # k is our key
+    url = "http://%s.bitbucket.org/%s" % (username, k['fullname'])
+    s = urllib.urlopen(url)
+    data = s.read()
+    if output_fn is not None:
+        if os.path.exists(output_fn): raise IOError(output_fn)
+        open(output_fn, "w").write(data)
+    else:
+        print data


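One detail of the pasteboard format worth calling out: the inventory
file update_inventory() writes is JavaScript, not bare JSON ("var
inventory_data = ...;"), so that a static HTML page on bitbucket.org
can source it directly.  That is why retrieve_pastefile() slices the
payload out before calling json.loads.  A small illustration of that
round trip, with made-up data:

    import json
    # What update_inventory() writes (toy payload):
    wrapper = 'var inventory_data = [{"fullname": "posts/abc.py"}];'
    # What retrieve_pastefile() does to read it back: take everything
    # between the '=' and the trailing ';'.
    payload = wrapper[wrapper.find("=")+1:wrapper.rfind(";")]
    inv = json.loads(payload)
    print inv[0]['fullname']   # -> posts/abc.py
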
--- a/yt/visualization/streamlines.py	Mon Mar 28 13:44:14 2011 -0600
+++ b/yt/visualization/streamlines.py	Mon Mar 28 13:44:34 2011 -0600
@@ -28,7 +28,8 @@
 from yt.utilities.parallel_tools.parallel_analysis_interface import \
     ParallelAnalysisInterface, parallel_passthrough
 from yt.utilities.amr_kdtree.api import AMRKDTree
-
+from yt.data_objects.data_containers import AMRStreamlineBase
+        
 class Streamlines(ParallelAnalysisInterface):
     r"""A collection of streamlines that flow through the volume
 
@@ -41,12 +42,15 @@
         This is the parameter file to streamline
     pos : array_like
         An array of initial starting positions of the streamlines.
-    xfield: field
+    xfield: field, optional
         The x component of the vector field to be streamlined.
-    yfield: field
+        Default: 'x-velocity'
+    yfield: field, optional
         The y component of the vector field to be streamlined.
-    zfield: field
+        Default: 'y-velocity'
+    zfield: field, optional
         The z component of the vector field to be streamlined.
+        Default: 'z-velocity'
     volume : `yt.extensions.volume_rendering.HomogenizedVolume`, optional
         The volume to be streamlined.  Can be specified for
         finer-grained control, but otherwise will be automatically
@@ -58,6 +62,9 @@
     length : float, optional
         Optionally specify the length of integration.  
         Default: na.max(self.pf.domain_right_edge-self.pf.domain_left_edge)
+    direction : real, optional
+        Specifies the direction of integration.  The magnitude of this
+        value has no effect, only the sign.
+        Default: 1
     
     Examples
     --------
@@ -83,14 +90,16 @@
     >>>     ax.plot3D(stream[:,0], stream[:,1], stream[:,2], alpha=0.1)
     >>> pl.savefig('streamlines.png')
     """
-    def __init__(self, pf, positions, xfield, yfield, zfield, volume=None,
-                 dx=None, length=None):
+    def __init__(self, pf, positions, xfield='x-velocity', yfield='y-velocity',
+                 zfield='z-velocity', volume=None,
+                 dx=None, length=None, direction=1):
         self.pf = pf
-        self.start_positions = positions
+        self.start_positions = na.array(positions)
         self.N = self.start_positions.shape[0]
         self.xfield = xfield
         self.yfield = yfield
         self.zfield = zfield
+        self.direction = na.sign(direction)
         if volume is None:
             volume = AMRKDTree(self.pf, fields=[self.xfield,self.yfield,self.zfield],
                             log_fields=[False,False,False], merge_trees=True)
@@ -103,7 +112,7 @@
         self.length = length
         self.steps = int(length/dx)
         self.streamlines = na.zeros((self.N,self.steps,3), dtype='float64')
-
+        
     def integrate_through_volume(self):
         nprocs = self._mpi_get_size()
         my_rank = self._mpi_get_rank()
@@ -119,17 +128,18 @@
         pbar.finish()
         
         self._finalize_parallel(None)
-
+       
     @parallel_passthrough
     def _finalize_parallel(self,data):
         self.streamlines = self._mpi_allsum(self.streamlines)
         
-    def _integrate_through_brick(self, node, stream, step, periodic=False):
+    def _integrate_through_brick(self, node, stream, step,
+                                 periodic=False):
         while (step > 1):
             self.volume.get_brick_data(node)
             brick = node.brick
             stream[-step+1] = stream[-step]
-            brick.integrate_streamline(stream[-step+1], self.dx)
+            brick.integrate_streamline(stream[-step+1], self.direction*self.dx)
             if na.any(stream[-step+1,:] <= self.pf.domain_left_edge) | \
                    na.any(stream[-step+1,:] >= self.pf.domain_right_edge):
                 return 0
@@ -140,7 +150,35 @@
             step -= 1
         return step
 
-    
+    def clean_streamlines(self):
+        temp = na.empty(self.N, dtype='object')
+        for i,stream in enumerate(self.streamlines):
+            temp[i] = stream[na.all(stream != 0.0, axis=1)]
+        self.streamlines = temp
 
+    def path(self, streamline_id):
+        """
+        Returns an AMR1DData object defined by a streamline.
 
+        Parameters
+        ----------
+        streamline_id : int
+            This defines which streamline from the Streamlines object
+            that will define the AMR1DData object.
+
+        Returns
+        -------
+        An AMRStreamlineBase AMR1DData object
+
+        Examples
+        --------
+
+        >>> from yt.visualization.api import Streamlines
+        >>> streamlines = Streamlines(pf, [0.5]*3) 
+        >>> streamlines.integrate_through_volume()
+        >>> stream = streamlines.path(0)
+        >>> matplotlib.pylab.semilogy(stream['t'], stream['Density'], '-x')
         
+        """
+        return AMRStreamlineBase(self.streamlines[streamline_id], pf=self.pf)
+        


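The new clean_streamlines() method depends on the fact that streamline
steps that were never integrated (because the line left the domain)
remain all-zero rows in the preallocated array.  A toy version of the
same trim, on a made-up single streamline:

    import numpy as na
    # Two integrated steps followed by one unfilled (all-zero) row.
    stream = na.array([[0.2, 0.3, 0.4],
                       [0.5, 0.6, 0.7],
                       [0.0, 0.0, 0.0]])
    # Keep only rows where every coordinate is nonzero, exactly as
    # clean_streamlines() does:
    trimmed = stream[na.all(stream != 0.0, axis=1)]
    # trimmed.shape is now (2, 3)
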
--- a/yt/visualization/volume_rendering/camera.py	Mon Mar 28 13:44:14 2011 -0600
+++ b/yt/visualization/volume_rendering/camera.py	Mon Mar 28 13:44:34 2011 -0600
@@ -606,7 +606,7 @@
                  transfer_function = None, fields = None,
                  sub_samples = 5, log_fields = None, volume = None,
                  pf = None, use_kd=True, no_ghost=False,
-                 rays_per_cell = 0.1):
+                 rays_per_cell = 0.1, max_nside = 8192):
         if pf is not None: self.pf = pf
         self.center = na.array(center, dtype='float64')
         self.radius = radius
@@ -625,6 +625,7 @@
         self.volume = volume
         self.initial_nside = nside
         self.rays_per_cell = rays_per_cell
+        self.max_nside = max_nside
 
     def snapshot(self, fn = None):
         tfp = TransferFunctionProxy(self.transfer_function)
@@ -637,9 +638,17 @@
         bricks = [b for b in self.volume.traverse(None, self.center, None)][::-1]
         left_edges = na.array([b.LeftEdge for b in bricks])
         right_edges = na.array([b.RightEdge for b in bricks])
+        min_dx = min(((b.RightEdge[0] - b.LeftEdge[0])/b.my_data[0].shape[0]
+                     for b in bricks))
+        # We jitter a bit if we're on a boundary of our initial grid
+        for i in range(3):
+            if bricks[0].LeftEdge[i] == self.center[i]:
+                self.center += 1e-2 * min_dx
+            elif bricks[0].RightEdge[i] == self.center[i]:
+                self.center -= 1e-2 * min_dx
         ray_source = AdaptiveRaySource(self.center, self.rays_per_cell,
                                        self.initial_nside, self.radius,
-                                       bricks)
+                                       bricks, self.max_nside)
         for i,brick in enumerate(bricks):
             ray_source.integrate_brick(brick, tfp, i, left_edges, right_edges)
             total_cells += na.prod(brick.my_data[0].shape)

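The jitter added to snapshot() above guards against a camera center
that sits exactly on a face of the first brick, where the adaptive ray
source would straddle a boundary.  Note that the whole center vector is
nudged by one percent of the finest cell width, not just the offending
component.  A standalone version of the check, with made-up edges and
cell width:

    import numpy as na
    center = na.array([0.5, 0.5, 0.5])
    # Faces of the first brick; the x-face happens to touch the center.
    left_edge = na.array([0.5, 0.0, 0.0])
    right_edge = na.array([1.0, 1.0, 1.0])
    min_dx = 1.0 / 128
    for i in range(3):
        if left_edge[i] == center[i]:
            center += 1e-2 * min_dx   # shifts all three components
        elif right_edge[i] == center[i]:
            center -= 1e-2 * min_dx
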
Repository URL: https://bitbucket.org/yt_analysis/yt/

--

This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.


