[Yt-svn] commit/yt: 12 new changesets

Bitbucket commits-noreply at bitbucket.org
Tue Mar 15 18:00:06 PDT 2011


12 new changesets in yt:

http://bitbucket.org/yt_analysis/yt/changeset/abc5d39e3758/
changeset:   r3833:abc5d39e3758
branch:      yt
user:        MatthewTurk
date:        2011-02-25 18:41:25
summary:     More fixes to the Adaptive HEALpix rendering, from a few days ago.
Unfortunately, packets still get lost, sometimes don't get placed into the
correct parent, and so on.  I'm a bit puzzled at this point.  I don't believe
it's related to splitting rays and moving them outside their host grid, but it
may be.
affected #:  2 files (849 bytes)

--- a/yt/utilities/_amr_utils/VolumeIntegrator.pyx	Tue Feb 22 23:32:21 2011 -0500
+++ b/yt/utilities/_amr_utils/VolumeIntegrator.pyx	Fri Feb 25 12:41:25 2011 -0500
@@ -63,6 +63,7 @@
     double fmod(double x, double y)
     double log2(double x)
     long int lrint(double x)
+    double fabs(double x)
 
 cdef extern from "FixedInterpolator.h":
     np.float64_t fast_interpolate(int ds[3], int ci[3], np.float64_t dp[3],
@@ -596,6 +597,7 @@
             if tdelta[i] < 0: tdelta[i] *= -1
         # We have to jumpstart our calculation
         enter_t = intersect_t
+        hit = 0
         while 1:
             # dims here is one less than the dimensions of the data,
             # but we are tracing on the grid, not on the data...
@@ -634,7 +636,7 @@
                     cur_ind[2] += step[2]
                     enter_t = tmax[2]
                     tmax[2] += tdelta[2]
-            if enter_t > 1.0: break
+            if enter_t >= 1.0: break
         if return_t != NULL: return_t[0] = exit_t
         return hit
 
@@ -844,14 +846,14 @@
     cdef AdaptiveRayPacket **lpacket_pointers
 
     def __cinit__(self, center, rays_per_cell, initial_nside,
-                  np.float64_t normalization, brick_list, int max_nside = 8096):
+                  np.float64_t normalization, brick_list, int max_nside = 8192):
         cdef int i
         self.max_nside = max_nside
         self.center[0] = center[0]
         self.center[1] = center[1]
         self.center[2] = center[2]
         self.rays_per_cell = rays_per_cell
-        cdef AdaptiveRayPacket *ray = self.first
+        cdef AdaptiveRayPacket *ray
         cdef AdaptiveRayPacket *last = NULL
         cdef PartitionedGrid pg
         cdef double v_dir[3]
@@ -873,7 +875,7 @@
             ray.prev = last
             ray.ipix = i
             ray.nside = initial_nside
-            ray.t = 0.0 # Start in the first brick
+            ray.t = 0.0 # We assume we are not on a brick boundary
             healpix_interface.pix2vec_nest(initial_nside, i, v_dir)
             ray.v_dir[0] = v_dir[0] * normalization
             ray.v_dir[1] = v_dir[1] * normalization
@@ -937,29 +939,34 @@
         cdef AdaptiveRayPacket *ray = self.packet_pointers[pgi]
         cdef AdaptiveRayPacket *next
         cdef int *grid_neighbors = self.find_neighbors(pgi, pg.dds[0], ledges, redges)
+        cdef np.float64_t enter_t, dt, offpos[3]
+        cdef int found_a_home, hit
+        #print "Grid: ", pgi, "has", grid_neighbors[0], "neighbors"
         while ray != NULL:
             # Note that we may end up splitting a ray such that it ends up
-            # outside the brick!
+            # outside the brick!  This will likely cause it to get lost.
             #print count
             count +=1
-            #if count > 10+self.nrays or ray.cgi != pgi:
-            #    raise RuntimeError
             # We don't need to check for intersection anymore, as we are the
             # Ruler of the planet Omicron Persei 8
             #if self.intersects(ray, pg):
             ray = self.refine_ray(ray, domega, pg.dds[0],
                                   pg.left_edge, pg.right_edge)
-            pg.integrate_ray(self.center, ray.v_dir, ray.value,
-                             tf, &ray.t)
+            enter_t = ray.t
+            hit = pg.integrate_ray(self.center, ray.v_dir, ray.value, tf, &ray.t)
+            if hit == 0: dt = 0.0
+            else: dt = (ray.t - enter_t)/hit
             for i in range(3):
-                ray.pos[i] = ray.v_dir[i] * (ray.t + 1e-8) + self.center[i]
+                ray.pos[i] = ray.v_dir[i] * ray.t + self.center[i]
+                offpos[i] = ray.pos[i] + ray.v_dir[i] * 1e-5*dt
             # We set 'next' after the refinement has occurred
             next = ray.brick_next
+            found_a_home = 0
             for j in range(grid_neighbors[0]):
                 i = grid_neighbors[j+1]
-                if ((ledges[i, 0] <= ray.pos[0] <= redges[i, 0]) and
-                    (ledges[i, 1] <= ray.pos[1] <= redges[i, 1]) and
-                    (ledges[i, 2] <= ray.pos[2] <= redges[i, 2])):
+                if ((ledges[i, 0] <= offpos[0] <= redges[i, 0]) and
+                    (ledges[i, 1] <= offpos[1] <= redges[i, 1]) and
+                    (ledges[i, 2] <= offpos[2] <= redges[i, 2])):
                     if self.lpacket_pointers[i] == NULL:
                         self.packet_pointers[i] = \
                         self.lpacket_pointers[i] = ray
@@ -969,6 +976,7 @@
                         self.lpacket_pointers[i] = ray
                         ray.brick_next = NULL
                     #ray.cgi = i
+                    found_a_home = 1
                     break
             ray = next
         free(grid_neighbors)
@@ -993,18 +1001,18 @@
         gre[2] = redges[this_grid, 2] + dds
         for i in range(this_grid+1, ledges.shape[0]):
             # Check for overlap
-            if ((gle[0] < redges[i, 0] and gre[0] > ledges[i, 0]) and
-                (gle[1] < redges[i, 1] and gre[1] > ledges[i, 1]) and
-                (gle[2] < redges[i, 2] and gre[2] > ledges[i, 2])):
+            if ((gle[0] <= redges[i, 0] and gre[0] >= ledges[i, 0]) and
+                (gle[1] <= redges[i, 1] and gre[1] >= ledges[i, 1]) and
+                (gle[2] <= redges[i, 2] and gre[2] >= ledges[i, 2])):
                 count += 1
-        cdef int *tr = <int *> malloc(sizeof(int) * count + 1)
+        cdef int *tr = <int *> malloc(sizeof(int) * (count + 1))
         tr[0] = count
         count = 0
         for i in range(this_grid+1, ledges.shape[0]):
             # Check for overlap
-            if ((gle[0] < redges[i, 0] and gre[0] > ledges[i, 0]) and
-                (gle[1] < redges[i, 1] and gre[1] > ledges[i, 1]) and
-                (gle[2] < redges[i, 2] and gre[2] > ledges[i, 2])):
+            if ((gle[0] <= redges[i, 0] and gre[0] >= ledges[i, 0]) and
+                (gle[1] <= redges[i, 1] and gre[1] >= ledges[i, 1]) and
+                (gle[2] <= redges[i, 2] and gre[2] >= ledges[i, 2])):
                 tr[count + 1] = i
                 count += 1
         return tr
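The hunks above make three related changes: integrate_ray now counts the cells
it actually sampled (hit), which lets the caller form an average per-cell
dt = (ray.t - enter_t)/hit; re-homed rays are tested at a position nudged
slightly past their exit point, so the containment check selects the brick
being entered rather than the one being left; and find_neighbors now treats
bricks that merely share a face as overlapping, while the malloc is corrected
to reserve count + 1 ints rather than count ints plus one byte (the
parentheses in sizeof(int) * (count + 1) matter).  A rough Python restatement
of the two geometric predicates, with illustrative names that are not part of
the yt API:

def boxes_overlap(gle, gre, le, re):
    # Inclusive comparisons: bricks that share a face now count as
    # neighbors, so rays sitting exactly on a boundary are not dropped.
    return all(gle[i] <= re[i] and gre[i] >= le[i] for i in range(3))

def rehoming_position(center, v_dir, t, dt, eps=1e-5):
    # Nudge the test point slightly past the ray's exit point so the
    # containment check picks the brick the ray is entering.
    return [center[i] + v_dir[i] * t + v_dir[i] * eps * dt
            for i in range(3)]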


--- a/yt/visualization/volume_rendering/camera.py	Tue Feb 22 23:32:21 2011 -0500
+++ b/yt/visualization/volume_rendering/camera.py	Fri Feb 25 12:41:25 2011 -0500
@@ -604,7 +604,7 @@
                  transfer_function = None, fields = None,
                  sub_samples = 5, log_fields = None, volume = None,
                  pf = None, use_kd=True, no_ghost=False,
-                 rays_per_cell = 0.1):
+                 rays_per_cell = 0.1, max_nside = 8192):
         if pf is not None: self.pf = pf
         self.center = na.array(center, dtype='float64')
         self.radius = radius
@@ -623,6 +623,7 @@
         self.volume = volume
         self.initial_nside = nside
         self.rays_per_cell = rays_per_cell
+        self.max_nside = max_nside
 
     def snapshot(self, fn = None):
         tfp = TransferFunctionProxy(self.transfer_function)
@@ -635,9 +636,17 @@
         bricks = [b for b in self.volume.traverse(None, self.center, None)][::-1]
         left_edges = na.array([b.LeftEdge for b in bricks])
         right_edges = na.array([b.RightEdge for b in bricks])
+        min_dx = min(((b.RightEdge[0] - b.LeftEdge[0])/b.my_data[0].shape[0]
+                     for b in bricks))
+        # We jitter a bit if we're on a boundary of our initial grid
+        for i in range(3):
+            if bricks[0].LeftEdge[i] == self.center[i]:
+                self.center += 1e-2 * min_dx
+            elif bricks[0].RightEdge[i] == self.center[i]:
+                self.center -= 1e-2 * min_dx
         ray_source = AdaptiveRaySource(self.center, self.rays_per_cell,
                                        self.initial_nside, self.radius,
-                                       bricks)
+                                       bricks, self.max_nside)
         for i,brick in enumerate(bricks):
             ray_source.integrate_brick(brick, tfp, i, left_edges, right_edges)
             total_cells += na.prod(brick.my_data[0].shape)
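In isolation, the jitter applied in snapshot() looks like the sketch below
(jitter_center is a hypothetical helper, not yt API); note that, as in the
loop above, the whole center vector is shifted whenever any component sits
exactly on a face of the first brick:

import numpy as na

def jitter_center(center, left_edge, right_edge, min_dx, frac=1e-2):
    # Shift the center by a small fraction of the finest cell width if
    # it lies exactly on a face of the initial brick.
    center = na.array(center, dtype='float64')
    for i in range(3):
        if left_edge[i] == center[i]:
            center += frac * min_dx
        elif right_edge[i] == center[i]:
            center -= frac * min_dx
    return center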


http://bitbucket.org/yt_analysis/yt/changeset/1e58d7a138b4/
changeset:   r3834:1e58d7a138b4
branch:      yt
user:        MatthewTurk
date:        2011-02-27 07:12:28
summary:     The beginnings of a multi-level parallelism task queue.  Right now it works for
a single thread, on a single processor.  Next up will be very simple MPI.
Then, integrating it with the time series analysis.  A sample script, showing
how simple it currently is, is here:

http://paste.enzotools.org/show/1517/
affected #:  1 file (4.7 KB)

--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/yt/utilities/parallel_tools/task_queue.py	Sun Feb 27 01:12:28 2011 -0500
@@ -0,0 +1,129 @@
+"""
+A task queue for distributing work to worker agents
+
+Author: Matthew Turk <matthewturk at gmail.com>
+Affiliation: NSF / Columbia
+Homepage: http://yt.enzotools.org/
+License:
+  Copyright (C) 2011 Matthew Turk.  All Rights Reserved.
+
+  This file is part of yt.
+
+  yt is free software; you can redistribute it and/or modify
+  it under the terms of the GNU General Public License as published by
+  the Free Software Foundation; either version 3 of the License, or
+  (at your option) any later version.
+
+  This program is distributed in the hope that it will be useful,
+  but WITHOUT ANY WARRANTY; without even the implied warranty of
+  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+  GNU General Public License for more details.
+
+  You should have received a copy of the GNU General Public License
+  along with this program.  If not, see <http://www.gnu.org/licenses/>.
+"""
+
+import threading
+from yt.funcs import *
+
+# The idea here is that we have a set of tasks, which we want to distribute.
+# We'll try to make this forward-compatible.  To do so, we want to support the
+# idea that there's a single, global set of tasks, as well as consumers that
+# receive tasks from the main controller.  These consumers then pass them out
+# to executors.
+#
+# The middle level, the "Consumer," is only really distinct from the executor
+# in the case that there is an MPI subcommunicator.  The reason for the
+# separation is so that the controller only communicates with a single member
+# of each subcommunicator, which then passes that info back out.
+
+def locked(func):
+    @wraps(func)
+    def exclusive(self, *args, **kwargs):
+        with self.lock:
+            return func(self, *args, **kwargs)
+    return exclusive
+
+class YTTaskCommunicator(object):
+    # This should carefully be checked for a race condition, particularly in
+    # the wait() function
+    def __init__(self, interval = 2.0):
+        self.interval = interval
+        self.task_id = None
+        self.waiting = False
+        self.lock = threading.Lock()
+
+    @locked
+    def send_task(self, task_id):
+        self.task_id = task_id
+
+    @locked
+    def query(self):
+        return self.waiting
+
+    def wait(self):
+        self.waiting = True
+        while self.task_id is None:
+            time.sleep(self.interval)
+        with self.lock:
+            self.waiting = False
+            new_task_id = self.task_id
+            self.task_id = None
+        return new_task_id
+
+class YTTaskQueueController(threading.Thread):
+    # There's only one of these for every instance of yt -- whether that
+    # instance is spread across processors or not.
+    # We assume that this will exist in the process space of a consumer, so it
+    # will be threading based.
+    def __init__(self, tasks, interval = 2.0, communicators = None):
+        self.assignments = []
+        self.interval = interval
+        # Communicators can be anything but they have to implement a mechanism
+        # for saying, "I'm ready" and "I'm done"
+        self.tasks = tasks
+        self.communicators = communicators
+        threading.Thread.__init__(self)
+
+    def run(self):
+        # Now we bootstrap
+        for i,c in enumerate(self.communicators):
+            self.assignments.append(i)
+            if i == len(self.tasks): break
+            c.send_task(i)
+        while len(self.assignments) < len(self.tasks):
+            time.sleep(self.interval)
+            for i,c in enumerate(self.communicators):
+                if not c.query(): continue
+                c.send_task(len(self.assignments))
+                self.assignments.append(i)
+        for c in self.communicators:
+            c.send_task(-1)
+
+class YTTaskQueueConsumer(object):
+    # One of these will exist per individual MPI task or one per MPI
+    # subcommunicator, depending on the level of parallelism.  They serve to
+    # talk to the YTTaskQueueController on one side and possibly several
+    # YTTaskExecutors on the other.
+    def __init__(self, external_communicator, internal_communicator):
+        self.external_communicator = external_communicator
+        self.internal_communicator = internal_communicator
+
+    def next_task(self):
+        next_task = self.external_communicator.wait()
+        #self.internal_communicator.notify(next_task)
+        return next_task
+
+class YTTaskExecutor(object):
+    # One of these will exist per computational actor
+    def __init__(self, tasks, communicator):
+        self.communicator = communicator
+        self.tasks = tasks
+
+    def run(self):
+        # Note that right now this only works for a 1:1 mapping of
+        # YTTaskQueueConsumer to YTTaskExecutor
+        next_task = None
+        while next_task != -1:
+            next_task = self.communicator.next_task()
+            self.tasks[next_task]()
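The locked decorator defined in this file is the standard lock-guard pattern:
every decorated method holds self.lock for its full duration.  A
self-contained sketch (the Counter class is illustrative only):

import threading
from functools import wraps

def locked(func):
    @wraps(func)
    def exclusive(self, *args, **kwargs):
        # Hold self.lock for the duration of the call, so decorated
        # methods on the same object never interleave.
        with self.lock:
            return func(self, *args, **kwargs)
    return exclusive

class Counter(object):
    def __init__(self):
        self.lock = threading.Lock()
        self.value = 0

    @locked
    def increment(self):
        self.value += 1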


http://bitbucket.org/yt_analysis/yt/changeset/0d1bfb04f801/
changeset:   r3835:0d1bfb04f801
branch:      yt
user:        MatthewTurk
date:        2011-02-26 16:09:11
summary:     Removing bzrignore
affected #:  1 file (0 bytes)

--- a/.bzrignore	Fri Feb 25 12:41:25 2011 -0500
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,4 +0,0 @@
-__config__.py
-build
-hdf5.cfg
-setuptools-0.6c9-py2.5.egg


http://bitbucket.org/yt_analysis/yt/changeset/5a226e29204d/
changeset:   r3836:5a226e29204d
branch:      yt
user:        MatthewTurk
date:        2011-02-27 16:26:25
summary:     Merge
affected #:  0 files (0 bytes)



http://bitbucket.org/yt_analysis/yt/changeset/1fe0f891ef2a/
changeset:   r3837:1fe0f891ef2a
branch:      yt
user:        MatthewTurk
date:        2011-02-27 18:59:48
summary:     A few more fixes for the task queue.  This now works with multiple consumer
threads.  Of course, it is not designed to operate in threads, but it is
promising that it may shortly work for non-threaded, MPI task queues.

http://paste.enzotools.org/show/1518/
affected #:  1 file (1015 bytes)

--- a/yt/utilities/parallel_tools/task_queue.py	Sun Feb 27 10:26:25 2011 -0500
+++ b/yt/utilities/parallel_tools/task_queue.py	Sun Feb 27 12:59:48 2011 -0500
@@ -59,7 +59,7 @@
 
     @locked
     def query(self):
-        return self.waiting
+        return (self.waiting and self.task_id is None)
 
     def wait(self):
         self.waiting = True
@@ -95,16 +95,30 @@
             time.sleep(self.interval)
             for i,c in enumerate(self.communicators):
                 if not c.query(): continue
+                print "Sending assignment %s to %s" % (
+                    len(self.assignments), i)
                 c.send_task(len(self.assignments))
                 self.assignments.append(i)
-        for c in self.communicators:
-            c.send_task(-1)
+                if len(self.assignments) >= len(self.tasks): break
+        terminated = 0
+        while terminated != len(self.communicators):
+            for i,c in enumerate(self.communicators):
+                if not c.query(): continue
+                c.send_task(-1)
+                terminated += 1
+                print "Terminated %s" % (i)
 
 class YTTaskQueueConsumer(object):
     # One of these will exist per individual MPI task or one per MPI
     # subcommunicator, depending on the level of parallelism.  They serve to
     # talk to the YTTaskQueueController on one side and possibly several
     # YTTaskExecutors on the other.
+    #
+    # One potential setup for this, when using MPI, would be to have the
+    # Executors each have one of these, but only the head process of that
+    # subcommunicator possess an external communicator.  Then in next_task,
+    # if the external communicator exists, one would probe that; otherwise,
+    # accept a broadcast from the internal communicator's 0th task.
     def __init__(self, external_communicator, internal_communicator):
         self.external_communicator = external_communicator
         self.internal_communicator = internal_communicator
@@ -115,15 +129,21 @@
         return next_task
 
 class YTTaskExecutor(object):
+    _count = 0
     # One of these will exist per computational actor
     def __init__(self, tasks, communicator):
         self.communicator = communicator
         self.tasks = tasks
+        self.name = "Runner%03d" % (self.__class__._count)
+        self.__class__._count += 1
 
     def run(self):
         # Note that right now this only works for a 1:1 mapping of
         # YTTaskQueueConsumer to YTTaskExecutor
         next_task = None
-        while next_task != -1:
+        while 1:
             next_task = self.communicator.next_task()
+            if next_task == -1: break
+            print "Executing on %s" % (self.name),
             self.tasks[next_task]()
+        print "Concluded on %s" % (self.name)


http://bitbucket.org/yt_analysis/yt/changeset/1c4250be4c0a/
changeset:   r3838:1c4250be4c0a
branch:      yt
user:        MatthewTurk
date:        2011-03-03 03:05:10
summary:     Reorganizing the GUI directory
affected #:  7 files (46.5 KB)

--- a/yt/gui/setup.py	Sun Feb 27 12:59:48 2011 -0500
+++ b/yt/gui/setup.py	Wed Mar 02 21:05:10 2011 -0500
@@ -5,6 +5,9 @@
 def configuration(parent_package='',top_path=None):
     from numpy.distutils.misc_util import Configuration
     config = Configuration('gui',parent_package,top_path)
+    config.add_subpackage('opengl_widgets')
+    config.add_subpackage('traited_explorer')
+    config.add_subpackage('reason')
     config.make_config_py() # installs __config__.py
     config.make_svn_version_py()
     return config


--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/yt/gui/traited_explorer/plot_editors.py	Wed Mar 02 21:05:10 2011 -0500
@@ -0,0 +1,128 @@
+"""
+Figure editors for the Traits GUI
+
+Author: Matthew Turk <matthewturk at gmail.com>
+Affiliation: KIPAC/SLAC/Stanford
+Homepage: http://yt.enzotools.org/
+License:
+  Copyright (C) 2009 Matthew Turk.  All Rights Reserved.
+
+  This file is part of yt.
+
+  yt is free software; you can redistribute it and/or modify
+  it under the terms of the GNU General Public License as published by
+  the Free Software Foundation; either version 3 of the License, or
+  (at your option) any later version.
+
+  This program is distributed in the hope that it will be useful,
+  but WITHOUT ANY WARRANTY; without even the implied warranty of
+  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+  GNU General Public License for more details.
+
+  You should have received a copy of the GNU General Public License
+  along with this program.  If not, see <http://www.gnu.org/licenses/>.
+"""
+
+import sys, matplotlib
+# We want matplotlib to use a Qt4 backend
+matplotlib.use('QT4Agg')
+from matplotlib.backends.backend_qt4agg import FigureCanvasQTAgg as FigureCanvas
+from matplotlib.figure import Figure
+from matplotlib.axes import Axes
+
+from enthought.traits.api import Any, Instance
+from enthought.traits.ui.qt4.editor import Editor
+from enthought.traits.ui.qt4.basic_editor_factory import BasicEditorFactory
+
+from enthought.pyface.action.api import ActionController
+
+from enthought.traits.ui.menu import \
+    Menu, Action, Separator, OKCancelButtons, OKButton
+
+from matplotlib.backend_bases import Event as MPLEvent
+
+class _MPLFigureEditor(Editor):
+    """ Snagged from Gael's tutorial """
+
+    scrollable  = True
+    mpl_control = Instance(FigureCanvas)
+
+    def init(self, parent):
+        self.control = self._create_canvas(parent)
+        self.set_tooltip()
+
+    def update_editor(self):
+        pass
+
+    def _create_canvas(self, parent):
+        """ Create the MPL canvas. """
+        # The panel lets us add additional controls.
+        panel = wx.Panel(parent, -1)
+        sizer = wx.BoxSizer(wx.VERTICAL)
+        panel.SetSizer(sizer)
+        # matplotlib commands to create a canvas
+        self.mpl_control = FigureCanvas(panel, -1, self.value)
+        sizer.Add(self.mpl_control, 1, wx.LEFT | wx.TOP | wx.GROW | wx.SHAPED)
+        self.value.canvas.SetMinSize((10,8))
+        return panel
+
+class MPLFigureEditor(BasicEditorFactory):
+    klass = _MPLFigureEditor
+
+class MPLAction(Action):
+    event = Instance(MPLEvent)
+
+class _MPLVMPlotEditor(_MPLFigureEditor, ActionController):
+
+    def _create_canvas(self, parent):
+        panel = _MPLFigureEditor._create_canvas(self, parent)
+        self.mpl_control.mpl_connect("button_press_event", self.on_click)
+        return panel
+
+    def on_click(self, event):
+        if not event.inaxes: return
+        if event.button == 3:
+            my_menu = Menu(MPLAction(name="Recenter", action="object.recenter",
+                                     event=event),
+                           MPLAction(name="Yo!", action="object.do_something",
+                                     event=event))
+            wxmenu = my_menu.create_menu(self.mpl_control, self)
+            self.mpl_control.PopupMenuXY(wxmenu)
+
+    def perform ( self, action ):
+        """
+        This is largely taken/modified from the TreeEditor _perform method.
+        """
+        object            = self.object
+        method_name       = action.action
+        info              = self.ui.info
+        handler           = self.ui.handler
+        event             = action.event
+
+        if method_name.find( '.' ) >= 0:
+            if method_name.find( '(' ) < 0:
+                method_name += '(event)'
+            try:
+                eval( method_name, globals(),
+                      { 'object':  object,
+                        'editor':  self,
+                        'info':    info,
+                        'event':   event,
+                        'handler': handler } )
+            except:
+                # fixme: Should the exception be logged somewhere?
+                print sys.exc_info()
+                
+            return
+
+        method = getattr( handler, method_name, None )
+        if method is not None:
+            method( info, object )
+            return
+
+        if action.on_perform is not None:
+            action.on_perform( object )
+
+class MPLVMPlotEditor(BasicEditorFactory):
+    klass = _MPLVMPlotEditor
+


--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/yt/gui/traited_explorer/setup.py	Wed Mar 02 21:05:10 2011 -0500
@@ -0,0 +1,10 @@
+#!/usr/bin/env python
+import setuptools
+import os, sys, os.path
+
+def configuration(parent_package='',top_path=None):
+    from numpy.distutils.misc_util import Configuration
+    config = Configuration('traited_explorer',parent_package,top_path)
+    config.make_config_py() # installs __config__.py
+    config.make_svn_version_py()
+    return config


--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/yt/gui/traited_explorer/traited_explorer.py	Wed Mar 02 21:05:10 2011 -0500
@@ -0,0 +1,479 @@
+"""
+New version of Reason, using a TraitsUI-based approach
+
+Author: Matthew Turk <matthewturk at gmail.com>
+Affiliation: KIPAC/SLAC/Stanford
+Homepage: http://yt.enzotools.org/
+License:
+  Copyright (C) 2009 Matthew Turk.  All Rights Reserved.
+
+  This file is part of yt.
+
+  yt is free software; you can redistribute it and/or modify
+  it under the terms of the GNU General Public License as published by
+  the Free Software Foundation; either version 3 of the License, or
+  (at your option) any later version.
+
+  This program is distributed in the hope that it will be useful,
+  but WITHOUT ANY WARRANTY; without even the implied warranty of
+  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+  GNU General Public License for more details.
+
+  You should have received a copy of the GNU General Public License
+  along with this program.  If not, see <http://www.gnu.org/licenses/>.
+"""
+
+from yt.mods import *
+from yt.utilities.definitions import \
+    x_dict, \
+    y_dict
+#pf = EnzoStaticOutput("/Users/matthewturk/Research/data/galaxy1200.dir/galaxy1200")
+
+from enthought.traits.api import \
+    HasTraits, List, Instance, Str, Float, Any, Code, PythonValue, Int, CArray, \
+    Property, Enum, cached_property, DelegatesTo, Callable, Array, \
+    Button
+from enthought.traits.ui.api import \
+    Group, VGroup, HGroup, Tabbed, View, Item, ShellEditor, InstanceEditor, ListStrEditor, \
+    ListEditor, VSplit, VFlow, HSplit, VFold, ValueEditor, TreeEditor, TreeNode, RangeEditor, \
+    EnumEditor, Handler, Controller, DNDEditor
+from enthought.traits.ui.menu import \
+    Menu, Action, Separator, OKCancelButtons, OKButton
+from enthought.pyface.action.api import \
+    ActionController
+from enthought.tvtk.pyface.scene_editor import SceneEditor
+from enthought.tvtk.pyface.api import \
+    DecoratedScene
+from enthought.tvtk.pyface.scene_model import SceneModel
+
+from plot_editors import Figure, MPLFigureEditor, MPLVMPlotEditor, Axes
+
+from yt.visualization.plot_types import VMPlot, ProjectionPlot, SlicePlot
+
+import traceback
+from tvtk_interface import \
+    HierarchyImporter, YTScene
+
+class PlotCreationHandler(Controller):
+    main_window = Instance(HasTraits)
+    pnode = Instance(HasTraits)
+
+    format = Str
+    plot_type = Any
+    
+    def close(self, info, is_ok):
+        if not is_ok:
+            super(Controller, self).close(info, True)
+            return
+        spt = self.plot_type(plot_spec=self.model, pf=self.pnode.pf,
+                           name=self.format % (self.model.axis))
+        self.pnode.data_objects.append(spt)
+        self.main_window.plot_frame_tabs.append(spt)
+        spt.plot
+
+class VTKSceneCreationHandler(PlotCreationHandler):
+    importer = Instance(HierarchyImporter)
+
+    def close(self, info, is_ok):
+        if is_ok: 
+            yt_scene = YTScene(importer=self.importer,
+                scene=SceneModel())
+            spt = VTKDataObject(name = "VTK: %s" % self.pnode.pf,
+                    scene=yt_scene.scene,
+                    yt_scene=yt_scene)
+            self.pnode.data_objects.append(spt)
+            self.main_window.plot_frame_tabs.append(spt)
+        super(Controller, self).close(info, True)
+        return True
+
+
+class DataObject(HasTraits):
+    name = Str
+
+class VTKDataObject(DataObject):
+    yt_scene = Instance(YTScene)
+    scene = DelegatesTo("yt_scene")
+    add_contours = Button
+    add_isocontour = Button
+    add_x_plane = Button
+    add_y_plane = Button
+    add_z_plane = Button
+    edit_camera = Button
+    edit_operators = Button
+    edit_pipeline = Button
+    center_on_max = Button
+    operators = DelegatesTo("yt_scene")
+    traits_view = View(
+            Item("scene", editor = 
+        SceneEditor(scene_class=DecoratedScene),
+                    resizable=True, show_label=False),
+            HGroup(Item("add_contours", show_label=False),
+                   Item("add_isocontour", show_label=False),
+                   Item("add_x_plane", show_label=False),
+                   Item("add_y_plane", show_label=False),
+                   Item("add_z_plane", show_label=False),
+                   Item("edit_camera", show_label=False),
+                   Item("edit_operators", show_label=False),
+                   Item("edit_pipeline", show_label=False),
+                   Item("center_on_max", show_label=False),
+                ),
+            )
+
+    operators_edit = View(
+        Item("operators", style='custom', show_label=False,
+             editor=ListEditor(editor=InstanceEditor(),
+                               use_notebook=True),
+              name="Edit Operators"),
+        height=500.0, width=500.0, resizable=True)
+    
+    def _edit_camera_fired(self):
+        self.yt_scene.camera_path.edit_traits()
+
+    def _edit_operators_fired(self):
+        self.edit_traits(view='operators_edit')
+
+    def _edit_pipeline_fired(self):
+        from enthought.tvtk.pipeline.browser import PipelineBrowser
+        pb = PipelineBrowser(self.scene)
+        pb.show()
+
+    def _add_contours_fired(self):
+        self.yt_scene.add_contour()
+
+    def _add_isocontour_fired(self):
+        self.yt_scene.add_isocontour()
+
+    def _add_x_plane_fired(self):
+        self.yt_scene.add_x_plane()
+
+    def _add_y_plane_fired(self):
+        self.yt_scene.add_y_plane()
+
+    def _add_z_plane_fired(self):
+        self.yt_scene.add_z_plane()
+
+    def _center_on_max_fired(self):
+        self.yt_scene.do_center_on_max()
+
+class ParameterFile(HasTraits):
+    pf = Instance(EnzoStaticOutput)
+    data_objects = List(Instance(DataObject))
+    name = Str
+
+    def _name_default(self):
+        return str(self.pf)
+
+    def do_slice(self):
+        cons_view = View(
+                Item('axis'), 
+                Item('center'), 
+                Item('field', editor=EnumEditor(name='field_list')),
+                buttons=OKCancelButtons, title="Slicer: %s" % self.pf)
+        ps = SlicePlotSpec(pf=self.pf)
+        hand = PlotCreationHandler(main_window=mw, pnode=self, model=ps,
+                                   plot_type=SlicePlotTab, format="Slice: %s")
+        ps.edit_traits(cons_view, handler=hand)
+
+    def do_proj(self):
+        cons_view = View(
+                Item('axis'), 
+                Item('field', editor=EnumEditor(name='field_list')),
+                Item('weight_field', editor=EnumEditor(name='none_field_list')),
+                buttons=OKCancelButtons, title="Projector: %s" % self.pf)
+        ps = ProjPlotSpec(pf=self.pf)
+        hand = PlotCreationHandler(main_window=mw, pnode=self, model=ps,
+                                   plot_type=ProjPlotTab, format="Proj: %s")
+        ps.edit_traits(cons_view, handler=hand)
+
+    def do_vtk(self):
+        from tvtk_interface import HierarchyImporter, \
+            HierarchyImportHandler
+        importer = HierarchyImporter(pf=self.pf, max_level=self.pf.h.max_level)
+        importer.edit_traits(handler = VTKSceneCreationHandler(
+            main_window=mw, pnode=self, importer = importer))
+
+class ParameterFileCollection(HasTraits):
+    parameter_files = List(Instance(ParameterFile))
+    name = Str
+    collection = Any
+
+    def _parameter_files_default(self):
+        my_list = []
+        for f in self.collection:
+            try:
+                pf = EnzoStaticOutput(f)
+                my_list.append(
+                    ParameterFile(pf=pf, 
+                            data_objects = []))
+            except IOError: pass
+        return my_list
+
+    def _name_default(self):
+        return str(self.collection)
+
+class ParameterFileCollectionList(HasTraits):
+    parameter_file_collections = List(Instance(ParameterFileCollection))
+
+    def _parameter_file_collections_default(self):
+        return [ParameterFileCollection(collection=c)
+                for c in fido.GrabCollections()]
+
+class DataObjectList(HasTraits):
+    data_objects = List(Str)
+
+    traits_view = View(
+              Item('data_objects', show_label=False,
+                   editor=ListStrEditor())
+               )
+
+    def _data_objects_default(self):
+        return ['a','b','c']
+
+class PlotFrameTab(DataObject):
+    figure = Instance(Figure)
+
+class VMPlotSpec(HasTraits):
+    pf = Instance(EnzoStaticOutput)
+    field = Str('Density')
+    field_list = Property(depends_on = 'pf')
+
+    center = Array(shape=(3,), dtype='float64')
+    axis = Enum(0,1,2)
+
+    @cached_property
+    def _get_field_list(self):
+        fl = self.pf.h.field_list
+        df = self.pf.h.derived_field_list
+        fl.sort(); df.sort()
+        return fl + df
+
+    def _center_default(self):
+        return self.pf.h.find_max("Density")[1]
+
+class SlicePlotSpec(VMPlotSpec):
+    pass
+
+class ProjPlotSpec(VMPlotSpec):
+    weight_field = Str("None")
+    none_field_list = Property(depends_on = 'field_list')
+
+    @cached_property
+    def _get_none_field_list(self):
+        return ["None"] + self.field_list
+
+class VMPlotTab(PlotFrameTab):
+    pf = Instance(EnzoStaticOutput)
+    figure = Instance(Figure, args=())
+    field = DelegatesTo('plot_spec')
+    field_list = DelegatesTo('plot_spec')
+    plot = Instance(VMPlot)
+    axes = Instance(Axes)
+    disp_width = Float(1.0)
+    unit = Str('unitary')
+    min_width = Property(Float, depends_on=['pf','unit'])
+    max_width = Property(Float, depends_on=['pf','unit'])
+    unit_list = Property(depends_on = 'pf')
+    smallest_dx = Property(depends_on = 'pf')
+
+    traits_view = View(VGroup(
+            HGroup(Item('figure', editor=MPLVMPlotEditor(),
+                     show_label=False)),
+            HGroup(Item('disp_width',
+                     editor=RangeEditor(format="%0.2e",
+                        low_name='min_width', high_name='max_width',
+                        mode='logslider', enter_set=True),
+                     show_label=False, width=400.0),
+                   Item('unit',
+                      editor=EnumEditor(name='unit_list')),),
+            HGroup(Item('field',
+                      editor=EnumEditor(name='field_list')),
+                )),
+             resizable=True)
+
+    def __init__(self, **traits):
+        super(VMPlotTab, self).__init__(**traits)
+        self.axes = self.figure.add_subplot(111, aspect='equal')
+
+    def _field_changed(self, old, new):
+        self.plot.switch_z(new)
+        self._redraw()
+
+    @cached_property
+    def _get_min_width(self):
+        return 50.0*self.smallest_dx*self.pf[self.unit]
+
+    @cached_property
+    def _get_max_width(self):
+        return self.pf['unitary']*self.pf[self.unit]
+
+    @cached_property
+    def _get_smallest_dx(self):
+        return self.pf.h.get_smallest_dx()
+
+    @cached_property
+    def _get_unit_list(self):
+        return self.pf.units.keys()
+
+    def _unit_changed(self, old, new):
+        self.disp_width = self.disp_width * self.pf[new]/self.pf[old]
+
+    def _disp_width_changed(self, old, new):
+        self.plot.set_width(new, self.unit)
+        self._redraw()
+
+    def _redraw(self):
+        self.figure.canvas.draw()
+
+    def recenter(self, event):
+        xp, yp = event.xdata, event.ydata
+        dx = abs(self.plot.xlim[0] - self.plot.xlim[1])/self.plot.pix[0]
+        dy = abs(self.plot.ylim[0] - self.plot.ylim[1])/self.plot.pix[1]
+        x = (dx * xp) + self.plot.xlim[0]
+        y = (dy * yp) + self.plot.ylim[0]
+        xi = x_dict[self.axis]
+        yi = y_dict[self.axis]
+        cc = self.center[:]
+        cc[xi] = x; cc[yi] = y
+        self.plot.data.center = cc[:]
+        self.plot.data.set_field_parameter('center', cc.copy())
+        self.center = cc
+
+class SlicePlotTab(VMPlotTab):
+    plot_spec = Instance(SlicePlotSpec)
+
+    axis = DelegatesTo('plot_spec')
+    center = DelegatesTo('plot_spec')
+    
+    plot = Instance(SlicePlot)
+
+    def _plot_default(self):
+        coord = self.center[self.axis]
+        sl = self.pf.h.slice(self.axis, coord, center=self.center[:])
+        sp = SlicePlot(sl, self.field, self.figure, self.axes)
+        self.figure.canvas.draw()
+        return sp
+
+    def _center_changed(self, old, new):
+        #traceback.print_stack()
+        if na.all(na.abs(old - new) == 0.0): return
+        print na.abs(old-new)
+        print "Re-slicing", old, new
+        pp = self.center
+        self.plot.data.reslice(pp[self.axis])
+        self.plot._refresh_display_width()
+        self.figure.canvas.draw()
+
+class ProjPlotTab(VMPlotTab):
+    plot_spec = Instance(ProjPlotSpec)
+
+    axis = DelegatesTo('plot_spec')
+    center = DelegatesTo('plot_spec')
+    weight_field = DelegatesTo('plot_spec')
+
+    plot = Instance(ProjectionPlot)
+
+    def _plot_default(self):
+        self.field = self.field[:]
+        self.weight_field = self.weight_field[:]
+        wf = self.weight_field
+        if str(wf) == "None": wf = None
+        proj = self.pf.h.proj(self.axis, self.field, wf,
+                        center=self.center[:])
+        pp = ProjectionPlot(proj, self.field, self.figure, self.axes)
+        self.figure.canvas.draw()
+        return pp
+
+    def _center_changed(self, old, new):
+        self.plot._refresh_display_width()
+
+class SphereWrapper(DataObject):
+    radius = Float
+    unit = Str
+
+class MainWindow(HasTraits):
+    parameter_file_collections = Instance(ParameterFileCollectionList)
+    parameter_files = Instance(ParameterFileCollection)
+    plot_frame_tabs = List(Instance(DataObject))
+    open_parameterfile = Button
+    shell = PythonValue
+
+    def _shell_default(self):
+        return globals()
+    notebook_editor = ListEditor(editor=InstanceEditor(editable=True),
+                                 use_notebook=True)
+
+    traits_view = View(VSplit(
+                    HSplit(VGroup(
+                       Item('parameter_file_collections', 
+                            width=120.0, height=500.0,
+                            show_label=False,
+                            editor = TreeEditor(editable=False,
+                    nodes=[
+                        TreeNode(node_for=[ParameterFileCollectionList],
+                                 children='parameter_file_collections',
+                                 label="=Data Collections"),
+                        TreeNode(node_for=[ParameterFileCollection],
+                                 children='parameter_files',
+                                 label="name",
+                                 view=View()),
+                        TreeNode(node_for=[ParameterFile],
+                                 children='data_objects',
+                                 label="name",
+                                 menu = Menu(Action(name='Slice',
+                                                    action='object.do_slice'),
+                                             Action(name='Project',
+                                                    action='object.do_proj'),
+                                             Action(name='VTK',
+                                                    action='object.do_vtk')),
+                                 view=View()),
+                        TreeNode(node_for=[DataObject],
+                                 children='',
+                                 label="name"),
+                                ], show_icons=False),),
+                        Item('open_parameterfile', show_label=False)),
+                       Item('plot_frame_tabs', style='custom',
+                            editor = notebook_editor,
+                            show_label=False, height=500.0, width=500.0),
+                    ),
+                    HGroup(
+                       #Item('shell', editor=ShellEditor(share=True),
+                            #show_label=False, height=120.0),
+                    ),
+                ),
+               resizable=True, width=800.0, height=660.0,
+               title="reason v2 [prototype]")
+
+    def _open_parameterfile_fired(self):
+        print "OPENING"
+
+    def _parameter_file_collections_default(self):
+        return ParameterFileCollectionList()
+
+class YTScript(HasTraits):
+    code = Code
+    traits_view = View(Item('code', show_label=False),
+                       height=0.8, width=0.8, resizable=True,
+                       buttons=OKCancelButtons)
+
+class ObjectViewer(HasTraits):
+    to_view=Any
+    traits_view = View(
+            Item('to_view', editor=ValueEditor(), show_label=False),
+                     resizable=True, height=0.8, width=0.8)
+
+def view_object(obj):
+    ObjectViewer(to_view=obj).edit_traits()
+
+def run_script():
+    my_script = YTScript()
+    my_script.edit_traits()
+    return my_script
+
+class event_mock(object):
+    inaxes = True
+    button = 3
+
+dol = DataObjectList()
+mw = MainWindow(plot_frame_tabs = [])
+mw.edit_traits()
+#mw.edit_traits()
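PlotCreationHandler and VTKSceneCreationHandler above hook dialog dismissal
through TraitsUI's Controller.close(info, is_ok).  Schematically, the pattern
is as follows (NameModel and NameHandler are illustrative):

from enthought.traits.api import HasTraits, Str
from enthought.traits.ui.api import Controller, Item, View
from enthought.traits.ui.menu import OKCancelButtons

class NameModel(HasTraits):
    name = Str

class NameHandler(Controller):
    def close(self, info, is_ok):
        # is_ok distinguishes OK from Cancel; returning True allows
        # the dialog to close.
        if is_ok:
            print "Accepted:", self.model.name
        return True

model = NameModel()
model.edit_traits(view=View(Item('name'), buttons=OKCancelButtons),
                  handler=NameHandler(model))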


--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/yt/gui/traited_explorer/tvtk_interface.py	Wed Mar 02 21:05:10 2011 -0500
@@ -0,0 +1,692 @@
+"""
+This is the preliminary interface to VTK.  Note that as of VTK 5.2, it still
+requires a patchset prepared here:
+http://yt.enzotools.org/files/vtk_composite_data.zip
+
+Author: Matthew Turk <matthewturk at gmail.com>
+Affiliation: KIPAC/SLAC/Stanford
+Homepage: http://yt.enzotools.org/
+License:
+  Copyright (C) 2007-2009 Matthew Turk.  All Rights Reserved.
+
+  This file is part of yt.
+
+  yt is free software; you can redistribute it and/or modify
+  it under the terms of the GNU General Public License as published by
+  the Free Software Foundation; either version 3 of the License, or
+  (at your option) any later version.
+
+  This program is distributed in the hope that it will be useful,
+  but WITHOUT ANY WARRANTY; without even the implied warranty of
+  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+  GNU General Public License for more details.
+
+  You should have received a copy of the GNU General Public License
+  along with this program.  If not, see <http://www.gnu.org/licenses/>.
+"""
+
+from enthought.tvtk.tools import ivtk
+from enthought.tvtk.api import tvtk 
+from enthought.traits.api import \
+    Float, HasTraits, Instance, Range, Any, Delegate, Tuple, File, Int, Str, \
+    CArray, List, Button, Bool, Property, cached_property
+from enthought.traits.ui.api import View, Item, HGroup, VGroup, TableEditor, \
+    Handler, Controller, RangeEditor, EnumEditor, InstanceEditor
+from enthought.traits.ui.menu import \
+    Menu, Action, Separator, OKCancelButtons, OKButton
+from enthought.traits.ui.table_column import ObjectColumn
+from enthought.tvtk.pyface.api import DecoratedScene
+
+import enthought.pyface.api as pyface
+
+#from yt.reason import *
+import sys
+import numpy as na
+import time, pickle, os, os.path
+from yt.funcs import *
+from yt.analysis_modules.hierarchy_subset.api import \
+        ExtractedHierarchy, ExtractedParameterFile
+
+#from enthought.tvtk.pyface.ui.wx.wxVTKRenderWindowInteractor \
+     #import wxVTKRenderWindowInteractor
+
+from enthought.mayavi.core.lut_manager import LUTManager
+
+#wxVTKRenderWindowInteractor.USE_STEREO = 1
+
+class TVTKMapperWidget(HasTraits):
+    alpha = Float(1.0)
+    post_call = Any
+    lut_manager = Instance(LUTManager)
+
+    def _alpha_changed(self, old, new):
+        self.lut_manager.lut.alpha_range = (new, new)
+        self.post_call()
+
+class MappingPlane(TVTKMapperWidget):
+    plane = Instance(tvtk.Plane)
+    _coord_redit = RangeEditor(format="%0.2e",
+                              low_name='vmin', high_name='vmax',
+                              auto_set=False, enter_set=True)
+    auto_set = Bool(False)
+    traits_view = View(Item('coord', editor=_coord_redit),
+                       Item('auto_set'),
+                       Item('alpha', editor=RangeEditor(
+                              low=0.0, high=1.0,
+                              enter_set=True, auto_set=False)),
+                       Item('lut_manager', show_label=False,
+                            editor=InstanceEditor(), style='custom'))
+    vmin = Float
+    vmax = Float
+
+    def _auto_set_changed(self, old, new):
+        if new is True:
+            self._coord_redit.auto_set = True
+            self._coord_redit.enter_set = False
+        else:
+            self._coord_redit.auto_set = False
+            self._coord_redit.enter_set = True
+
+    def __init__(self, vmin, vmax, vdefault, **traits):
+        HasTraits.__init__(self, **traits)
+        self.vmin = vmin
+        self.vmax = vmax
+        trait = Range(float(vmin), float(vmax), value=vdefault)
+        self.add_trait("coord", trait)
+        self.coord = vdefault
+
+    def _coord_changed(self, old, new):
+        orig = self.plane.origin[:]
+        orig[self.axis] = new
+        self.plane.origin = orig
+        self.post_call()
+
+class MappingMarchingCubes(TVTKMapperWidget):
+    operator = Instance(tvtk.MarchingCubes)
+    mapper = Instance(tvtk.HierarchicalPolyDataMapper)
+    vmin = Float
+    vmax = Float
+    auto_set = Bool(False)
+    _val_redit = RangeEditor(format="%0.2f",
+                             low_name='vmin', high_name='vmax',
+                             auto_set=False, enter_set=True)
+    traits_view = View(Item('value', editor=_val_redit),
+                       Item('auto_set'),
+                       Item('alpha', editor=RangeEditor(
+                            low=0.0, high=1.0,
+                            enter_set=True, auto_set=False,)),
+                       Item('lut_manager', show_label=False,
+                            editor=InstanceEditor(), style='custom'))
+
+    def __init__(self, vmin, vmax, vdefault, **traits):
+        HasTraits.__init__(self, **traits)
+        self.vmin = vmin
+        self.vmax = vmax
+        trait = Range(float(vmin), float(vmax), value=vdefault)
+        self.add_trait("value", trait)
+        self.value = vdefault
+
+    def _auto_set_changed(self, old, new):
+        if new is True:
+            self._val_redit.auto_set = True
+            self._val_redit.enter_set = False
+        else:
+            self._val_redit.auto_set = False
+            self._val_redit.enter_set = True
+
+    def _value_changed(self, old, new):
+        self.operator.set_value(0, new)
+        self.post_call()
+
+class MappingIsoContour(MappingMarchingCubes):
+    operator = Instance(tvtk.ContourFilter)
+
+class CameraPosition(HasTraits):
+    position = CArray(shape=(3,), dtype='float64')
+    focal_point = CArray(shape=(3,), dtype='float64')
+    view_up = CArray(shape=(3,), dtype='float64')
+    clipping_range = CArray(shape=(2,), dtype='float64')
+    distance = Float
+    num_steps = Int(10)
+    orientation_wxyz = CArray(shape=(4,), dtype='float64')
+
+class CameraControl(HasTraits):
+    # Traits
+    positions = List(CameraPosition)
+    yt_scene = Instance('YTScene')
+    center = Delegate('yt_scene')
+    scene = Delegate('yt_scene')
+    camera = Instance(tvtk.OpenGLCamera)
+    reset_position = Instance(CameraPosition)
+    fps = Float(25.0)
+    export_filename = 'frames'
+    periodic = Bool
+
+    # UI elements
+    snapshot = Button()
+    play = Button()
+    export_frames = Button()
+    reset_path = Button()
+    recenter = Button()
+    save_path = Button()
+    load_path = Button()
+    export_path = Button()
+
+    table_def = TableEditor(
+        columns = [ ObjectColumn(name='position'),
+                    ObjectColumn(name='focal_point'),
+                    ObjectColumn(name='view_up'),
+                    ObjectColumn(name='clipping_range'),
+                    ObjectColumn(name='num_steps') ],
+        reorderable=True, deletable=True,
+        sortable=True, sort_model=True,
+        show_toolbar=True,
+        selection_mode='row',
+        selected = 'reset_position'
+                )
+
+    default_view = View(
+                VGroup(
+                  HGroup(
+                    Item('camera', show_label=False),
+                    Item('recenter', show_label=False),
+                    label='Camera'),
+                  HGroup(
+                    Item('snapshot', show_label=False),
+                    Item('play', show_label=False),
+                    Item('export_frames',show_label=False),
+                    Item('reset_path', show_label=False),
+                    Item('save_path', show_label=False),
+                    Item('load_path', show_label=False),
+                    Item('export_path', show_label=False),
+                    Item('export_filename'),
+                    Item('periodic'),
+                    Item('fps'),
+                    label='Playback'),
+                  VGroup(
+                    Item('positions', show_label=False,
+                        editor=table_def),
+                    label='Camera Path'),
+                 ),
+                resizable=True, title="Camera Path Editor",
+                       )
+
+    def _reset_position_changed(self, old, new):
+        if new is None: return
+        cam = self.scene.camera
+        cam.position = new.position
+        cam.focal_point = new.focal_point
+        cam.view_up = new.view_up
+        cam.clipping_range = new.clipping_range
+        self.scene.render()
+
+    def __init__(self, **traits):
+        HasTraits.__init__(self, **traits)
+
+    def take_snapshot(self):
+        cam = self.scene.camera
+        self.positions.append(CameraPosition(
+                position=cam.position,
+                focal_point=cam.focal_point,
+                view_up=cam.view_up,
+                clipping_range=cam.clipping_range,
+                distance=cam.distance,
+                orientation_wxyz=cam.orientation_wxyz))
+
+    def _export_path_fired(self): 
+        dlg = pyface.FileDialog(
+            action='save as',
+            wildcard="*.cpath",
+        )
+        if dlg.open() == pyface.OK:
+            print "Saving:", dlg.path
+            self.export_camera_path(dlg.path)
+
+    def export_camera_path(self, fn):
+        to_dump = dict(positions=[], focal_points=[],
+                       view_ups=[], clipping_ranges=[],
+                       distances=[], orientation_wxyzs=[])
+        def _write(cam):
+            to_dump['positions'].append(cam.position)
+            to_dump['focal_points'].append(cam.focal_point)
+            to_dump['view_ups'].append(cam.view_up)
+            to_dump['clipping_ranges'].append(cam.clipping_range)
+            to_dump['distances'].append(cam.distance)
+            to_dump['orientation_wxyzs'].append(cam.orientation_wxyz)
+        self.step_through(0.0, callback=_write)
+        pickle.dump(to_dump, open(fn, "wb"))
+
+    def _save_path_fired(self): 
+        dlg = pyface.FileDialog(
+            action='save as',
+            wildcard="*.cpath",
+        )
+        if dlg.open() == pyface.OK:
+            print "Saving:", dlg.path
+            self.dump_camera_path(dlg.path)
+
+    def dump_camera_path(self, fn):
+        to_dump = dict(positions=[], focal_points=[],
+                       view_ups=[], clipping_ranges=[],
+                       distances=[], orientation_wxyzs=[],
+                       num_stepss=[])
+        for p in self.positions:
+            to_dump['positions'].append(p.position)
+            to_dump['focal_points'].append(p.focal_point)
+            to_dump['view_ups'].append(p.view_up)
+            to_dump['clipping_ranges'].append(p.clipping_range)
+            to_dump['distances'].append(p.distance)
+            to_dump['num_stepss'].append(p.num_steps) # trailing 's' keeps the key convention uniform for loading
+            to_dump['orientation_wxyzs'].append(p.orientation_wxyz)
+        pickle.dump(to_dump, open(fn, "wb"))
+
+    def _load_path_fired(self):
+        dlg = pyface.FileDialog(
+            action='open',
+            wildcard="*.cpath",
+        )
+        if dlg.open() == pyface.OK:
+            print "Loading:", dlg.path
+            self.load_camera_path(dlg.path)
+
+    def load_camera_path(self, fn):
+        to_use = pickle.load(open(fn, "rb"))
+        self.positions = []
+        for i in range(len(to_use['positions'])):
+            dd = {}
+            for kw in to_use:
+                # Strip the trailing 's' to recover the CameraPosition trait name
+                dd[kw[:-1]] = to_use[kw][i]
+            self.positions.append(
+                CameraPosition(**dd))
+
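
The save/load pair above leans on a small naming convention: every key in
the pickled dict is a CameraPosition trait name plus a trailing 's', which
load_camera_path strips off to rebuild the keyword arguments.  A minimal
sketch of that round trip, with plain numbers standing in for real camera
state:

    import pickle
    to_dump = {'positions': [(0.0, 0.0, 1.0)], 'num_stepss': [30]}
    pickle.dump(to_dump, open("path.cpath", "wb"))
    to_use = pickle.load(open("path.cpath", "rb"))
    for i in range(len(to_use['positions'])):
        # Strip the trailing 's' to recover the trait name.
        dd = dict((kw[:-1], to_use[kw][i]) for kw in to_use)
        print dd  # {'position': (0.0, 0.0, 1.0), 'num_steps': 30}
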
+    def _recenter_fired(self):
+        self.camera.focal_point = self.center
+        self.scene.render()
+
+    def _snapshot_fired(self):
+        self.take_snapshot()
+
+    def _play_fired(self):
+        self.step_through()
+
+    def _export_frames_fired(self):
+        self.step_through(save_frames=True)
+
+    def _reset_path_fired(self):
+        self.positions = []
+
+    def step_through(self, pause = 1.0, callback=None, save_frames=False):
+        cam = self.scene.camera
+        frame_counter=0
+        if self.periodic:
+            cyclic_pos = self.positions + [self.positions[0]]
+        else:
+            cyclic_pos = self.positions
+        for i in range(len(cyclic_pos)-1):
+            pos1 = cyclic_pos[i]
+            pos2 = cyclic_pos[i+1]
+            r = pos1.num_steps
+            for p in range(pos1.num_steps):
+                po = _interpolate(pos1.position, pos2.position, p, r)
+                fp = _interpolate(pos1.focal_point, pos2.focal_point, p, r)
+                vu = _interpolate(pos1.view_up, pos2.view_up, p, r)
+                cr = _interpolate(pos1.clipping_range, pos2.clipping_range, p, r)
+                _set_cpos(cam, po, fp, vu, cr)
+                self.scene.render()
+                if callback is not None: callback(cam)
+                if save_frames:
+                    self.scene.save("%s_%0.5d.png" % (self.export_filename,frame_counter))
+                else:
+                    time.sleep(pause * 1.0/self.fps)
+                frame_counter += 1
+
+def _interpolate(q1, q2, p, r):
+    return q1 + p*(q2 - q1)/float(r)
+
+def _set_cpos(cam, po, fp, vu, cr):
+    cam.position = po
+    cam.focal_point = fp
+    cam.view_up = vu
+    cam.clipping_range = cr
+
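
step_through above walks consecutive keyframe pairs and linearly
interpolates every camera attribute between them; _interpolate computes
q1 + p*(q2 - q1)/r at step p of r, so step 0 reproduces the first keyframe
and the last step stops one increment short of the second, which the next
segment then supplies.  Restated so the arithmetic can be run on its own:

    def _interpolate(q1, q2, p, r):
        return q1 + p*(q2 - q1)/float(r)

    # Four steps from 0.0 to 1.0; the following segment begins at 1.0.
    print [_interpolate(0.0, 1.0, p, 4) for p in range(4)]
    # [0.0, 0.25, 0.5, 0.75]
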
+class HierarchyImporter(HasTraits):
+    pf = Any
+    min_grid_level = Int(0)
+    max_level = Int(1)
+    number_of_levels = Range(0, 13)
+    max_import_levels = Property(depends_on='min_grid_level')
+    field = Str("Density")
+    field_list = List
+    center_on_max = Bool(True)
+    center = CArray(shape = (3,), dtype = 'float64')
+    cache = Bool(True)
+    smoothed = Bool(True)
+    show_grids = Bool(True)
+
+    def _field_list_default(self):
+        fl = self.pf.h.field_list
+        df = self.pf.h.derived_field_list
+        fl.sort(); df.sort()
+        return fl + df
+    
+    default_view = View(Item('min_grid_level',
+                              editor=RangeEditor(low=0,
+                                                 high_name='max_level')),
+                        Item('number_of_levels', 
+                              editor=RangeEditor(low=1,
+                                                 high_name='max_import_levels')),
+                        Item('field', editor=EnumEditor(name='field_list')),
+                        Item('center_on_max'),
+                        Item('center', enabled_when='not object.center_on_max'),
+                        Item('smoothed'),
+                        Item('cache', label='Pre-load data'),
+                        Item('show_grids'),
+                        buttons=OKCancelButtons)
+
+    def _center_default(self):
+        return [0.5,0.5,0.5]
+
+    @cached_property
+    def _get_max_import_levels(self):
+        return min(13, self.pf.h.max_level - self.min_grid_level + 1)
+
+class HierarchyImportHandler(Controller):
+    importer = Instance(HierarchyImporter)
+    
+
+    def close(self, info, is_ok):
+        if is_ok: 
+            yt_scene = YTScene(
+                importer=self.importer)
+        super(HierarchyImportHandler, self).close(info, True)
+        return
+
+
+class YTScene(HasTraits):
+
+    # Traits
+    importer = Instance(HierarchyImporter)
+    pf = Delegate("importer")
+    min_grid_level = Delegate("importer")
+    number_of_levels = Delegate("importer")
+    field = Delegate("importer")
+    center = CArray(shape = (3,), dtype = 'float64')
+    center_on_max = Delegate("importer")
+    smoothed = Delegate("importer")
+    cache = Delegate("importer")
+    show_grids = Delegate("importer")
+
+    camera_path = Instance(CameraControl)
+    #window = Instance(ivtk.IVTKWithCrustAndBrowser)
+    #python_shell = Delegate('window')
+    #scene = Delegate('window')
+    scene = Instance(HasTraits)
+    operators = List(HasTraits)
+
+    # State variables
+    _grid_boundaries_actor = None
+
+    # Views
+    def _window_default(self):
+        # Should experiment with passing in a pipeline browser
+        # that has two root objects -- one for TVTKBases, i.e. the render
+        # window, and one that accepts our objects
+        return ivtk.IVTKWithCrustAndBrowser(size=(800,600), stereo=1)
+
+    def _camera_path_default(self):
+        return CameraControl(yt_scene=self, camera=self.scene.camera)
+
+    def __init__(self, **traits):
+        HasTraits.__init__(self, **traits)
+        max_level = min(self.pf.h.max_level,
+                        self.min_grid_level + self.number_of_levels - 1)
+        self.extracted_pf = ExtractedParameterFile(self.pf,
+                             self.min_grid_level, max_level, offset=None)
+        self.extracted_hierarchy = self.extracted_pf.h
+        self._hdata_set = tvtk.HierarchicalBoxDataSet()
+        self._ugs = []
+        self._grids = []
+        self._min_val = 1e60
+        self._max_val = -1e60
+        gid = 0
+        if self.cache:
+            # Touch the field on every grid up front to force the data to load.
+            for grid_set in self.extracted_hierarchy.get_levels():
+                for grid in grid_set:
+                    grid[self.field]
+        for l, grid_set in enumerate(self.extracted_hierarchy.get_levels()):
+            gid = self._add_level(grid_set, l, gid)
+        if self.show_grids:
+            self.toggle_grid_boundaries()
+            
+    def _center_default(self):
+        return self.extracted_hierarchy._convert_coords(
+                [0.5, 0.5, 0.5])
+
+    def do_center_on_max(self):
+        self.center = self.extracted_hierarchy._convert_coords(
+            self.pf.h.find_max("Density")[1])
+        self.scene.camera.focal_point = self.center
+
+    def _add_level(self, grid_set, level, gid):
+        for grid in grid_set:
+            self._hdata_set.set_refinement_ratio(level, 2)
+            gid = self._add_grid(grid, gid, level)
+        return gid
+
+    def _add_grid(self, grid, gid, level=0):
+        mylog.debug("Adding grid %s on level %s (%s)",
+                    grid.id, level, grid.Level)
+        if grid in self._grids: return gid # already added; keep gid unchanged
+        self._grids.append(grid)
+
+        scalars = grid.get_vertex_centered_data(self.field, smoothed=self.smoothed)
+
+        left_index = grid.get_global_startindex()
+        origin = grid.LeftEdge
+        dds = grid.dds
+        right_index = left_index + scalars.shape - 1
+        ug = tvtk.UniformGrid(origin=origin, spacing=dds,
+                              dimensions=grid.ActiveDimensions+1)
+        if self.field not in self.pf.field_info or \
+            self.pf.field_info[self.field].take_log:
+            scalars = na.log10(scalars)
+        ug.point_data.scalars = scalars.transpose().ravel()
+        ug.point_data.scalars.name = self.field
+        if grid.Level != self.min_grid_level + self.number_of_levels - 1:
+            ug.cell_visibility_array = grid.child_mask.transpose().ravel()
+        else:
+            ug.cell_visibility_array = na.ones(
+                    grid.ActiveDimensions, dtype='int').ravel()
+        self._ugs.append((grid,ug))
+        self._hdata_set.set_data_set(level, gid, left_index, right_index, ug)
+
+        self._min_val = min(self._min_val, scalars.min())
+        self._max_val = max(self._max_val, scalars.max())
+
+        gid += 1
+        return gid
+
+    def _add_data_to_ug(self, field):
+        for g, ug in self._ugs:
+            scalars_temp = g.get_vertex_centered_data(field, smoothed=self.smoothed)
+            ii = ug.point_data.add_array(scalars_temp.transpose().ravel())
+            ug.point_data.get_array(ii).name = field
+
+    def zoom(self, dist, unit='1'):
+        vec = self.scene.camera.focal_point - \
+              self.scene.camera.position
+        self.scene.camera.position += \
+            vec * dist/self._grids[0].pf[unit]
+        self.scene.render()
+
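
zoom() translates the camera along the view vector by dist, converting from
the named unit to code units via the parameter file.  The arithmetic in
isolation, with hypothetical values (dist = 0.5 and pf['1'] == 1.0, i.e.
code units):

    import numpy as na
    position    = na.array([0.0, 0.0, 2.0])
    focal_point = na.array([0.0, 0.0, 0.0])
    vec = focal_point - position
    position = position + vec * 0.5 / 1.0
    print position  # [ 0.  0.  1.] -- halfway to the focal point
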
+    def toggle_grid_boundaries(self):
+        if self._grid_boundaries_actor is None:
+            # We don't need to track this stuff right now.
+            ocf = tvtk.OutlineCornerFilter(
+                    executive=tvtk.CompositeDataPipeline(),
+                    corner_factor = 0.5)
+            ocf.input = self._hdata_set
+            ocm = tvtk.HierarchicalPolyDataMapper(
+                input_connection = ocf.output_port)
+            self._grid_boundaries_actor = tvtk.Actor(mapper = ocm)
+            self.scene.add_actor(self._grid_boundaries_actor)
+        else:
+            self._grid_boundaries_actor.visibility = \
+            (not self._grid_boundaries_actor.visibility)
+
+    def _add_sphere(self, origin=(0.0,0.0,0.0), normal=(0,1,0)):
+        sphere = tvtk.Sphere(center=origin, radius=0.25)
+        cutter = tvtk.Cutter(executive = tvtk.CompositeDataPipeline(),
+                             cut_function = sphere)
+        cutter.input = self._hdata_set
+        lut_manager = LUTManager(data_name=self.field, scene=self.scene)
+        smap = tvtk.HierarchicalPolyDataMapper(
+                        scalar_range=(self._min_val, self._max_val),
+                        lookup_table=lut_manager.lut,
+                        input_connection = cutter.output_port)
+        sactor = tvtk.Actor(mapper=smap)
+        self.scene.add_actors(sactor)
+        return sphere, lut_manager
+
+    def _add_plane(self, origin=(0.0,0.0,0.0), normal=(0,1,0)):
+        plane = tvtk.Plane(origin=origin, normal=normal)
+        cutter = tvtk.Cutter(executive = tvtk.CompositeDataPipeline(),
+                             cut_function = plane)
+        cutter.input = self._hdata_set
+        lut_manager = LUTManager(data_name=self.field, scene=self.scene)
+        smap = tvtk.HierarchicalPolyDataMapper(
+                        scalar_range=(self._min_val, self._max_val),
+                        lookup_table=lut_manager.lut,
+                        input_connection = cutter.output_port)
+        sactor = tvtk.Actor(mapper=smap)
+        self.scene.add_actors(sactor)
+        return plane, lut_manager
+
+    def add_plane(self, origin=(0.0,0.0,0.0), normal=(0,1,0)):
+        self.operators.append(self._add_plane(origin, normal))
+        return self.operators[-1]
+
+    def _add_axis_plane(self, axis):
+        normal = [0,0,0]
+        normal[axis] = 1
+        np, lut_manager = self._add_plane(self.center, normal=normal)
+        LE = self.extracted_hierarchy.min_left_edge
+        RE = self.extracted_hierarchy.max_right_edge
+        self.operators.append(MappingPlane(
+                vmin=LE[axis], vmax=RE[axis],
+                vdefault = self.center[axis],
+                post_call = self.scene.render,
+                plane = np, axis=axis, coord=0.0,
+                lut_manager = lut_manager,
+                scene=self.scene))
+
+    def add_x_plane(self):
+        self._add_axis_plane(0)
+        return self.operators[-1]
+
+    def add_y_plane(self):
+        self._add_axis_plane(1)
+        return self.operators[-1]
+
+    def add_z_plane(self):
+        self._add_axis_plane(2)
+        return self.operators[-1]
+
+    def add_contour(self, val=None):
+        if val is None:
+            # NaN is the only value not equal to itself; reset it if so.
+            if self._min_val != self._min_val:
+                self._min_val = 1.0
+            val = (self._max_val+self._min_val) * 0.5
+        cubes = tvtk.MarchingCubes(
+                    executive = tvtk.CompositeDataPipeline())
+        cubes.input = self._hdata_set
+        cubes.set_value(0, val)
+        lut_manager = LUTManager(data_name=self.field, scene=self.scene)
+        cube_mapper = tvtk.HierarchicalPolyDataMapper(
+                                input_connection = cubes.output_port,
+                                lookup_table=lut_manager.lut)
+        cube_mapper.color_mode = 'map_scalars'
+        cube_mapper.scalar_range = (self._min_val, self._max_val)
+        cube_actor = tvtk.Actor(mapper=cube_mapper)
+        self.scene.add_actors(cube_actor)
+        self.operators.append(MappingMarchingCubes(operator=cubes,
+                    vmin=self._min_val, vmax=self._max_val,
+                    vdefault=val,
+                    mapper = cube_mapper,
+                    post_call = self.scene.render,
+                    lut_manager = lut_manager,
+                    scene=self.scene))
+        return self.operators[-1]
+
+    def add_isocontour(self, val=None):
+        if val is None: val = (self._max_val+self._min_val) * 0.5
+        isocontour = tvtk.ContourFilter(
+                    executive = tvtk.CompositeDataPipeline())
+        isocontour.input = self._hdata_set
+        isocontour.generate_values(1, (val, val))
+        lut_manager = LUTManager(data_name=self.field, scene=self.scene)
+        isocontour_normals = tvtk.PolyDataNormals(
+            executive=tvtk.CompositeDataPipeline())
+        isocontour_normals.input_connection = isocontour.output_port
+        iso_mapper = tvtk.HierarchicalPolyDataMapper(
+                                input_connection = isocontour_normals.output_port,
+                                lookup_table=lut_manager.lut)
+        iso_mapper.scalar_range = (self._min_val, self._max_val)
+        iso_actor = tvtk.Actor(mapper=iso_mapper)
+        self.scene.add_actors(iso_actor)
+        self.operators.append(MappingIsoContour(operator=isocontour,
+                    vmin=self._min_val, vmax=self._max_val,
+                    vdefault=val,
+                    mapper = iso_mapper,
+                    post_call = self.scene.render,
+                    lut_manager = lut_manager,
+                    scene=self.scene))
+        return self.operators[-1]
+
+    def display_points(self):
+        dd = self.pf.h.all_data()
+        points = tvtk.Points()
+        good = (dd["creation_time"] > 0.0)
+        points.data = na.array([ dd["particle_position_%s" % ax][good] for ax in 'xyz' ]).transpose()
+        mass = na.log10(dd["ParticleAge"][good]) # despite the name, log10 of particle age
+        self.conn = tvtk.CellArray()
+        for i in xrange(mass.shape[0]):
+            self.conn.insert_next_cell(1)
+            self.conn.insert_cell_point(i)
+        self.points = points
+        self.pd = tvtk.PolyData(points = self.points, verts = self.conn)
+        self.pd.point_data.scalars = mass
+        lut = tvtk.LookupTable()
+        self.pdm = tvtk.PolyDataMapper(input = self.pd,
+                                       lookup_table = lut)
+        self.pdm.scalar_range = (mass.min(), mass.max())
+        self.pdm.scalar_mode = 'use_point_data'
+        self.point_actor = tvtk.Actor(mapper = self.pdm)
+        self.scene.add_actor(self.point_actor)
+
+def get_all_parents(grid):
+    # Walk up the hierarchy and return the unique topmost ancestors.
+    if len(grid.Parents) == 0: return [grid]
+    parents = []
+    for parent in grid.Parents: parents.extend(get_all_parents(parent))
+    return list(set(parents))
+
+def run_vtk():
+    gui = pyface.GUI()
+    importer = HierarchyImporter()
+    importer.edit_traits(handler = HierarchyImportHandler(
+            importer = importer))
+    #ehds.edit_traits()
+    gui.start_event_loop()
+
+
+if __name__=="__main__":
+    print "This code probably won't work.  But if you want to give it a try,"
+    print "you need:"
+    print
+    print "VTK (CVS)"
+    print "Mayavi2 (from Enthought)"
+    print
+    print "If you have 'em, give it a try!"
+    print
+    run_vtk()
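
For completeness, a rough sketch of driving the scene programmatically,
under the same caveat as the warning above; pf stands in for a parameter
file loaded elsewhere in yt, and the interactive scene/window wiring is
assumed to already be in place:

    importer = HierarchyImporter(pf=pf, max_level=pf.h.max_level)
    scene = YTScene(importer=importer)
    mc = scene.add_contour()           # marching-cubes surface at the midpoint value
    scene.camera_path.take_snapshot()  # record the current camera as a keyframe
    scene.camera_path.step_through()   # play back the interpolated path
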


http://bitbucket.org/yt_analysis/yt/changeset/ee06240efd03/
changeset:   r3839:ee06240efd03
branch:      yt
user:        MatthewTurk
date:        2011-03-03 03:36:02
summary:     Adding in a port of the yt_ajax repo's bottle-based HTTP read-eval-print-loop.
No support for anything other than passing text at the moment.  Widgets have
not been backported, and may never be.
affected #:  9 files (90.8 KB)

--- a/yt/gui/plot_editors.py	Wed Mar 02 21:05:10 2011 -0500
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,128 +0,0 @@
-"""
-Figure editors for the Traits GUI
-
-Author: Matthew Turk <matthewturk at gmail.com>
-Affiliation: KIPAC/SLAC/Stanford
-Homepage: http://yt.enzotools.org/
-License:
-  Copyright (C) 2009 Matthew Turk.  All Rights Reserved.
-
-  This file is part of yt.
-
-  yt is free software; you can redistribute it and/or modify
-  it under the terms of the GNU General Public License as published by
-  the Free Software Foundation; either version 3 of the License, or
-  (at your option) any later version.
-
-  This program is distributed in the hope that it will be useful,
-  but WITHOUT ANY WARRANTY; without even the implied warranty of
-  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-  GNU General Public License for more details.
-
-  You should have received a copy of the GNU General Public License
-  along with this program.  If not, see <http://www.gnu.org/licenses/>.
-"""
-
-import sys, matplotlib
-# We want matplotlib to use a wxPython backend
-matplotlib.use('QT4Agg')
-from matplotlib.backends.backend_qt4agg import FigureCanvasQTAgg as FigureCanvas
-from matplotlib.figure import Figure
-from matplotlib.axes import Axes
-
-from enthought.traits.api import Any, Instance
-from enthought.traits.ui.qt4.editor import Editor
-from enthought.traits.ui.qt4.basic_editor_factory import BasicEditorFactory
-
-from enthought.pyface.action.api import ActionController
-
-from enthought.traits.ui.menu import \
-    Menu, Action, Separator, OKCancelButtons, OKButton
-
-from matplotlib.backend_bases import Event as MPLEvent
-
-class _MPLFigureEditor(Editor):
-    """ Snagged from Gael's tutorial """
-
-    scrollable  = True
-    mpl_control = Instance(FigureCanvas)
-
-    def init(self, parent):
-        self.control = self._create_canvas(parent)
-        self.set_tooltip()
-
-    def update_editor(self):
-        pass
-
-    def _create_canvas(self, parent):
-        """ Create the MPL canvas. """
-        # The panel lets us add additional controls.
-        panel = wx.Panel(parent, -1)
-        sizer = wx.BoxSizer(wx.VERTICAL)
-        panel.SetSizer(sizer)
-        # matplotlib commands to create a canvas
-        self.mpl_control = FigureCanvas(panel, -1, self.value)
-        sizer.Add(self.mpl_control, 1, wx.LEFT | wx.TOP | wx.GROW | wx.SHAPED)
-        self.value.canvas.SetMinSize((10,8))
-        return panel
-
-class MPLFigureEditor(BasicEditorFactory):
-    klass = _MPLFigureEditor
-
-class MPLAction(Action):
-    event = Instance(MPLEvent)
-
-class _MPLVMPlotEditor(_MPLFigureEditor, ActionController):
-
-    def _create_canvas(self, parent):
-        panel = _MPLFigureEditor._create_canvas(self, parent)
-        self.mpl_control.mpl_connect("button_press_event", self.on_click)
-        return panel
-
-    def on_click(self, event):
-        if not event.inaxes: return
-        if event.button == 3:
-            my_menu = Menu(MPLAction(name="Recenter", action="object.recenter",
-                                     event=event),
-                           MPLAction(name="Yo!", action="object.do_something",
-                                     event=event))
-            wxmenu = my_menu.create_menu(self.mpl_control, self)
-            self.mpl_control.PopupMenuXY(wxmenu)
-
-    def perform ( self, action ):
-        """
-        This is largely taken/modified from the TreeEditor _perform method.
-        """
-        object            = self.object
-        method_name       = action.action
-        info              = self.ui.info
-        handler           = self.ui.handler
-        event             = action.event
-
-        if method_name.find( '.' ) >= 0:
-            if method_name.find( '(' ) < 0:
-                method_name += '(event)'
-            try:
-                eval( method_name, globals(),
-                      { 'object':  object,
-                        'editor':  self,
-                        'info':    info,
-                        'event':   event,
-                        'handler': handler } )
-            except:
-                # fixme: Should the exception be logged somewhere?
-                print sys.exc_info()
-                
-            return
-
-        method = getattr( handler, method_name, None )
-        if method is not None:
-            method( info, object )
-            return
-
-        if action.on_perform is not None:
-            action.on_perform( object )
-
-class MPLVMPlotEditor(BasicEditorFactory):
-    klass = _MPLVMPlotEditor
-


--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/yt/gui/reason/basic_repl.py	Wed Mar 02 21:36:02 2011 -0500
@@ -0,0 +1,140 @@
+"""
+A read-eval-print-loop.  This code was released in the CherryPy project
+initially, but has been heavily modified and again re-released in compliance
+with the terms of its original license.
+
+Author: Matthew Turk <matthewturk at gmail.com>
+Affiliation: NSF / Columbia
+Homepage: http://yt.enzotools.org/
+License:
+  Copyright (C) 2011 Matthew Turk.  All Rights Reserved.
+
+  This file is part of yt.
+
+  yt is free software; you can redistribute it and/or modify
+  it under the terms of the GNU General Public License as published by
+  the Free Software Foundation; either version 3 of the License, or
+  (at your option) any later version.
+
+  This program is distributed in the hope that it will be useful,
+  but WITHOUT ANY WARRANTY; without even the implied warranty of
+  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+  GNU General Public License for more details.
+
+  You should have received a copy of the GNU General Public License
+  along with this program.  If not, see <http://www.gnu.org/licenses/>.
+"""
+
+import codeop
+import inspect
+import re
+import json
+import sys
+import traceback
+from cStringIO import StringIO
+
+class ProgrammaticREPL(object):
+    
+    def __init__(self, locals=None):
+        self.locals = {}
+        if locals:
+            self.locals.update(locals)
+        self.buffer = []
+        # Nominally at this point we could populate our namespace with widgets
+        # or other useful functions.  We aren't really ready for that just yet.
+    
+    def push(self, line):
+        """Push 'line' and return exec results (None if more input needed)."""
+        if line == "help":
+            return "Type help(object) for help about object."
+        if line == "help()":
+            return "You cannot call help() without an argument."
+        
+        self.buffer.append(line)
+        source = "\n".join(self.buffer)
+        
+        try:
+            code = codeop.compile_command(source, "<HTTP input>", 'single')
+        except (OverflowError, SyntaxError, ValueError):
+            self.buffer = []
+            return traceback.format_exc()
+        
+        if code is None:
+            # More lines needed.
+            return None
+        
+        self.buffer = []
+        return self.execute(code)
+    
+    def execute(self, code):
+        """Execute the given code in self.locals and return any stdout/sterr."""
+        out = StringIO()
+        oldout = sys.stdout
+        olderr = sys.stderr
+        sys.stdout = sys.stderr = out
+        try:
+            try:
+                exec code in self.locals
+            except:
+                result = traceback.format_exc()
+            else:
+                result = out.getvalue()
+        finally:
+            sys.stdout = oldout
+            sys.stderr = olderr
+        out.close()
+        return result
+    
+    def dir(self, line):
+        """Examine a partial line and provide attr list of final expr."""
+        line = re.split(r"\s", line)[-1].strip()
+        # Support lines like "thing.attr" as "thing.", because the browser
+        # may not finish calculating the partial line until after the user
+        # has typed a few more keys.
+        line = ".".join(line.split(".")[:-1])
+        try:
+            result = eval("dir(%s)" % line, {}, self.locals)
+        except:
+            return []
+        return result
+    
+    def doc(self, line):
+        """Examine a partial line and provide sig+doc of final expr."""
+        line = re.split(r"\s", line)[-1].strip()
+        # Support lines like "func(text" as "func(", because the browser
+        # may not finish calculating the partial line until after the user
+        # has clicked on a few more keys.
+        line = "(".join(line.split("(")[:-1])
+        try:
+            result = eval(line, {}, self.locals)
+            try:
+                if isinstance(result, type):
+                    func = result.__init__
+                else:
+                    func = result
+                args, varargs, varkw, defaults = inspect.getargspec(func)
+            except TypeError:
+                if callable(result):
+                    doc = getattr(result, "__doc__", "") or ""
+                    return "%s\n\n%s" % (line, doc)
+                return None
+        except:
+            return None
+        
+        if args and args[0] == 'self':
+            args.pop(0)
+        missing = object()
+        defaults = defaults or []
+        defaults = ([missing] * (len(args) - len(defaults))) + list(defaults)
+        arglist = []
+        for a, d in zip(args, defaults):
+            if d is missing:
+                arglist.append(a)
+            else:
+                arglist.append("%s=%s" % (a, d))
+        if varargs:
+            arglist.append("*%s" % varargs)
+        if varkw:
+            arglist.append("**%s" % varkw)
+        doc = getattr(result, "__doc__", "") or ""
+        return "%s(%s)\n%s" % (line, ", ".join(arglist), doc)

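The push/execute protocol mirrors an interactive console: push returns None
while a block is incomplete, the captured stdout/stderr once it runs, and a
blank line closes a compound statement; dir and doc drive completion and
tooltips the same way.  A short session, as a sketch:

    from yt.gui.reason.basic_repl import ProgrammaticREPL

    repl = ProgrammaticREPL()
    print repl.push("a = 1")               # '' -- complete statement, no output
    print repl.push("for i in range(2):")  # None -- block needs more input
    repl.push("    print i + a")
    print repl.push("")                    # '1\n2\n' -- blank line closes the block
    print repl.dir("a.")                   # attribute completions for the int 'a'
    print repl.doc("len(")                 # falls back to the docstring for C functions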

--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/yt/gui/reason/bottle.py	Wed Mar 02 21:36:02 2011 -0500
@@ -0,0 +1,2095 @@
+# -*- coding: utf-8 -*-
+"""
+Bottle is a fast and simple micro-framework for small web applications. It
+offers request dispatching (Routes) with url parameter support, templates,
+a built-in HTTP Server and adapters for many third party WSGI/HTTP-server and
+template engines - all in a single file and with no dependencies other than the
+Python Standard Library.
+
+Homepage and documentation: http://bottle.paws.de/
+
+Copyright (c) 2010, Marcel Hellkamp.
+License: MIT (see LICENSE.txt for details)
+"""
+
+from __future__ import with_statement
+
+__author__ = 'Marcel Hellkamp'
+__version__ = '0.9.dev'
+__license__ = 'MIT'
+
+import base64
+import cgi
+import email.utils
+import functools
+import hmac
+import httplib
+import inspect
+import itertools
+import mimetypes
+import os
+import re
+import subprocess
+import sys
+import tempfile
+import thread
+import threading
+import time
+import tokenize
+import warnings
+
+from Cookie import SimpleCookie
+from tempfile import TemporaryFile
+from traceback import format_exc
+from urllib import quote as urlquote
+from urlparse import urlunsplit, urljoin
+
+try: from collections import MutableMapping as DictMixin
+except ImportError: # pragma: no cover
+    from UserDict import DictMixin
+
+try: from urlparse import parse_qs
+except ImportError: # pragma: no cover
+    from cgi import parse_qs
+
+try: import cPickle as pickle
+except ImportError: # pragma: no cover
+    import pickle
+
+try: from json import dumps as json_dumps
+except ImportError: # pragma: no cover
+    try: from simplejson import dumps as json_dumps
+    except ImportError: # pragma: no cover
+        try: from django.utils.simplejson import dumps as json_dumps
+        except ImportError: # pragma: no cover
+            json_dumps = None
+
+if sys.version_info >= (3,0,0): # pragma: no cover
+    # See Request.POST
+    from io import BytesIO
+    from io import TextIOWrapper
+    class NCTextIOWrapper(TextIOWrapper):
+        ''' Garbage collecting an io.TextIOWrapper(buffer) instance closes the
+            wrapped buffer. This subclass keeps it open. '''
+        def close(self): pass
+    StringType = bytes
+    def touni(x, enc='utf8'):
+        """ Convert anything to unicode """
+        return str(x, encoding=enc) if isinstance(x, bytes) else str(x)
+else:
+    from StringIO import StringIO as BytesIO
+    from types import StringType
+    NCTextIOWrapper = None
+    def touni(x, enc='utf8'):
+        """ Convert anything to unicode """
+        return x if isinstance(x, unicode) else unicode(str(x), encoding=enc)
+
+def tob(data, enc='utf8'):
+    """ Convert anything to bytes """
+    return data.encode(enc) if isinstance(data, unicode) else StringType(data)
+
+# Convert strings and unicode to native strings
+if sys.version_info >= (3,0,0):
+    tonat = touni
+else:
+    tonat = tob
+tonat.__doc__ = """ Convert anything to native strings """
+
+
+# Backwards compatibility
+def depr(message, critical=False):
+    if critical: raise DeprecationWarning(message)
+    warnings.warn(message, DeprecationWarning, stacklevel=3)
+
+# Small helpers
+def makelist(data):
+    if isinstance(data, (tuple, list, set, dict)): return list(data)
+    elif data: return [data]
+    else: return []
+
+
+
+
+
+
+###############################################################################
+# Exceptions and Events ########################################################
+###############################################################################
+
+class BottleException(Exception):
+    """ A base class for exceptions used by bottle. """
+    pass
+
+
+class HTTPResponse(BottleException):
+    """ Used to break execution and immediately finish the response """
+    def __init__(self, output='', status=200, header=None):
+        super(BottleException, self).__init__("HTTP Response %d" % status)
+        self.status = int(status)
+        self.output = output
+        self.headers = HeaderDict(header) if header else None
+
+    def apply(self, response):
+        if self.headers:
+            for key, value in self.headers.iterallitems():
+                response.headers[key] = value
+        response.status = self.status
+
+
+class HTTPError(HTTPResponse):
+    """ Used to generate an error page """
+    def __init__(self, code=500, output='Unknown Error', exception=None, traceback=None, header=None):
+        super(HTTPError, self).__init__(output, code, header)
+        self.exception = exception
+        self.traceback = traceback
+
+    def __repr__(self):
+        return ''.join(ERROR_PAGE_TEMPLATE.render(e=self))
+
+
+
+
+
+
+###############################################################################
+# Routing ######################################################################
+###############################################################################
+
+class RouteError(BottleException):
+    """ This is a base class for all routing related exceptions """
+
+
+class RouteSyntaxError(RouteError):
+    """ The route parser found something not supported by this router """
+
+
+class RouteBuildError(RouteError):
+    """ The route could not been built """
+
+
+class Route(object):
+    ''' Represents a single route and can parse the dynamic route syntax '''
+    syntax = re.compile(r'(?<!\\):([a-zA-Z_]+)?(?:#(.*?)#)?')
+    default = '[^/]+'
+
+    def __init__(self, route, target=None, name=None, static=False):
+        """ Create a Route. The route string may contain `:key`,
+            `:key#regexp#` or `:#regexp#` tokens for each dynamic part of the
+            route. These can be escaped with a backslash in front of the `:`
+            and are completely ignored if static is true. A name may be used
+            to refer to this route later (depends on Router)
+        """
+        self.route = route.replace('\\:',':')
+        self.target = target
+        self.name = name
+        self.realroute = route.replace(':','\\:') if static else route
+        self.tokens = self.syntax.split(self.realroute)
+
+    def group_re(self):
+        ''' Return a regexp pattern with named groups '''
+        out = ''
+        for i, part in enumerate(self.tokens):
+            if i%3 == 0:   out += re.escape(part.replace('\:',':'))
+            elif i%3 == 1: out += '(?P<%s>' % part if part else '(?:'
+            else:          out += '%s)' % (part or self.default)
+        return out
+        
+    def flat_re(self):
+        ''' Return a regexp pattern with non-grouping parentheses '''
+        rf = lambda m: m.group(0) if len(m.group(1)) % 2 else m.group(1) + '(?:'
+        return re.sub(r'(\\*)(\(\?P<[^>]*>|\((?!\?))', rf, self.group_re())
+
+    def format_str(self):
+        ''' Return a format string with named fields. '''
+        out, c = '', 0
+        for i, part in enumerate(self.tokens):
+            if i%3 == 0:  out += part.replace('\\:',':').replace('%','%%')
+            elif i%3 == 1:
+                if not part: part = 'anon%d' % c; c+=1
+                out += '%%(%s)s' % part
+        return out
+
+    @property
+    def static(self):
+        return len(self.tokens) == 1
+
+    def __repr__(self):
+        return "<Route(%s) />" % repr(self.realroute)
+
+    def __eq__(self, other):
+        return (self.realroute) == (other.realroute)
+
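
The syntax regex above splits a route on ':name' / ':name#regexp#' markers,
and group_re() reassembles it as a pattern with one named group per dynamic
part, the wildcard defaulting to '[^/]+'.  For 'hello/:name' the result is
equivalent to the following (anchored the way the Router compiles it):

    import re
    pattern = re.compile('^(hello/(?P<name>[^/]+))$')
    print pattern.match('hello/world').groupdict()  # {'name': 'world'}
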
+
+class Router(object):
+    ''' A route associates a string (e.g. URL) with an object (e.g. function)
+        Some dynamic routes may extract parts of the string and provide them as
+        a dictionary. This router matches a string against multiple routes and
+        returns the associated object along with the extracted data.
+    '''
+
+    def __init__(self):
+        self.routes  = []  # List of all installed routes
+        self.named   = {}  # Cache for named routes and their format strings
+        self.static  = {}  # Cache for static routes
+        self.dynamic = []  # Search structure for dynamic routes
+        self.compiled = False
+
+    def add(self, route, target=None, **ka):
+        """ Add a route->target pair or a :class:`Route` object to the Router.
+            Return the Route object. See :class:`Route` for details.
+        """
+        if not isinstance(route, Route):
+            route = Route(route, target, **ka)
+        if self.get_route(route):
+            raise RouteError('Route %s is not unique.' % route)
+        self.routes.append(route)
+        self.compiled, self.named, self.static, self.dynamic = False, {}, {}, []
+        return route
+
+    def get_route(self, route, target=None, **ka):
+        ''' Get a route from the router by specifying either the same
+            parameters as in :meth:`add` or comparing to an instance of
+            :class:`Route`. Note that not all parameters are considered by the
+            compare function. '''
+        if not isinstance(route, Route):
+            route = Route(route, **ka)
+        for known in self.routes:
+            if route == known:
+                return known
+        return None
+
+    def match(self, uri):
+        ''' Match a URI and return a (target, urlargs) tuple '''
+        if uri in self.static:
+            return self.static[uri], {}
+        for combined, subroutes in self.dynamic:
+            match = combined.match(uri)
+            if not match: continue
+            target, args_re = subroutes[match.lastindex - 1]
+            args = args_re.match(uri).groupdict() if args_re else {}
+            return target, args
+        if not self.compiled: # Late check to reduce overhead on hits
+            self.compile() # Compile and try again.
+            return self.match(uri)
+        return None, {}
+
+    def build(self, _name, **args):
+        ''' Build a URI out of a named route and values for the wildcards. '''
+        try:
+            return self.named[_name] % args
+        except KeyError:
+            if not self.compiled: # Late check to reduce overhead on hits
+                self.compile() # Compile and try again.
+                return self.build(_name, **args)
+            raise RouteBuildError("No route found with name '%s'." % _name)
+
+    def compile(self):
+        ''' Build the search structures. Call this before actually using the
+            router.'''
+        self.named = {}
+        self.static = {}
+        self.dynamic = []
+        for route in self.routes:
+            if route.name:
+                self.named[route.name] = route.format_str()
+            if route.static:
+                self.static[route.route] = route.target
+                continue
+            gpatt = route.group_re()
+            fpatt = route.flat_re()
+            try:
+                gregexp = re.compile('^(%s)$' % gpatt) if '(?P' in gpatt else None
+                combined = '%s|(^%s$)' % (self.dynamic[-1][0].pattern, fpatt)
+                self.dynamic[-1] = (re.compile(combined), self.dynamic[-1][1])
+                self.dynamic[-1][1].append((route.target, gregexp))
+            except (AssertionError, IndexError), e: # AssertionError: Too many groups
+                self.dynamic.append((re.compile('(^%s$)'%fpatt),
+                                    [(route.target, gregexp)]))
+            except re.error, e:
+                raise RouteSyntaxError("Could not add Route: %s (%s)" % (route, e))
+        self.compiled = True
+
+    def __eq__(self, other):
+        return self.routes == other.routes
+
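
compile() above packs every flattened (non-grouping) dynamic route into one
big alternation, wrapping each in a single anonymous group, so a lone match
against the combined pattern plus match.lastindex picks out the winning
subroute without testing routes one at a time:

    import re
    # Two flattened routes OR'd together, one capturing group each.
    combined = re.compile('(^hello/[^/]+$)|(^bye/[^/]+$)')
    m = combined.match('bye/now')
    print m.lastindex  # 2 -> subroutes[m.lastindex - 1] is the 'bye' route
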
+
+
+
+
+
+###############################################################################
+# Application Object ###########################################################
+###############################################################################
+
+class Bottle(object):
+    """ WSGI application """
+
+    def __init__(self, catchall=True, autojson=True, config=None):
+        """ Create a new bottle instance.
+            You usually don't do that. Use `bottle.app.push()` instead.
+        """
+        self.routes = Router()
+        self.mounts = {}
+        self.error_handler = {}
+        self.catchall = catchall
+        self.config = config or {}
+        self.serve = True
+        self.castfilter = []
+        if autojson and json_dumps:
+            self.add_filter(dict, dict2json)
+        self.hooks = {'before_request': [], 'after_request': []}
+
+    def optimize(self, *a, **ka):
+        depr("Bottle.optimize() is obsolete.")
+
+    def mount(self, app, script_path):
+        ''' Mount a Bottle application to a specific URL prefix '''
+        if not isinstance(app, Bottle):
+            raise TypeError('Only Bottle instances are supported for now.')
+        script_path = '/'.join(filter(None, script_path.split('/')))
+        path_depth = script_path.count('/') + 1
+        if not script_path:
+            raise TypeError('Empty script_path. Perhaps you want a merge()?')
+        for other in self.mounts:
+            if other.startswith(script_path):
+                raise TypeError('Conflict with existing mount: %s' % other)
+        @self.route('/%s/:#.*#' % script_path, method="ANY")
+        def mountpoint():
+            request.path_shift(path_depth)
+            return app.handle(request.path, request.method)
+        self.mounts[script_path] = app
+
+    def add_filter(self, ftype, func):
+        ''' Register a new output filter. Whenever bottle hits a handler output
+            matching `ftype`, `func` is applied to it. '''
+        if not isinstance(ftype, type):
+            raise TypeError("Expected type object, got %s" % type(ftype))
+        self.castfilter = [(t, f) for (t, f) in self.castfilter if t != ftype]
+        self.castfilter.append((ftype, func))
+        self.castfilter.sort()
+
+    def match_url(self, path, method='GET'):
+        """ Find a callback bound to a path and a specific HTTP method.
+            Return (callback, param) tuple or raise HTTPError.
+            method: HEAD falls back to GET. All methods fall back to ANY.
+        """
+        path, method = path.strip().lstrip('/'), method.upper()
+        callbacks, args = self.routes.match(path)
+        if not callbacks:
+            raise HTTPError(404, "Not found: " + path)
+        if method in callbacks:
+            return callbacks[method], args
+        if method == 'HEAD' and 'GET' in callbacks:
+            return callbacks['GET'], args
+        if 'ANY' in callbacks:
+            return callbacks['ANY'], args
+        allow = [m for m in callbacks if m != 'ANY']
+        if 'GET' in allow and 'HEAD' not in allow:
+            allow.append('HEAD')
+        raise HTTPError(405, "Method not allowed.",
+                        header=[('Allow',",".join(allow))])
+
+    def get_url(self, routename, **kargs):
+        """ Return a string that matches a named route """
+        scriptname = request.environ.get('SCRIPT_NAME', '').strip('/') + '/'
+        location = self.routes.build(routename, **kargs).lstrip('/')
+        return urljoin(urljoin('/', scriptname), location)
+
+    def route(self, path=None, method='GET', no_hooks=False, decorate=None,
+              template=None, template_opts={}, callback=None, **kargs):
+        """ Decorator: Bind a callback function to a request path.
+
+            :param path: The request path or a list of paths to listen to. See 
+              :class:`Router` for syntax details. If no path is specified, it
+              is automatically generated from the callback signature. See
+              :func:`yieldroutes` for details.
+            :param method: The HTTP method (POST, GET, ...) or a list of
+              methods to listen to. (default: GET)
+            :param decorate: A decorator or a list of decorators. These are
+              applied to the callback in reverse order.
+            :param no_hooks: If true, application hooks are not triggered
+              by this route. (default: False)
+            :param template: The template to use for this callback.
+              (default: no template)
+            :param template_opts: A dict with additional template parameters.
+            :param static: If true, all paths are static even if they contain
+              dynamic syntax tokens. (default: False)
+            :param name: The name for this route. (default: None)
+            :param callback: If set, the route decorator is directly applied
+              to the callback and the callback is returned instead. This
+              equals ``Bottle.route(...)(callback)``.
+        """
+        # @route can be used without any parameters
+        if callable(path): path, callback = None, path
+        # Build up the list of decorators
+        decorators = makelist(decorate)
+        if template:     decorators.insert(0, view(template, **template_opts))
+        if not no_hooks: decorators.append(self._add_hook_wrapper)
+        def wrapper(func):
+            callback = func
+            for decorator in reversed(decorators):
+                callback = decorator(callback)
+            functools.update_wrapper(callback, func)
+            for route in makelist(path) or yieldroutes(func):
+                for meth in makelist(method):
+                    route = route.strip().lstrip('/')
+                    meth = meth.strip().upper()
+                    old = self.routes.get_route(route, **kargs)
+                    if old:
+                        old.target[meth] = callback
+                    else:
+                        self.routes.add(route, {meth: callback}, **kargs)
+            return func
+        return wrapper(callback) if callback else wrapper
+
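
In day-to-day use the decorator is the main entry point; a minimal sketch
against a Bottle instance (the resulting app is a plain WSGI callable):

    app = Bottle()

    @app.route('/hello/:name')
    def hello(name):
        return 'Hello %s!' % name

    # Any WSGI server can now serve `app`.
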
+    def _add_hook_wrapper(self, func):
+        ''' Add hooks to a callable. See #84 '''
+        @functools.wraps(func)
+        def wrapper(*a, **ka):
+            for hook in self.hooks['before_request']: hook()
+            response.output = func(*a, **ka)
+            for hook in self.hooks['after_request']: hook()
+            return response.output
+        return wrapper
+
+    def get(self, path=None, method='GET', **kargs):
+        """ Decorator: Bind a function to a GET request path.
+            See :meth:'route' for details. """
+        return self.route(path, method, **kargs)
+
+    def post(self, path=None, method='POST', **kargs):
+        """ Decorator: Bind a function to a POST request path.
+            See :meth:'route' for details. """
+        return self.route(path, method, **kargs)
+
+    def put(self, path=None, method='PUT', **kargs):
+        """ Decorator: Bind a function to a PUT request path.
+            See :meth:'route' for details. """
+        return self.route(path, method, **kargs)
+
+    def delete(self, path=None, method='DELETE', **kargs):
+        """ Decorator: Bind a function to a DELETE request path.
+            See :meth:'route' for details. """
+        return self.route(path, method, **kargs)
+
+    def error(self, code=500):
+        """ Decorator: Register an output handler for a HTTP error code"""
+        def wrapper(handler):
+            self.error_handler[int(code)] = handler
+            return handler
+        return wrapper
+
+    def hook(self, name):
+        """ Return a decorator that adds a callback to the specified hook. """
+        def wrapper(func):
+            self.add_hook(name, func)
+            return func
+        return wrapper
+
+    def add_hook(self, name, func):
+        ''' Add a callback to a hook. '''
+        if name not in self.hooks:
+            raise ValueError("Unknown hook name %s" % name)
+        if name in ('after_request',):
+            self.hooks[name].insert(0, func)
+        else:
+            self.hooks[name].append(func)
+
+    def remove_hook(self, name, func):
+        ''' Remove a callback from a hook. '''
+        if name not in self.hooks:
+            raise ValueError("Unknown hook name %s" % name)
+        self.hooks[name].remove(func)
+
+    def handle(self, url, method):
+        """ Execute the handler bound to the specified url and method and return
+        its output. If catchall is true, exceptions are caught and returned as
+        HTTPError(500) objects. """
+        if not self.serve:
+            return HTTPError(503, "Server stopped")
+        try:
+            handler, args = self.match_url(url, method)
+            return handler(**args)
+        except HTTPResponse, e:
+            return e
+        except Exception, e:
+            if isinstance(e, (KeyboardInterrupt, SystemExit, MemoryError))\
+            or not self.catchall:
+                raise
+            return HTTPError(500, 'Unhandled exception', e, format_exc(10))
+
+    def _cast(self, out, request, response, peek=None):
+        """ Try to convert the parameter into something WSGI compatible and set
+        correct HTTP headers when possible.
+        Support: False, str, unicode, dict, HTTPResponse, HTTPError, file-like,
+        iterable of strings and iterable of unicodes
+        """
+        # Filtered types (recursive, because they may return anything)
+        for testtype, filterfunc in self.castfilter:
+            if isinstance(out, testtype):
+                return self._cast(filterfunc(out), request, response)
+
+        # Empty output is done here
+        if not out:
+            response.headers['Content-Length'] = 0
+            return []
+        # Join lists of byte or unicode strings. Mixed lists are NOT supported
+        if isinstance(out, (tuple, list))\
+        and isinstance(out[0], (StringType, unicode)):
+            out = out[0][0:0].join(out) # b'abc'[0:0] -> b''
+        # Encode unicode strings
+        if isinstance(out, unicode):
+            out = out.encode(response.charset)
+        # Byte Strings are just returned
+        if isinstance(out, StringType):
+            response.headers['Content-Length'] = str(len(out))
+            return [out]
+        # HTTPError or HTTPException (recursive, because they may wrap anything)
+        if isinstance(out, HTTPError):
+            out.apply(response)
+            return self._cast(self.error_handler.get(out.status, repr)(out), request, response)
+        if isinstance(out, HTTPResponse):
+            out.apply(response)
+            return self._cast(out.output, request, response)
+
+        # File-like objects.
+        if hasattr(out, 'read'):
+            if 'wsgi.file_wrapper' in request.environ:
+                return request.environ['wsgi.file_wrapper'](out)
+            elif hasattr(out, 'close') or not hasattr(out, '__iter__'):
+                return WSGIFileWrapper(out)
+
+        # Handle Iterables. We peek into them to detect their inner type.
+        try:
+            out = iter(out)
+            first = out.next()
+            while not first:
+                first = out.next()
+        except StopIteration:
+            return self._cast('', request, response)
+        except HTTPResponse, e:
+            first = e
+        except Exception, e:
+            first = HTTPError(500, 'Unhandled exception', e, format_exc(10))
+            if isinstance(e, (KeyboardInterrupt, SystemExit, MemoryError))\
+            or not self.catchall:
+                raise
+        # These are the inner types allowed in iterator or generator objects.
+        if isinstance(first, HTTPResponse):
+            return self._cast(first, request, response)
+        if isinstance(first, StringType):
+            return itertools.chain([first], out)
+        if isinstance(first, unicode):
+            return itertools.imap(lambda x: x.encode(response.charset),
+                                  itertools.chain([first], out))
+        return self._cast(HTTPError(500, 'Unsupported response type: %s'\
+                                         % type(first)), request, response)
+
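
The iterable branch above peeks at the first non-empty element to decide
how to encode the stream, then chains it back in front of the remainder;
the idiom in isolation:

    import itertools

    def peek_and_chain(iterable):
        it = iter(iterable)
        first = it.next()
        while not first:  # skip leading empty chunks
            first = it.next()
        return itertools.chain([first], it)

    print list(peek_and_chain(['', '', 'a', 'b']))  # ['a', 'b']
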
+    def wsgi(self, environ, start_response):
+        """ The bottle WSGI-interface. """
+        try:
+            environ['bottle.app'] = self
+            request.bind(environ)
+            response.bind()
+            out = self.handle(request.path, request.method)
+            out = self._cast(out, request, response)
+            # rfc2616 section 4.3
+            if response.status in (100, 101, 204, 304) or request.method == 'HEAD':
+                if hasattr(out, 'close'): out.close()
+                out = []
+            status = '%d %s' % (response.status, HTTP_CODES[response.status])
+            start_response(status, response.headerlist)
+            return out
+        except (KeyboardInterrupt, SystemExit, MemoryError):
+            raise
+        except Exception, e:
+            if not self.catchall: raise
+            err = '<h1>Critical error while processing request: %s</h1>' \
+                  % environ.get('PATH_INFO', '/')
+            if DEBUG:
+                err += '<h2>Error:</h2>\n<pre>%s</pre>\n' % repr(e)
+                err += '<h2>Traceback:</h2>\n<pre>%s</pre>\n' % format_exc(10)
+            environ['wsgi.errors'].write(err) #TODO: wsgi.error should not get html
+            start_response('500 INTERNAL SERVER ERROR', [('Content-Type', 'text/html')])
+            return [tob(err)]
+        
+    def __call__(self, environ, start_response):
+        return self.wsgi(environ, start_response)
+
+
+
+
+
+
+###############################################################################
+# HTTP and WSGI Tools ##########################################################
+###############################################################################
+
+class Request(threading.local, DictMixin):
+    """ Represents a single HTTP request using thread-local attributes.
+        The Request object wraps a WSGI environment and can be used as such.
+    """
+    def __init__(self, environ=None):
+        """ Create a new Request instance.
+        
+            You usually don't do this but use the global `bottle.request`
+            instance instead.
+        """
+        self.bind(environ or {})
+
+    def bind(self, environ):
+        """ Bind a new WSGI environment.
+            
+            This is done automatically for the global `bottle.request`
+            instance on every request.
+        """
+        self.environ = environ
+        # These attributes are used anyway, so it is ok to compute them here
+        self.path = '/' + environ.get('PATH_INFO', '/').lstrip('/')
+        self.method = environ.get('REQUEST_METHOD', 'GET').upper()
+
+    @property
+    def _environ(self):
+        depr("Request._environ renamed to Request.environ")
+        return self.environ
+
+    def copy(self):
+        ''' Returns a copy of self '''
+        return Request(self.environ.copy())
+
+    def path_shift(self, shift=1):
+        ''' Shift path fragments from PATH_INFO to SCRIPT_NAME and vice versa.
+
+          :param shift: The number of path fragments to shift. May be negative to
+            change the shift direction. (default: 1)
+        '''
+        script_name = self.environ.get('SCRIPT_NAME','/')
+        self['SCRIPT_NAME'], self.path = path_shift(script_name, self.path, shift)
+        self['PATH_INFO'] = self.path
+
+    def __getitem__(self, key): return self.environ[key]
+    def __delitem__(self, key): self[key] = ""; del(self.environ[key])
+    def __iter__(self): return iter(self.environ)
+    def __len__(self): return len(self.environ)
+    def keys(self): return self.environ.keys()
+    def __setitem__(self, key, value):
+        """ Shortcut for Request.environ.__setitem__ """
+        self.environ[key] = value
+        todelete = []
+        if key in ('PATH_INFO','REQUEST_METHOD'):
+            self.bind(self.environ)
+        elif key == 'wsgi.input': todelete = ('body','forms','files','params')
+        elif key == 'QUERY_STRING': todelete = ('get','params')
+        elif key.startswith('HTTP_'): todelete = ('headers', 'cookies')
+        for key in todelete:
+            if 'bottle.' + key in self.environ:
+                del self.environ['bottle.' + key]
+
+    @property
+    def query_string(self):
+        """ The content of the QUERY_STRING environment variable. """
+        return self.environ.get('QUERY_STRING', '')
+
+    @property
+    def fullpath(self):
+        """ Request path including SCRIPT_NAME (if present) """
+        return self.environ.get('SCRIPT_NAME', '').rstrip('/') + self.path
+
+    @property
+    def url(self):
+        """ Full URL as requested by the client (computed).
+
+            This value is constructed out of different environment variables
+            and includes scheme, host, port, scriptname, path and query string. 
+        """
+        scheme = self.environ.get('wsgi.url_scheme', 'http')
+        host   = self.environ.get('HTTP_X_FORWARDED_HOST', self.environ.get('HTTP_HOST', None))
+        if not host:
+            host = self.environ.get('SERVER_NAME')
+            port = self.environ.get('SERVER_PORT', '80')
+            if scheme + port not in ('https443', 'http80'):
+                host += ':' + port
+        parts = (scheme, host, urlquote(self.fullpath), self.query_string, '')
+        return urlunsplit(parts)
+
+    @property
+    def content_length(self):
+        """ Content-Length header as an integer, -1 if not specified """
+        return int(self.environ.get('CONTENT_LENGTH','') or -1)
+
+    @property
+    def header(self):
+        depr("The Request.header property was renamed to Request.headers")
+        return self.headers
+
+    @property
+    def headers(self):
+        ''' :class:`WSGIHeaderDict` filled with request headers. '''
+        if 'bottle.headers' not in self.environ:
+            self.environ['bottle.headers'] = WSGIHeaderDict(self.environ)
+        return self.environ['bottle.headers']
+
+    @property
+    def GET(self):
+        """ The QUERY_STRING parsed into a MultiDict.
+
+            Keys and values are strings. Multiple values per key are possible.
+            See MultiDict for details.
+        """
+        if 'bottle.get' not in self.environ:
+            data = parse_qs(self.query_string, keep_blank_values=True)
+            get = self.environ['bottle.get'] = MultiDict()
+            for key, values in data.iteritems():
+                for value in values:
+                    get[key] = value
+        return self.environ['bottle.get']
+
+    @property
+    def POST(self):
+        """ Property: The HTTP POST body parsed into a MultiDict.
+
+            This supports url-encoded and multipart POST requests. Multipart
+            is commonly used for file uploads and may result in some of the
+            values being cgi.FieldStorage objects instead of strings.
+
+            Multiple values per key are possible. See MultiDict for details.
+        """
+        if 'bottle.post' not in self.environ:
+            self.environ['bottle.post'] = MultiDict()
+            self.environ['bottle.forms'] = MultiDict()
+            self.environ['bottle.files'] = MultiDict()
+            safe_env = {'QUERY_STRING':''} # Build a safe environment for cgi
+            for key in ('REQUEST_METHOD', 'CONTENT_TYPE', 'CONTENT_LENGTH'):
+                if key in self.environ: safe_env[key] = self.environ[key]
+            if NCTextIOWrapper:
+                fb = NCTextIOWrapper(self.body, encoding='ISO-8859-1', newline='\n')
+                # TODO: Content-Length may be wrong now. Does cgi.FieldStorage
+                # use it at all? I think not, because all tests pass.
+            else:
+                fb = self.body
+            data = cgi.FieldStorage(fp=fb, environ=safe_env, keep_blank_values=True)
+            for item in data.list or []:
+                if item.filename:
+                    self.environ['bottle.post'][item.name] = item
+                    self.environ['bottle.files'][item.name] = item
+                else:
+                    self.environ['bottle.post'][item.name] = item.value
+                    self.environ['bottle.forms'][item.name] = item.value
+        return self.environ['bottle.post']
+
+    @property
+    def forms(self):
+        """ Property: HTTP POST form data parsed into a MultiDict. """
+        if 'bottle.forms' not in self.environ: self.POST
+        return self.environ['bottle.forms']
+
+    @property
+    def files(self):
+        """ Property: HTTP POST file uploads parsed into a MultiDict. """
+        if 'bottle.files' not in self.environ: self.POST
+        return self.environ['bottle.files']
+        
+    @property
+    def params(self):
+        """ A combined MultiDict with POST and GET parameters. """
+        if 'bottle.params' not in self.environ:
+            self.environ['bottle.params'] = MultiDict(self.GET)
+            self.environ['bottle.params'].update(dict(self.forms))
+        return self.environ['bottle.params']
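+
+    # Illustrative sketch (not from the original changeset): with a bound
+    # request `req` and QUERY_STRING 'a=1&a=2&b=3':
+    #   req.GET['a']        -> '2'   (the most recent value wins)
+    #   req.GET.getall('a') -> ['1', '2']
+    #   req.params['b']     -> '3'   (GET and POST combined)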
+
+    @property
+    def body(self):
+        """ The HTTP request body as a seekable buffer object.
+        
+            This property returns a copy of the `wsgi.input` stream and should
+            be used instead of `environ['wsgi.input']`.
+         """
+        if 'bottle.body' not in self.environ:
+            maxread = max(0, self.content_length)
+            stream = self.environ['wsgi.input']
+            body = BytesIO() if maxread < MEMFILE_MAX else TemporaryFile(mode='w+b')
+            while maxread > 0:
+                part = stream.read(min(maxread, MEMFILE_MAX))
+                if not part: #TODO: Wrong content_length. Error? Do nothing?
+                    break
+                body.write(part)
+                maxread -= len(part)
+            self.environ['wsgi.input'] = body
+            self.environ['bottle.body'] = body
+        self.environ['bottle.body'].seek(0)
+        return self.environ['bottle.body']
+
+    @property
+    def auth(self): #TODO: Tests and docs. Add support for digest. namedtuple?
+        """ HTTP authorization data as a (user, passwd) tuple. (experimental)
+        
+            This implementation currently only supports basic auth and returns
+            None on errors.
+        """
+        return parse_auth(self.headers.get('Authorization',''))
+
+    @property
+    def COOKIES(self):
+        """ Cookie information parsed into a dictionary.
+        
+            Secure cookies are NOT decoded automatically. See
+            Request.get_cookie() for details.
+        """
+        if 'bottle.cookies' not in self.environ:
+            raw_dict = SimpleCookie(self.headers.get('Cookie',''))
+            self.environ['bottle.cookies'] = {}
+            for cookie in raw_dict.itervalues():
+                self.environ['bottle.cookies'][cookie.key] = cookie.value
+        return self.environ['bottle.cookies']
+
+    def get_cookie(self, key, secret=None):
+        """ Return the content of a cookie. To read a `Secure Cookies`, use the
+            same `secret` as used to create the cookie (see
+            :meth:`Response.set_cookie`). If anything goes wrong, None is
+            returned.
+        """
+        value = self.COOKIES.get(key)
+        if secret and value:
+            dec = cookie_decode(value, secret) # (key, value) tuple or None
+            return dec[1] if dec and dec[0] == key else None
+        return value or None
+
+    @property
+    def is_ajax(self):
+        ''' True if the request was generated using XMLHttpRequest '''
+        #TODO: write tests
+        return self.headers.get('X-Requested-With') == 'XMLHttpRequest'
+
+
+
+class Response(threading.local):
+    """ Represents a single HTTP response using thread-local attributes.
+    """
+
+    def __init__(self):
+        self.bind()
+
+    def bind(self):
+        """ Resets the Response object to its factory defaults. """
+        self._COOKIES = None
+        self.status = 200
+        self.headers = HeaderDict()
+        self.content_type = 'text/html; charset=UTF-8'
+
+    @property
+    def header(self):
+        depr("Response.header renamed to Response.headers")
+        return self.headers
+
+    def copy(self):
+        ''' Returns a copy of self '''
+        copy = Response()
+        copy.status = self.status
+        copy.headers = self.headers.copy()
+        copy.content_type = self.content_type
+        return copy
+
+    def wsgiheader(self):
+        ''' Returns a WSGI-conformant list of (header, value) pairs. '''
+        for c in self.COOKIES.values():
+            if c.OutputString() not in self.headers.getall('Set-Cookie'):
+                self.headers.append('Set-Cookie', c.OutputString())
+        # rfc2616 section 10.2.3, 10.3.5
+        if self.status in (204, 304) and 'content-type' in self.headers:
+            del self.headers['content-type']
+        if self.status == 304:
+            for h in ('allow', 'content-encoding', 'content-language',
+                      'content-length', 'content-md5', 'content-range',
+                      'content-type', 'last-modified'): # + c-location, expires?
+                if h in self.headers:
+                     del self.headers[h]
+        return list(self.headers.iterallitems())
+    headerlist = property(wsgiheader)
+
+    @property
+    def charset(self):
+        """ Return the charset specified in the content-type header.
+        
+            This defaults to `UTF-8`.
+        """
+        if 'charset=' in self.content_type:
+            return self.content_type.split('charset=')[-1].split(';')[0].strip()
+        return 'UTF-8'
+
+    @property
+    def COOKIES(self):
+        """ A dict-like SimpleCookie instance. Use Response.set_cookie() instead. """
+        if not self._COOKIES:
+            self._COOKIES = SimpleCookie()
+        return self._COOKIES
+
+    def set_cookie(self, key, value, secret=None, **kargs):
+        ''' Add a cookie. If the `secret` parameter is set, this creates a
+            `Secure Cookie` (described below).
+
+            :param key: the name of the cookie.
+            :param value: the value of the cookie.
+            :param secret: required for secure cookies. (default: None)
+            :param max_age: maximum age in seconds. (default: None)
+            :param expires: a datetime object or UNIX timestamp. (default: None)
+            :param domain: the domain that is allowed to read the cookie.
+              (default: current domain)
+            :param path: limits the cookie to a given path (default: /)
+
+            If neither `expires` nor `max_age` is set (default), the cookie
+            lasts only as long as the browser is not closed.
+
+            Secure cookies may store any pickle-able object and are
+            cryptographically signed to prevent manipulation. Keep in mind that
+            cookies are limited to 4kb in most browsers.
+            
+            Warning: Secure cookies are not encrypted (the client can still see
+            the content) and not copy-protected (the client can restore an old
+            cookie). The main intention is to make pickling and unpickling
+            safe, not to store secret information on the client side.
+        '''
+        if secret:
+            value = touni(cookie_encode((key, value), secret))
+        elif not isinstance(value, basestring):
+            raise TypeError('Secret missing for non-string Cookie.')
+            
+        self.COOKIES[key] = value
+        for k, v in kargs.iteritems():
+            self.COOKIES[key][k.replace('_', '-')] = v
+
+    def delete_cookie(self, key, **kwargs):
+        ''' Delete a cookie. Be sure to use the same `domain` and `path`
+            parameters as used to create the cookie.
+        '''
+        kwargs['max_age'] = -1
+        kwargs['expires'] = 0
+        self.set_cookie(key, '', **kwargs)
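+
+    # Illustrative usage (sketch, not from the original changeset):
+    #   response.set_cookie('name', 'value', path='/')
+    #   response.set_cookie('session', {'id': 42}, secret='mykey') # signed
+    #   response.delete_cookie('name', path='/')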
+
+    def get_content_type(self):
+        """ Current 'Content-Type' header. """
+        return self.headers['Content-Type']
+
+    def set_content_type(self, value):
+        self.headers['Content-Type'] = value
+
+    content_type = property(get_content_type, set_content_type, None,
+                            get_content_type.__doc__)
+
+
+
+
+
+
+###############################################################################
+# Common Utilities #############################################################
+###############################################################################
+
+class MultiDict(DictMixin):
+    """ A dict that remembers old values for each key """
+    # collections.MutableMapping would be better for Python >= 2.6
+    def __init__(self, *a, **k):
+        self.dict = dict()
+        for k, v in dict(*a, **k).iteritems():
+            self[k] = v
+
+    def __len__(self): return len(self.dict)
+    def __iter__(self): return iter(self.dict)
+    def __contains__(self, key): return key in self.dict
+    def __delitem__(self, key): del self.dict[key]
+    def keys(self): return self.dict.keys()
+    def __getitem__(self, key): return self.get(key, KeyError, -1)
+    def __setitem__(self, key, value): self.append(key, value)
+
+    def append(self, key, value): self.dict.setdefault(key, []).append(value)
+    def replace(self, key, value): self.dict[key] = [value]
+    def getall(self, key): return self.dict.get(key) or []
+
+    def get(self, key, default=None, index=-1):
+        if key not in self.dict and default != KeyError:
+            return [default][index]
+        return self.dict[key][index]
+
+    def iterallitems(self):
+        for key, values in self.dict.iteritems():
+            for value in values:
+                yield key, value
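+
+# Illustrative behaviour (sketch, not from the original changeset):
+#   >>> d = MultiDict(a=1)
+#   >>> d['a'] = 2               # appends; the old value is remembered
+#   >>> d['a'], d.getall('a')
+#   (2, [1, 2])
+#   >>> d.get('missing', 'default')
+#   'default'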
+
+
+class HeaderDict(MultiDict):
+    """ Same as :class:`MultiDict`, but title()s the keys and overwrites by default. """
+    def __contains__(self, key): return MultiDict.__contains__(self, self.httpkey(key))
+    def __getitem__(self, key): return MultiDict.__getitem__(self, self.httpkey(key))
+    def __delitem__(self, key): return MultiDict.__delitem__(self, self.httpkey(key))
+    def __setitem__(self, key, value): self.replace(key, value)
+    def get(self, key, default=None, index=-1): return MultiDict.get(self, self.httpkey(key), default, index)
+    def append(self, key, value): return MultiDict.append(self, self.httpkey(key), str(value))
+    def replace(self, key, value): return MultiDict.replace(self, self.httpkey(key), str(value))
+    def getall(self, key): return MultiDict.getall(self, self.httpkey(key))
+    def httpkey(self, key): return str(key).replace('_','-').title()
+
+
+
+class WSGIHeaderDict(DictMixin):
+    ''' This dict-like class takes a WSGI environ dict and provides convenient
+        access to HTTP_* fields. Keys and values are stored as native strings
+        (bytes/unicode) based on the python version used (2/3) and keys are
+        case-insensitive. If the WSGI environment contains non-native strings,
+        these are de- or encoded using 'utf8' (default) or 'latin1' (fallback)
+        charset. To get the original value, use the .raw(key) method.
+
+        This is not a MultiDict because incoming headers are unique. The API
+        will remain stable even on WSGI spec changes, if possible.
+        '''
+
+    def __init__(self, environ):
+        self.cache = {}
+        self.environ = environ
+        for key, value in self.environ.iteritems():
+            key = tonat(key, 'latin1') # Headers are limited to ASCII anyway
+            if key.startswith('HTTP_'):
+                self[key[5:].replace('_','-')] = value
+
+    def __len__(self): return len(self.cache)
+    def keys(self): return self.cache.keys()
+    def __iter__(self): return iter(self.cache)
+    def __contains__(self, key): return key.title() in self.keys()
+    def __delitem__(self, key): del self.cache[key.title()]
+    def __getitem__(self, key): return self.cache[key.title()]
+    def __setitem__(self, key, value):
+        try:
+            self.cache[key.title()] = tonat(value, 'utf8')
+        except UnicodeError:
+            self.cache[key.title()] = tonat(value, 'latin1')
+
+    def raw(self, key, default=None):
+        ''' Return the raw WSGI header value for that key. '''
+        ekey = 'HTTP_%s' % key.replace('-','_').upper()
+        return self.environ.get(ekey, default)
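+
+# Illustrative behaviour (sketch, not from the original changeset):
+#   >>> h = WSGIHeaderDict({'HTTP_USER_AGENT': 'curl/7.21'})
+#   >>> h['user-agent']          # keys are case-insensitive
+#   'curl/7.21'
+#   >>> h.raw('User-Agent')      # raw lookup against the WSGI environ
+#   'curl/7.21'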
+
+
+
+
+class AppStack(list):
+    """ A stack implementation. """
+
+    def __call__(self):
+        """ Return the current default app. """
+        return self[-1]
+
+    def push(self, value=None):
+        """ Add a new Bottle instance to the stack """
+        if not isinstance(value, Bottle):
+            value = Bottle()
+        self.append(value)
+        return value
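+
+    # Illustrative behaviour (sketch, not from the original changeset):
+    #   >>> stack = AppStack()
+    #   >>> default = stack.push()   # creates and returns a fresh Bottle app
+    #   >>> stack() is default
+    #   True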
+
+class WSGIFileWrapper(object):
+
+    def __init__(self, fp, buffer_size=1024*64):
+        self.fp, self.buffer_size = fp, buffer_size
+        for attr in ('fileno', 'close', 'read', 'readlines'):
+            if hasattr(fp, attr): setattr(self, attr, getattr(fp, attr))
+
+    def __iter__(self):
+        read, buff = self.fp.read, self.buffer_size
+        while True:
+            part = read(buff)
+            if not part: break
+            yield part
+
+
+
+
+
+
+###############################################################################
+# Application Helper ###########################################################
+###############################################################################
+
+def dict2json(d):
+    ''' Serialize a dict to JSON and set the content-type accordingly. '''
+    response.content_type = 'application/json'
+    return json_dumps(d)
+
+
+def abort(code=500, text='Unknown Error: Application stopped.'):
+    """ Aborts execution and causes a HTTP error. """
+    raise HTTPError(code, text)
+
+
+def redirect(url, code=303):
+    """ Aborts execution and causes a 303 redirect """
+    scriptname = request.environ.get('SCRIPT_NAME', '').rstrip('/') + '/'
+    location = urljoin(request.url, urljoin(scriptname, url))
+    raise HTTPResponse("", status=code, header=dict(Location=location))
+
+
+def send_file(*a, **k): #BC 0.6.4
+    """ Raises the output of static_file(). (deprecated) """
+    raise static_file(*a, **k)
+
+
+def static_file(filename, root, guessmime=True, mimetype=None, download=False):
+    """ Opens a file in a safe way and returns a HTTPError object with status
+        code 200, 305, 401 or 404. Sets Content-Type, Content-Length and
+        Last-Modified header. Obeys If-Modified-Since header and HEAD requests.
+    """
+    root = os.path.abspath(root) + os.sep
+    filename = os.path.abspath(os.path.join(root, filename.strip('/\\')))
+    header = dict()
+
+    if not filename.startswith(root):
+        return HTTPError(403, "Access denied.")
+    if not os.path.exists(filename) or not os.path.isfile(filename):
+        return HTTPError(404, "File does not exist.")
+    if not os.access(filename, os.R_OK):
+        return HTTPError(403, "You do not have permission to access this file.")
+
+    if not mimetype and guessmime:
+        header['Content-Type'] = mimetypes.guess_type(filename)[0]
+    else:
+        header['Content-Type'] = mimetype if mimetype else 'text/plain'
+
+    if download == True:
+        download = os.path.basename(filename)
+    if download:
+        header['Content-Disposition'] = 'attachment; filename="%s"' % download
+
+    stats = os.stat(filename)
+    lm = time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(stats.st_mtime))
+    header['Last-Modified'] = lm
+    ims = request.environ.get('HTTP_IF_MODIFIED_SINCE')
+    if ims:
+        ims = ims.split(";")[0].strip() # IE sends "<date>; length=146"
+        ims = parse_date(ims)
+        if ims is not None and ims >= int(stats.st_mtime):
+            header['Date'] = time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime())
+            return HTTPResponse(status=304, header=header)
+    header['Content-Length'] = stats.st_size
+    if request.method == 'HEAD':
+        return HTTPResponse('', header=header)
+    else:
+        return HTTPResponse(open(filename, 'rb'), header=header)
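+
+# Illustrative usage (sketch; route and paths are hypothetical):
+#   @route('/download/:filename')
+#   def download(filename):
+#       return static_file(filename, root='/var/www/files', download=True)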
+
+
+
+
+
+
+###############################################################################
+# HTTP Utilities and MISC (TODO) ###############################################
+###############################################################################
+
+def debug(mode=True):
+    """ Change the debug level.
+    There is only one debug level supported at the moment."""
+    global DEBUG
+    DEBUG = bool(mode)
+
+
+def parse_date(ims):
+    """ Parse rfc1123, rfc850 and asctime timestamps and return UTC epoch. """
+    try:
+        ts = email.utils.parsedate_tz(ims)
+        return time.mktime(ts[:8] + (0,)) - (ts[9] or 0) - time.timezone
+    except (TypeError, ValueError, IndexError):
+        return None
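+
+# Illustrative (sketch, not from the original changeset):
+#   >>> parse_date('Sun, 06 Nov 1994 08:49:37 GMT')
+#   784111777.0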
+
+
+def parse_auth(header):
+    """ Parse rfc2617 HTTP authentication header string (basic) and return (user,pass) tuple or None"""
+    try:
+        method, data = header.split(None, 1)
+        if method.lower() == 'basic':
+            name, pwd = base64.b64decode(data).split(':', 1)
+            return name, pwd
+    except (KeyError, ValueError, TypeError):
+        return None
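+
+# Illustrative (sketch, not from the original changeset):
+#   >>> parse_auth('Basic ' + base64.b64encode('alice:s3cret'))
+#   ('alice', 's3cret')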
+
+
+def cookie_encode(data, key):
+    ''' Encode and sign a pickle-able object. Return a (byte) string '''
+    msg = base64.b64encode(pickle.dumps(data, -1))
+    sig = base64.b64encode(hmac.new(key, msg).digest())
+    return tob('!') + sig + tob('?') + msg
+
+
+def cookie_decode(data, key):
+    ''' Verify and decode an encoded string. Return an object or None.'''
+    data = tob(data)
+    if cookie_is_encoded(data):
+        sig, msg = data.split(tob('?'), 1)
+        if sig[1:] == base64.b64encode(hmac.new(key, msg).digest()):
+            return pickle.loads(base64.b64decode(msg))
+    return None
+
+
+def cookie_is_encoded(data):
+    ''' Return True if the argument looks like an encoded cookie.'''
+    return bool(data.startswith(tob('!')) and tob('?') in data)
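+
+# Illustrative round-trip (sketch, not from the original changeset):
+#   >>> c = cookie_encode(('user', 'alice'), 'secret-key')
+#   >>> cookie_is_encoded(c)
+#   True
+#   >>> cookie_decode(c, 'secret-key')
+#   ('user', 'alice')
+#   >>> cookie_decode(c, 'wrong-key') is None   # bad signature
+#   True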
+
+
+def yieldroutes(func):
+    """ Return a generator for routes that match the signature (name, args) 
+    of the func parameter. This may yield more than one route if the function
+    takes optional keyword arguments. The output is best described by example::
+    
+        a()         -> '/a'
+        b(x, y)     -> '/b/:x/:y'
+        c(x, y=5)   -> '/c/:x' and '/c/:x/:y'
+        d(x=5, y=6) -> '/d' and '/d/:x' and '/d/:x/:y'
+    """
+    path = func.__name__.replace('__','/').lstrip('/')
+    spec = inspect.getargspec(func)
+    argc = len(spec[0]) - len(spec[3] or [])
+    path += ('/:%s' * argc) % tuple(spec[0][:argc])
+    yield path
+    for arg in spec[0][argc:]:
+        path += '/:%s' % arg
+        yield path
+
+def path_shift(script_name, path_info, shift=1):
+    ''' Shift path fragments from PATH_INFO to SCRIPT_NAME and vice versa.
+
+        :return: The modified paths.
+        :param script_name: The SCRIPT_NAME path.
+        :param path_info: The PATH_INFO path.
+        :param shift: The number of path fragments to shift. May be negative to
+          change the shift direction. (default: 1)
+    '''
+    if shift == 0: return script_name, path_info
+    pathlist = path_info.strip('/').split('/')
+    scriptlist = script_name.strip('/').split('/')
+    if pathlist and pathlist[0] == '': pathlist = []
+    if scriptlist and scriptlist[0] == '': scriptlist = []
+    if shift > 0 and shift <= len(pathlist):
+        moved = pathlist[:shift]
+        scriptlist = scriptlist + moved
+        pathlist = pathlist[shift:]
+    elif shift < 0 and shift >= -len(scriptlist):
+        moved = scriptlist[shift:]
+        pathlist = moved + pathlist
+        scriptlist = scriptlist[:shift]
+    else:
+        empty = 'SCRIPT_NAME' if shift < 0 else 'PATH_INFO'
+        raise AssertionError("Cannot shift. Nothing left from %s" % empty)
+    new_script_name = '/' + '/'.join(scriptlist)
+    new_path_info = '/' + '/'.join(pathlist)
+    if path_info.endswith('/') and pathlist: new_path_info += '/'
+    return new_script_name, new_path_info
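+
+# Illustrative (sketch, not from the original changeset):
+#   >>> path_shift('/a', '/b/c/d', 1)
+#   ('/a/b', '/c/d')
+#   >>> path_shift('/a/b', '/c/d', -1)
+#   ('/a', '/b/c/d')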
+
+
+
+# Decorators
+#TODO: Replace default_app() with app()
+
+def validate(**vkargs):
+    """
+    Validates and manipulates keyword arguments by user defined callables.
+    Handles ValueError and missing arguments by raising HTTPError(403).
+    """
+    def decorator(func):
+        def wrapper(**kargs):
+            for key, value in vkargs.iteritems():
+                if key not in kargs:
+                    abort(403, 'Missing parameter: %s' % key)
+                try:
+                    kargs[key] = value(kargs[key])
+                except ValueError:
+                    abort(403, 'Wrong parameter format for: %s' % key)
+            return func(**kargs)
+        return wrapper
+    return decorator
+
+
+def make_default_app_wrapper(name):
+    ''' Return a callable that relays calls to the current default app. '''
+    @functools.wraps(getattr(Bottle, name))
+    def wrapper(*a, **ka):
+        return getattr(app(), name)(*a, **ka)
+    return wrapper
+
+for name in 'route get post put delete error mount hook'.split():
+    globals()[name] = make_default_app_wrapper(name)
+
+url = make_default_app_wrapper('get_url')
+
+
+def default():
+    depr("The default() decorator is deprecated. Use @error(404) instead.")
+    return error(404)
+
+
+
+
+
+
+###############################################################################
+# Server Adapter ###############################################################
+###############################################################################
+
+class ServerAdapter(object):
+    quiet = False
+    def __init__(self, host='127.0.0.1', port=8080, **kargs):
+        self.options = kargs
+        self.host = host
+        self.port = int(port)
+
+    def run(self, handler): # pragma: no cover
+        pass
+        
+    def __repr__(self):
+        args = ', '.join(['%s=%s'%(k,repr(v)) for k, v in self.options.items()])
+        return "%s(%s)" % (self.__class__.__name__, args)
+
+
+class CGIServer(ServerAdapter):
+    quiet = True
+    def run(self, handler): # pragma: no cover
+        from wsgiref.handlers import CGIHandler
+        CGIHandler().run(handler) # Just ignore host and port here
+
+
+class FlupFCGIServer(ServerAdapter):
+    def run(self, handler): # pragma: no cover
+        import flup.server.fcgi
+        flup.server.fcgi.WSGIServer(handler, bindAddress=(self.host, self.port)).run()
+
+
+class WSGIRefServer(ServerAdapter):
+    def run(self, handler): # pragma: no cover
+        from wsgiref.simple_server import make_server, WSGIRequestHandler
+        if self.quiet:
+            class QuietHandler(WSGIRequestHandler):
+                def log_request(*args, **kw): pass
+            self.options['handler_class'] = QuietHandler
+        srv = make_server(self.host, self.port, handler, **self.options)
+        srv.serve_forever()
+
+
+class CherryPyServer(ServerAdapter):
+    def run(self, handler): # pragma: no cover
+        from cherrypy import wsgiserver
+        server = wsgiserver.CherryPyWSGIServer((self.host, self.port), handler)
+        server.start()
+
+
+class PasteServer(ServerAdapter):
+    def run(self, handler): # pragma: no cover
+        from paste import httpserver
+        if not self.quiet:
+            from paste.translogger import TransLogger
+            handler = TransLogger(handler)
+        httpserver.serve(handler, host=self.host, port=str(self.port),
+                         **self.options)
+
+
+class MeinheldServer(ServerAdapter):
+    def run(self, handler):
+        from meinheld import server
+        server.listen((self.host, self.port))
+        server.run(handler)
+
+class FapwsServer(ServerAdapter):
+    """
+    Extremely fast webserver using libev.
+    See http://william-os4y.livejournal.com/
+    """
+    def run(self, handler): # pragma: no cover
+        import fapws._evwsgi as evwsgi
+        from fapws import base, config
+        port = self.port
+        if float(config.SERVER_IDENT[-2:]) > 0.4:
+            # fapws3 silently changed its API in 0.5
+            port = str(port)
+        evwsgi.start(self.host, port)
+        # fapws3 never releases the GIL. Complain upstream. I tried. No luck.
+        if 'BOTTLE_CHILD' in os.environ and not self.quiet:
+            print "WARNING: Auto-reloading does not work with Fapws3."
+            print "         (Fapws3 breaks python thread support)"
+        evwsgi.set_base_module(base)
+        def app(environ, start_response):
+            environ['wsgi.multiprocess'] = False
+            return handler(environ, start_response)
+        evwsgi.wsgi_cb(('', app))
+        evwsgi.run()
+
+
+class TornadoServer(ServerAdapter):
+    """ Untested. As described here:
+        http://github.com/facebook/tornado/blob/master/tornado/wsgi.py#L187 """
+    def run(self, handler): # pragma: no cover
+        import tornado.wsgi
+        import tornado.httpserver
+        import tornado.ioloop
+        container = tornado.wsgi.WSGIContainer(handler)
+        server = tornado.httpserver.HTTPServer(container)
+        server.listen(port=self.port)
+        tornado.ioloop.IOLoop.instance().start()
+
+
+class AppEngineServer(ServerAdapter):
+    """ Untested. """
+    quiet = True
+    def run(self, handler):
+        from google.appengine.ext.webapp import util
+        util.run_wsgi_app(handler)
+
+
+class TwistedServer(ServerAdapter):
+    """ Untested. """
+    def run(self, handler):
+        from twisted.web import server, wsgi
+        from twisted.python.threadpool import ThreadPool
+        from twisted.internet import reactor
+        thread_pool = ThreadPool()
+        thread_pool.start()
+        reactor.addSystemEventTrigger('after', 'shutdown', thread_pool.stop)
+        factory = server.Site(wsgi.WSGIResource(reactor, thread_pool, handler))
+        reactor.listenTCP(self.port, factory, interface=self.host)
+        reactor.run()
+
+
+class DieselServer(ServerAdapter):
+    """ Untested. """
+    def run(self, handler):
+        from diesel.protocols.wsgi import WSGIApplication
+        app = WSGIApplication(handler, port=self.port)
+        app.run()
+
+
+class GeventServer(ServerAdapter):
+    """ Untested. """
+    def run(self, handler):
+        from gevent import wsgi
+        #from gevent.hub import getcurrent
+        #self.set_context_ident(getcurrent, weakref=True) # see contextlocal
+        wsgi.WSGIServer((self.host, self.port), handler).serve_forever()
+
+
+class GunicornServer(ServerAdapter):
+    """ Untested. """
+    def run(self, handler):
+        from gunicorn.arbiter import Arbiter
+        from gunicorn.config import Config
+        handler.cfg = Config({'bind': "%s:%d" % (self.host, self.port), 'workers': 4})
+        arbiter = Arbiter(handler)
+        arbiter.run()
+
+
+class EventletServer(ServerAdapter):
+    """ Untested """
+    def run(self, handler):
+        from eventlet import wsgi, listen
+        wsgi.server(listen((self.host, self.port)), handler)
+
+
+class RocketServer(ServerAdapter):
+    """ Untested. As requested in issue 63
+        http://github.com/defnull/bottle/issues/#issue/63 """
+    def run(self, handler):
+        from rocket import Rocket
+        server = Rocket((self.host, self.port), 'wsgi', { 'wsgi_app' : handler })
+        server.start()
+
+
+class AutoServer(ServerAdapter):
+    """ Untested. """
+    adapters = [PasteServer, CherryPyServer, TwistedServer, WSGIRefServer]
+    def run(self, handler):
+        for sa in self.adapters:
+            try:
+                return sa(self.host, self.port, **self.options).run(handler)
+            except ImportError:
+                pass
+
+
+server_names = {
+    'cgi': CGIServer,
+    'flup': FlupFCGIServer,
+    'wsgiref': WSGIRefServer,
+    'cherrypy': CherryPyServer,
+    'paste': PasteServer,
+    'fapws3': FapwsServer,
+    'tornado': TornadoServer,
+    'gae': AppEngineServer,
+    'twisted': TwistedServer,
+    'diesel': DieselServer,
+    'meinheld': MeinheldServer,
+    'gunicorn': GunicornServer,
+    'eventlet': EventletServer,
+    'gevent': GeventServer,
+    'rocket': RocketServer,
+    'auto': AutoServer,
+}
+
+
+
+
+
+
+###############################################################################
+# Application Control ##########################################################
+###############################################################################
+
+def load_app(target):
+    """ Load a bottle application based on a target string and return the app
+        object.
+        
+        The target should be a valid python import path
+        (e.g. mypackage.mymodule). The default application is returned.
+        If the target contains a colon (e.g. mypackage.mymodule:myapp) the
+        module variable specified after the colon is returned instead.
+    """
+    path, name = target.split(":", 1) if ':' in target else (target, None)
+    rv = None if name else app.push()
+    __import__(path)
+    module = sys.modules[path]
+    if rv and rv in app: app.remove(rv)
+    return rv if rv else getattr(module, name)
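+
+# Illustrative usage (sketch; module names are hypothetical):
+#   app1 = load_app('mypackage.mymodule')        # the module's default app
+#   app2 = load_app('mypackage.mymodule:myapp')  # a named app object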
+
+
+def run(app=None, server='wsgiref', host='127.0.0.1', port=8080,
+        interval=1, reloader=False, quiet=False, **kargs):
+    """ Start a server instance. This method blocks until the server
+        terminates.
+
+        :param app: WSGI application or target string supported by
+               :func:`load_app`. (default: :func:`default_app`)
+        :param server: Server adapter to use. See :data:`server_names` dict
+               for valid names or pass a :class:`ServerAdapter` subclass.
+               (default: wsgiref)
+        :param host: Server address to bind to. Pass ``0.0.0.0`` to listen on
+               all interfaces including the external one. (default: 127.0.0.1)
+        :param port: Server port to bind to. Values below 1024 require root
+               privileges. (default: 8080)
+        :param reloader: Start auto-reloading server? (default: False)
+        :param interval: Auto-reloader interval in seconds (default: 1)
+        :param quiet: Suppress output to stdout and stderr? (default: False)
+        :param options: Options passed to the server adapter.
+     """
+    app = app or default_app()
+    if isinstance(app, basestring):
+        app = load_app(app)
+    if isinstance(server, basestring):
+        server = server_names.get(server)
+    if isinstance(server, type):
+        server = server(host=host, port=port, **kargs)
+    if not isinstance(server, ServerAdapter):
+        raise RuntimeError("Server must be a subclass of ServerAdapter")
+    server.quiet = server.quiet or quiet
+    if not server.quiet and not os.environ.get('BOTTLE_CHILD'):
+        print "Bottle server starting up (using %s)..." % repr(server)
+        print "Listening on http://%s:%d/" % (server.host, server.port)
+        print "Use Ctrl-C to quit."
+        print
+    try:
+        if reloader:
+            interval = min(interval, 1)
+            if os.environ.get('BOTTLE_CHILD'):
+                _reloader_child(server, app, interval)
+            else:
+                _reloader_observer(server, app, interval)
+        else:
+            server.run(app)
+    except KeyboardInterrupt:
+        pass
+    if not server.quiet and not os.environ.get('BOTTLE_CHILD'):
+        print "Shutting down..."
+
+
+class FileCheckerThread(threading.Thread):
+    ''' Thread that periodically checks for changed module files. '''
+
+    def __init__(self, lockfile, interval):
+        threading.Thread.__init__(self)
+        self.lockfile, self.interval = lockfile, interval
+        #1: lockfile too old; 2: lockfile missing
+        #3: module file changed; 5: external exit
+        self.status = 0
+
+    def run(self):
+        exists = os.path.exists
+        mtime = lambda path: os.stat(path).st_mtime
+        files = dict()
+        for module in sys.modules.values():
+            try:
+                path = inspect.getsourcefile(module)
+                if path and exists(path): files[path] = mtime(path)
+            except TypeError:
+                pass
+        while not self.status:
+            for path, lmtime in files.iteritems():
+                if not exists(path) or mtime(path) > lmtime:
+                    self.status = 3
+            if not exists(self.lockfile):
+                self.status = 2
+            elif mtime(self.lockfile) < time.time() - self.interval - 5:
+                self.status = 1
+            if not self.status:
+                time.sleep(self.interval)
+        if self.status != 5:
+            thread.interrupt_main()
+
+
+def _reloader_child(server, app, interval):
+    ''' Start the server and check for modified files in a background thread.
+        As soon as an update is detected, KeyboardInterrupt is thrown in
+        the main thread to exit the server loop. The process exits with status
+        code 3 to request a reload by the observer process. If the lockfile
+        is missing or not modified in 2*interval seconds, we assume that the
+        observer process died and exit with status code 1 or 2.
+    '''
+    lockfile = os.environ.get('BOTTLE_LOCKFILE')
+    bgcheck = FileCheckerThread(lockfile, interval)
+    try:
+        bgcheck.start()
+        server.run(app)
+    except KeyboardInterrupt:
+        pass
+    bgcheck.status, status = 5, bgcheck.status
+    bgcheck.join() # bgcheck.status == 5 --> silent exit
+    if status: sys.exit(status)
+
+
+def _reloader_observer(server, app, interval):
+    ''' Start a child process with identical commandline arguments and restart
+        it as long as it exits with status code 3. Also create a lockfile and
+        touch it (update mtime) every interval seconds.
+    '''
+    fd, lockfile = tempfile.mkstemp(prefix='bottle-reloader.', suffix='.lock')
+    os.close(fd) # We only need this file to exist. We never write to it
+    try:
+        while os.path.exists(lockfile):
+            args = [sys.executable] + sys.argv
+            environ = os.environ.copy()
+            environ['BOTTLE_CHILD'] = 'true'
+            environ['BOTTLE_LOCKFILE'] = lockfile
+            p = subprocess.Popen(args, env=environ)
+            while p.poll() is None: # Busy wait...
+                os.utime(lockfile, None) # I am alive!
+                time.sleep(interval)
+            if p.poll() != 3:
+                if os.path.exists(lockfile): os.unlink(lockfile)
+                sys.exit(p.poll())
+            elif not server.quiet:
+                print "Reloading server..."
+    except KeyboardInterrupt:
+        pass
+    if os.path.exists(lockfile): os.unlink(lockfile)
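+
+# Summary of the reload handshake implemented above: the observer re-runs
+# `sys.executable sys.argv` with BOTTLE_CHILD=true and BOTTLE_LOCKFILE set,
+# touches the lockfile every `interval` seconds while the child lives, and
+# restarts the child whenever it exits with status 3 (a module changed).
+# Any other status is propagated; a missing or stale lockfile tells the
+# child that the observer died (exit status 2 or 1, respectively).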
+
+
+
+
+
+
+###############################################################################
+# Template Adapters ############################################################
+###############################################################################
+
+class TemplateError(HTTPError):
+    def __init__(self, message):
+        HTTPError.__init__(self, 500, message)
+
+
+class BaseTemplate(object):
+    """ Base class and minimal API for template adapters """
+    extensions = ['tpl','html','thtml','stpl']
+    settings = {} #used in prepare()
+    defaults = {} #used in render()
+
+    def __init__(self, source=None, name=None, lookup=[], encoding='utf8', **settings):
+        """ Create a new template.
+        If the source parameter (str or buffer) is missing, the name argument
+        is used to guess a template filename. Subclasses can assume that
+        self.source and/or self.filename are set. Both are strings.
+        The lookup, encoding and settings parameters are stored as instance
+        variables.
+        The lookup parameter stores a list containing directory paths.
+        The encoding parameter should be used to decode byte strings or files.
+        The settings parameter contains a dict for engine-specific settings.
+        """
+        self.name = name
+        self.source = source.read() if hasattr(source, 'read') else source
+        self.filename = source.filename if hasattr(source, 'filename') else None
+        self.lookup = map(os.path.abspath, lookup)
+        self.encoding = encoding
+        self.settings = self.settings.copy() # Copy from class variable
+        self.settings.update(settings) # Apply instance settings
+        if not self.source and self.name:
+            self.filename = self.search(self.name, self.lookup)
+            if not self.filename:
+                raise TemplateError('Template %s not found.' % repr(name))
+        if not self.source and not self.filename:
+            raise TemplateError('No template specified.')
+        self.prepare(**self.settings)
+
+    @classmethod
+    def search(cls, name, lookup=[]):
+        """ Search name in all directories specified in lookup.
+        First without, then with common extensions. Return first hit. """
+        if os.path.isfile(name): return name
+        for spath in lookup:
+            fname = os.path.join(spath, name)
+            if os.path.isfile(fname):
+                return fname
+            for ext in cls.extensions:
+                if os.path.isfile('%s.%s' % (fname, ext)):
+                    return '%s.%s' % (fname, ext)
+
+    @classmethod
+    def global_config(cls, key, *args):
+        ''' This reads or sets the global settings stored in class.settings. '''
+        if args:
+            cls.settings[key] = args[0]
+        else:
+            return cls.settings[key]
+
+    def prepare(self, **options):
+        """ Run preparations (parsing, caching, ...).
+        It should be possible to call this again to refresh a template or to
+        update settings.
+        """
+        raise NotImplementedError
+
+    def render(self, *args, **kwargs):
+        """ Render the template with the specified local variables and return
+        a single byte or unicode string. If it is a byte string, the encoding
+        must match self.encoding. This method must be thread-safe!
+        Local variables may be provided in dictionaries (*args)
+        or directly, as keywords (**kwargs).
+        """
+        raise NotImplementedError
+
+
+class MakoTemplate(BaseTemplate):
+    def prepare(self, **options):
+        from mako.template import Template
+        from mako.lookup import TemplateLookup
+        options.update({'input_encoding':self.encoding})
+        #TODO: This is a hack... http://github.com/defnull/bottle/issues#issue/8
+        mylookup = TemplateLookup(directories=['.']+self.lookup, **options)
+        if self.source:
+            self.tpl = Template(self.source, lookup=mylookup)
+        else: #mako cannot guess extensions. We can, but only at the top level...
+            name = self.name
+            if not os.path.splitext(name)[1]:
+                name += os.path.splitext(self.filename)[1]
+            self.tpl = mylookup.get_template(name)
+
+    def render(self, *args, **kwargs):
+        for dictarg in args: kwargs.update(dictarg)
+        _defaults = self.defaults.copy()
+        _defaults.update(kwargs)
+        return self.tpl.render(**_defaults)
+
+
+class CheetahTemplate(BaseTemplate):
+    def prepare(self, **options):
+        from Cheetah.Template import Template
+        self.context = threading.local()
+        self.context.vars = {}
+        options['searchList'] = [self.context.vars]
+        if self.source:
+            self.tpl = Template(source=self.source, **options)
+        else:
+            self.tpl = Template(file=self.filename, **options)
+
+    def render(self, *args, **kwargs):
+        for dictarg in args: kwargs.update(dictarg)
+        self.context.vars.update(self.defaults)
+        self.context.vars.update(kwargs)
+        out = str(self.tpl)
+        self.context.vars.clear()
+        return [out]
+
+
+class Jinja2Template(BaseTemplate):
+    def prepare(self, filters=None, tests=None, **kwargs):
+        from jinja2 import Environment, FunctionLoader
+        if 'prefix' in kwargs: # TODO: to be removed after a while
+            raise RuntimeError('The keyword argument `prefix` has been removed. '
+                'Use the full jinja2 environment name line_statement_prefix instead.')
+        self.env = Environment(loader=FunctionLoader(self.loader), **kwargs)
+        if filters: self.env.filters.update(filters)
+        if tests: self.env.tests.update(tests)
+        if self.source:
+            self.tpl = self.env.from_string(self.source)
+        else:
+            self.tpl = self.env.get_template(self.filename)
+
+    def render(self, *args, **kwargs):
+        for dictarg in args: kwargs.update(dictarg)
+        _defaults = self.defaults.copy()
+        _defaults.update(kwargs)
+        return self.tpl.render(**_defaults).encode("utf-8")
+
+    def loader(self, name):
+        fname = self.search(name, self.lookup)
+        if fname:
+            with open(fname, "rb") as f:
+                return f.read().decode(self.encoding)
+
+
+class SimpleTemplate(BaseTemplate):
+    blocks = ('if','elif','else','try','except','finally','for','while','with','def','class')
+    dedent_blocks = ('elif', 'else', 'except', 'finally')
+
+    def prepare(self, escape_func=cgi.escape, noescape=False):
+        self.cache = {}
+        if self.source:
+            self.code = self.translate(self.source)
+            self.co = compile(self.code, '<string>', 'exec')
+        else:
+            self.code = self.translate(open(self.filename).read())
+            self.co = compile(self.code, self.filename, 'exec')
+        enc = self.encoding
+        self._str = lambda x: touni(x, enc)
+        self._escape = lambda x: escape_func(touni(x, enc))
+        if noescape:
+            self._str, self._escape = self._escape, self._str
+
+    def translate(self, template):
+        stack = [] # Current Code indentation
+        lineno = 0 # Current line of code
+        ptrbuffer = [] # Buffer for printable strings and token tuple instances
+        codebuffer = [] # Buffer for generated python code
+        multiline = dedent = oneline = False
+
+        def yield_tokens(line):
+            for i, part in enumerate(re.split(r'\{\{(.*?)\}\}', line)):
+                if i % 2:
+                    if part.startswith('!'): yield 'RAW', part[1:]
+                    else: yield 'CMD', part
+                else: yield 'TXT', part
+
+        def split_comment(codeline):
+            """ Removes comments from a line of code. """
+            line = codeline.splitlines()[0]
+            try:
+                tokens = list(tokenize.generate_tokens(iter([line]).next)) # readline must yield whole lines
+            except tokenize.TokenError:
+                return line.rsplit('#',1) if '#' in line else (line, '')
+            for token in tokens:
+                if token[0] == tokenize.COMMENT:
+                    start, end = token[2][1], token[3][1]
+                    return codeline[:start] + codeline[end:], codeline[start:end]
+            return line, ''
+
+        def flush(): # Flush the ptrbuffer
+            if not ptrbuffer: return
+            cline = ''
+            for line in ptrbuffer:
+                for token, value in line:
+                    if token == 'TXT': cline += repr(value)
+                    elif token == 'RAW': cline += '_str(%s)' % value
+                    elif token == 'CMD': cline += '_escape(%s)' % value
+                    cline +=  ', '
+                cline = cline[:-2] + '\\\n'
+            cline = cline[:-2]
+            if cline[:-1].endswith('\\\\\\\\\\n'):
+                cline = cline[:-7] + cline[-1] # 'nobr\\\\\n' --> 'nobr'
+            cline = '_printlist([' + cline + '])'
+            del ptrbuffer[:] # Do this before calling code() again
+            code(cline)
+
+        def code(stmt):
+            for line in stmt.splitlines():
+                codebuffer.append('  ' * len(stack) + line.strip())
+
+        for line in template.splitlines(True):
+            lineno += 1
+            line = line if isinstance(line, unicode)\
+                        else unicode(line, encoding=self.encoding)
+            if lineno <= 2:
+                m = re.search(r"%.*coding[:=]\s*([-\w\.]+)", line)
+                if m: self.encoding = m.group(1)
+                if m: line = line.replace('coding','coding (removed)')
+            if line.strip()[:2].count('%') == 1:
+                line = line.split('%',1)[1].lstrip() # Full line following the %
+                cline = split_comment(line)[0].strip()
+                cmd = re.split(r'[^a-zA-Z0-9_]', cline)[0]
+                flush() ##encoding (TODO: why?)
+                if cmd in self.blocks or multiline:
+                    cmd = multiline or cmd
+                    dedent = cmd in self.dedent_blocks # "else:"
+                    if dedent and not oneline and not multiline:
+                        cmd = stack.pop()
+                    code(line)
+                    oneline = not cline.endswith(':') # "if 1: pass"
+                    multiline = cmd if cline.endswith('\\') else False
+                    if not oneline and not multiline:
+                        stack.append(cmd)
+                elif cmd == 'end' and stack:
+                    code('#end(%s) %s' % (stack.pop(), line.strip()[3:]))
+                elif cmd == 'include':
+                    p = cline.split(None, 2)[1:]
+                    if len(p) == 2:
+                        code("_=_include(%s, _stdout, %s)" % (repr(p[0]), p[1]))
+                    elif p:
+                        code("_=_include(%s, _stdout)" % repr(p[0]))
+                    else: # Empty %include -> reverse of %rebase
+                        code("_printlist(_base)")
+                elif cmd == 'rebase':
+                    p = cline.split(None, 2)[1:]
+                    if len(p) == 2:
+                        code("globals()['_rebase']=(%s, dict(%s))" % (repr(p[0]), p[1]))
+                    elif p:
+                        code("globals()['_rebase']=(%s, {})" % repr(p[0]))
+                else:
+                    code(line)
+            else: # Line starting with text (not '%') or '%%' (escaped)
+                if line.strip().startswith('%%'):
+                    line = line.replace('%%', '%', 1)
+                ptrbuffer.append(yield_tokens(line))
+        flush()
+        return '\n'.join(codebuffer) + '\n'
+
+    def subtemplate(self, _name, _stdout, *args, **kwargs):
+        for dictarg in args: kwargs.update(dictarg)
+        if _name not in self.cache:
+            self.cache[_name] = self.__class__(name=_name, lookup=self.lookup)
+        return self.cache[_name].execute(_stdout, kwargs)
+
+    def execute(self, _stdout, *args, **kwargs):
+        for dictarg in args: kwargs.update(dictarg)
+        env = self.defaults.copy()
+        env.update({'_stdout': _stdout, '_printlist': _stdout.extend,
+               '_include': self.subtemplate, '_str': self._str,
+               '_escape': self._escape})
+        env.update(kwargs)
+        eval(self.co, env)
+        if '_rebase' in env:
+            subtpl, rargs = env['_rebase']
+            subtpl = self.__class__(name=subtpl, lookup=self.lookup)
+            rargs['_base'] = _stdout[:] #copy stdout
+            del _stdout[:] # clear stdout
+            return subtpl.execute(_stdout, rargs)
+        return env
+
+    def render(self, *args, **kwargs):
+        """ Render the template using keyword arguments as local variables. """
+        for dictarg in args: kwargs.update(dictarg)
+        stdout = []
+        self.execute(stdout, kwargs)
+        return ''.join(stdout)
+
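+# Illustrative template syntax (sketch, not from the original changeset):
+# a line starting with '%' is python code, '{{expr}}' is escaped output,
+# '{{!expr}}' is raw output, and a leading '%%' yields a literal '%'.
+#   >>> tpl = SimpleTemplate('%if name:\nHello {{name}}!\n%end\n')
+#   >>> tpl.render(name='<b>World</b>')
+#   u'Hello &lt;b&gt;World&lt;/b&gt;!\n'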
+
+def template(*args, **kwargs):
+    '''
+    Get a rendered template as a string iterator.
+    You can use a name, a filename or a template string as first parameter.
+    Template rendering arguments can be passed as dictionaries
+    or directly (as keyword arguments).
+    '''
+    tpl = args[0] if args else None
+    template_adapter = kwargs.pop('template_adapter', SimpleTemplate)
+    if tpl not in TEMPLATES or DEBUG:
+        settings = kwargs.pop('template_settings', {})
+        lookup = kwargs.pop('template_lookup', TEMPLATE_PATH)
+        if isinstance(tpl, template_adapter):
+            TEMPLATES[tpl] = tpl
+            if settings: TEMPLATES[tpl].prepare(**settings)
+        elif "\n" in tpl or "{" in tpl or "%" in tpl or '$' in tpl:
+            TEMPLATES[tpl] = template_adapter(source=tpl, lookup=lookup, **settings)
+        else:
+            TEMPLATES[tpl] = template_adapter(name=tpl, lookup=lookup, **settings)
+    if not TEMPLATES[tpl]:
+        abort(500, 'Template (%s) not found' % tpl)
+    for dictarg in args[1:]: kwargs.update(dictarg)
+    return TEMPLATES[tpl].render(kwargs)
+
+mako_template = functools.partial(template, template_adapter=MakoTemplate)
+cheetah_template = functools.partial(template, template_adapter=CheetahTemplate)
+jinja2_template = functools.partial(template, template_adapter=Jinja2Template)
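+
+# Illustrative usage (sketch, not from the original changeset):
+#   >>> template('Hello {{name}}!', name='World')
+#   u'Hello World!'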
+
+def view(tpl_name, **defaults):
+    ''' Decorator: renders a template for a handler.
+        The handler can control its behavior like that:
+
+          - return a dict of template vars to fill out the template
+          - return something other than a dict and the view decorator will not
+            process the template, but return the handler result as is.
+            This includes returning a HTTPResponse(dict) to get,
+            for instance, JSON with autojson or other castfilters.
+    '''
+    def decorator(func):
+        @functools.wraps(func)
+        def wrapper(*args, **kwargs):
+            result = func(*args, **kwargs)
+            if isinstance(result, dict):
+                tplvars = defaults.copy()
+                tplvars.update(result)
+                return template(tpl_name, **tplvars)
+            return result
+        return wrapper
+    return decorator
+
+mako_view = functools.partial(view, template_adapter=MakoTemplate)
+cheetah_view = functools.partial(view, template_adapter=CheetahTemplate)
+jinja2_view = functools.partial(view, template_adapter=Jinja2Template)
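+
+# Illustrative usage (sketch; route and template name are hypothetical):
+#   @route('/hello/:name')
+#   @view('hello_template')
+#   def hello(name):
+#       return dict(name=name)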
+
+
+
+
+
+
+###############################################################################
+# Constants and Globals ########################################################
+###############################################################################
+
+TEMPLATE_PATH = ['./', './views/']
+TEMPLATES = {}
+DEBUG = False
+MEMFILE_MAX = 1024*100
+HTTP_CODES = httplib.responses
+HTTP_CODES[418] = "I'm a teapot" # RFC 2324
+
+ERROR_PAGE_TEMPLATE = SimpleTemplate("""
+%try:
+    %from bottle import DEBUG, HTTP_CODES, request
+    %status_name = HTTP_CODES.get(e.status, 'Unknown').title()
+    <!DOCTYPE HTML PUBLIC "-//IETF//DTD HTML 2.0//EN">
+    <html>
+        <head>
+            <title>Error {{e.status}}: {{status_name}}</title>
+            <style type="text/css">
+              html {background-color: #eee; font-family: sans;}
+              body {background-color: #fff; border: 1px solid #ddd; padding: 15px; margin: 15px;}
+              pre {background-color: #eee; border: 1px solid #ddd; padding: 5px;}
+            </style>
+        </head>
+        <body>
+            <h1>Error {{e.status}}: {{status_name}}</h1>
+            <p>Sorry, the requested URL <tt>{{request.url}}</tt> caused an error:</p>
+            <pre>{{str(e.output)}}</pre>
+            %if DEBUG and e.exception:
+              <h2>Exception:</h2>
+              <pre>{{repr(e.exception)}}</pre>
+            %end
+            %if DEBUG and e.traceback:
+              <h2>Traceback:</h2>
+              <pre>{{e.traceback}}</pre>
+            %end
+        </body>
+    </html>
+%except ImportError:
+    <b>ImportError:</b> Could not generate the error page. Please add bottle to sys.path
+%end
+""")
+""" The HTML template used for error messages """
+
+request = Request()
+""" Whenever a page is requested, the :class:`Bottle` WSGI handler stores
+metadata about the current request into this instance of :class:`Request`.
+It is thread-safe and can be accessed from within handler functions. """
+
+response = Response()
+""" The :class:`Bottle` WSGI handler uses metadata assigned to this instance
+of :class:`Response` to generate the WSGI response. """
+
+local = threading.local()
+""" Thread-local namespace. Not used by Bottle, but could get handy """
+
+# Initialize app stack (create first empty Bottle app)
+# BC: 0.6.4 and needed for run()
+app = default_app = AppStack()
+app.push()


--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/yt/gui/reason/bottle_mods.py	Wed Mar 02 21:36:02 2011 -0500
@@ -0,0 +1,71 @@
+"""
+Modifications and extensions to Bottle, to make it slightly more useful for
+yt's purposes
+
+Author: Matthew Turk <matthewturk at gmail.com>
+Affiliation: NSF / Columbia
+Homepage: http://yt.enzotools.org/
+License:
+  Copyright (C) 2011 Matthew Turk.  All Rights Reserved.
+
+  This file is part of yt.
+
+  yt is free software; you can redistribute it and/or modify
+  it under the terms of the GNU General Public License as published by
+  the Free Software Foundation; either version 3 of the License, or
+  (at your option) any later version.
+
+  This program is distributed in the hope that it will be useful,
+  but WITHOUT ANY WARRANTY; without even the implied warranty of
+  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+  GNU General Public License for more details.
+
+  You should have received a copy of the GNU General Public License
+  along with this program.  If not, see <http://www.gnu.org/licenses/>.
+"""
+
+from .bottle import server_names, debug, route, run
+import uuid
+
+route_functions = {}
+
+def preroute(future_route, *args, **kwargs):
+    def router(func):
+        route_functions[future_route] = (args, kwargs, func)
+        return func
+    return router
+
+def uuid_serve_functions(pre_routed, open_browser=False):
+    debug(mode=True)
+    token = uuid.uuid1()
+    for r in pre_routed:
+        args, kwargs, f = pre_routed[r]
+        if r[0] == "/": r = r[1:]
+        rp = "/%s/%s" % (token, r)
+        print "Routing from %s => %s" % (rp, f.func_name)
+        route(rp, *args, **kwargs)(f)
+    print "Greetings! Your private token is %s ." % token
+    print
+    print "Please direct your browser to:"
+    print
+    print "     http://localhost:8080/%s/" % token
+    print
+    print
+    if open_browser:
+        # We do some fancy footwork so that we can open the browser while the
+        # server starts up.  I got this from some recipe whose URL escapes me.
+        # Thank you, to whoever wrote it!
+        def local_browse():
+            """Start a browser after waiting for half a second."""
+            import webbrowser, threading
+            def _local_browse():
+                webbrowser.open('http://localhost:%s/%s/' % (8080, token))
+            thread = threading.Timer(0.5, _local_browse)
+            thread.start()
+        local_browse()
+    # Right now we only really support the built-in wsgiref, but this may
+    # change if we start using Rocket.
+    server_type = server_names.get("wsgiref")
+    server = server_type(host='localhost', port=8080)
+    #repl.locals['server'] = server
+    run(server=server)
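
A hedged usage sketch for the two helpers above (not part of the changeset):
preroute() records a handler in route_functions before the per-session UUID
prefix exists, and uuid_serve_functions() later binds every recorded handler
under a private URL of the form /<token>/<route> and starts the server.  The
handler name and body here are hypothetical:

    from yt.gui.reason.bottle_mods import preroute, uuid_serve_functions, \
        route_functions

    @preroute("/hello", method="GET")
    def hello():
        return "Greetings from yt!"

    # Binds /hello as /<token>/hello and serves on localhost:8080.
    uuid_serve_functions(route_functions, open_browser=False)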


--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/yt/gui/reason/http_repl.py	Wed Mar 02 21:36:02 2011 -0500
@@ -0,0 +1,85 @@
+"""
+A read-eval-print-loop that is served up through Bottle and accepts its
+commands through HTTP
+
+Author: Matthew Turk <matthewturk at gmail.com>
+Affiliation: NSF / Columbia
+Homepage: http://yt.enzotools.org/
+License:
+  Copyright (C) 2011 Matthew Turk.  All Rights Reserved.
+
+  This file is part of yt.
+
+  yt is free software; you can redistribute it and/or modify
+  it under the terms of the GNU General Public License as published by
+  the Free Software Foundation; either version 3 of the License, or
+  (at your option) any later version.
+
+  This program is distributed in the hope that it will be useful,
+  but WITHOUT ANY WARRANTY; without even the implied warranty of
+  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+  GNU General Public License for more details.
+
+  You should have received a copy of the GNU General Public License
+  along with this program.  If not, see <http://www.gnu.org/licenses/>.
+"""
+
+import json
+import os
+
+from .bottle_mods import preroute
+from .bottle import request, response
+from .basic_repl import ProgrammaticREPL
+
+# Assumed location of the static httprepl.html that index() serves below.
+localDir = os.path.dirname(__file__)
+
+class HTTPREPL(ProgrammaticREPL):
+
+    def __init__(self, locals=None):
+        # First we do the standard initialization
+        ProgrammaticREPL.__init__(self, locals)
+        # Now, since we want to only preroute functions we know about, and
+        # since they have different arguments, and most of all because we only
+        # want to add them to the routing tables (which are a singleton for the
+        # entire interpreter state) we apply all the pre-routing now, rather
+        # than through metaclasses or other fancy decorating.
+        preroute_table = dict(index = ("/", "GET"),
+                              push = ("/push", "POST"),
+                              dir = ("/dir", "GET"),
+                              doc = ("/doc", "GET"))
+        for v, args in preroute_table.items():
+            preroute(args[0], method=args[1])(getattr(self, v))
+
+    def index(self):
+        """Return an HTTP-based Read-Eval-Print-Loop terminal."""
+        # For now this doesn't work!  We will need to move to a better method
+        # for this.
+        return open(os.path.join(localDir, "httprepl.html")).read()
+        
+    def push(self):
+        """Push 'line' and return exec results as a bare response."""
+        line = request.POST['line']
+        result = ProgrammaticREPL.push(self, line)
+        new_values = self.locals.pop("new_values", "")
+        if result is None:
+            # More input lines needed.
+            response.status = 204
+        return json.dumps( dict(text = result, new_values = new_values ))
+
+    def dir(self):
+        """Push 'line' and return result of eval on the final expr."""
+        line = request.GET['line']
+        result = ProgrammaticREPL.dir(self, line)
+        if not result:
+            response.status = 204
+            return
+        return repr(result)
+
+    def doc(self):
+        """Push 'line' and return result of getargspec on the final expr."""
+        line = request.GET['line']
+        result = ProgrammaticREPL.doc(self, line)
+        if not result:
+            response.status = 204
+        return result
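
For illustration, a sketch of how a client might exercise these routes once
uuid_serve_functions() has bound an HTTPREPL instance.  The token value is
hypothetical (the real one is printed at server startup), and this assumes
Python 2, matching the rest of the codebase:

    import urllib, urllib2, json

    token = "00000000-0000-0000-0000-000000000000"  # hypothetical
    base = "http://localhost:8080/%s" % token
    # Supplying a data argument makes urllib2 issue a POST, matching /push.
    reply = urllib2.urlopen("%s/push" % base,
                            urllib.urlencode({"line": "2 + 2"}))
    print json.loads(reply.read())["text"]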


--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/yt/gui/reason/setup.py	Wed Mar 02 21:36:02 2011 -0500
@@ -0,0 +1,10 @@
+#!/usr/bin/env python
+import setuptools
+import os, sys, os.path
+
+def configuration(parent_package='',top_path=None):
+    from numpy.distutils.misc_util import Configuration
+    config = Configuration('reason',parent_package,top_path)
+    config.make_config_py() # installs __config__.py
+    config.make_svn_version_py()
+    return config
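
For completeness, this is the standard way a numpy.distutils configuration
function like the one above is consumed when such a file is run directly; a
sketch of the usual pattern, not something added in this changeset:

    if __name__ == '__main__':
        from numpy.distutils.core import setup
        setup(configuration=configuration)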


--- a/yt/gui/reason_v2.py	Wed Mar 02 21:05:10 2011 -0500
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,479 +0,0 @@
-"""
-New version of Reason, using a TraitsUI-based approach
-
-Author: Matthew Turk <matthewturk at gmail.com>
-Affiliation: KIPAC/SLAC/Stanford
-Homepage: http://yt.enzotools.org/
-License:
-  Copyright (C) 2009 Matthew Turk.  All Rights Reserved.
-
-  This file is part of yt.
-
-  yt is free software; you can redistribute it and/or modify
-  it under the terms of the GNU General Public License as published by
-  the Free Software Foundation; either version 3 of the License, or
-  (at your option) any later version.
-
-  This program is distributed in the hope that it will be useful,
-  but WITHOUT ANY WARRANTY; without even the implied warranty of
-  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-  GNU General Public License for more details.
-
-  You should have received a copy of the GNU General Public License
-  along with this program.  If not, see <http://www.gnu.org/licenses/>.
-"""
-
-from yt.mods import *
-from yt.utilities.definitions import \
-    x_dict, \
-    y_dict
-#pf = EnzoStaticOutput("/Users/matthewturk/Research/data/galaxy1200.dir/galaxy1200")
-
-from enthought.traits.api import \
-    HasTraits, List, Instance, Str, Float, Any, Code, PythonValue, Int, CArray, \
-    Property, Enum, cached_property, DelegatesTo, Callable, Array, \
-    Button
-from enthought.traits.ui.api import \
-    Group, VGroup, HGroup, Tabbed, View, Item, ShellEditor, InstanceEditor, ListStrEditor, \
-    ListEditor, VSplit, VFlow, HSplit, VFold, ValueEditor, TreeEditor, TreeNode, RangeEditor, \
-    EnumEditor, Handler, Controller, DNDEditor
-from enthought.traits.ui.menu import \
-    Menu, Action, Separator, OKCancelButtons, OKButton
-from enthought.pyface.action.api import \
-    ActionController
-from enthought.tvtk.pyface.scene_editor import SceneEditor
-from enthought.tvtk.pyface.api import \
-    DecoratedScene
-from enthought.tvtk.pyface.scene_model import SceneModel
-
-from plot_editors import Figure, MPLFigureEditor, MPLVMPlotEditor, Axes
-
-from yt.visualization.plot_types import VMPlot, ProjectionPlot, SlicePlot
-
-import traceback
-from tvtk_interface import \
-    HierarchyImporter, YTScene
-
-class PlotCreationHandler(Controller):
-    main_window = Instance(HasTraits)
-    pnode = Instance(HasTraits)
-
-    format = Str
-    plot_type = Any
-    
-    def close(self, info, is_ok):
-        if not is_ok:
-            super(Controller, self).close(info, True)
-            return
-        spt = self.plot_type(plot_spec=self.model, pf=self.pnode.pf,
-                           name=self.format % (self.model.axis))
-        self.pnode.data_objects.append(spt)
-        self.main_window.plot_frame_tabs.append(spt)
-        spt.plot
-
-class VTKSceneCreationHandler(PlotCreationHandler):
-    importer = Instance(HierarchyImporter)
-
-    def close(self, info, is_ok):
-        if is_ok: 
-            yt_scene = YTScene(importer=self.importer,
-                scene=SceneModel())
-            spt = VTKDataObject(name = "VTK: %s" % self.pnode.pf,
-                    scene=yt_scene.scene,
-                    yt_scene=yt_scene)
-            self.pnode.data_objects.append(spt)
-            self.main_window.plot_frame_tabs.append(spt)
-        super(Controller, self).close(info, True)
-        return True
-
-
-class DataObject(HasTraits):
-    name = Str
-
-class VTKDataObject(DataObject):
-    yt_scene = Instance(YTScene)
-    scene = DelegatesTo("yt_scene")
-    add_contours = Button
-    add_isocontour = Button
-    add_x_plane = Button
-    add_y_plane = Button
-    add_z_plane = Button
-    edit_camera = Button
-    edit_operators = Button
-    edit_pipeline = Button
-    center_on_max = Button
-    operators = DelegatesTo("yt_scene")
-    traits_view = View(
-            Item("scene", editor = 
-        SceneEditor(scene_class=DecoratedScene),
-                    resizable=True, show_label=False),
-            HGroup(Item("add_contours", show_label=False),
-                   Item("add_isocontour", show_label=False),
-                   Item("add_x_plane", show_label=False),
-                   Item("add_y_plane", show_label=False),
-                   Item("add_z_plane", show_label=False),
-                   Item("edit_camera", show_label=False),
-                   Item("edit_operators", show_label=False),
-                   Item("edit_pipeline", show_label=False),
-                   Item("center_on_max", show_label=False),
-                ),
-            )
-
-    operators_edit = View(
-        Item("operators", style='custom', show_label=False,
-             editor=ListEditor(editor=InstanceEditor(),
-                               use_notebook=True),
-              name="Edit Operators"),
-        height=500.0, width=500.0, resizable=True)
-    
-    def _edit_camera_fired(self):
-        self.yt_scene.camera_path.edit_traits()
-
-    def _edit_operators_fired(self):
-        self.edit_traits(view='operators_edit')
-
-    def _edit_pipeline_fired(self):
-        from enthought.tvtk.pipeline.browser import PipelineBrowser
-        pb = PipelineBrowser(self.scene)
-        pb.show()
-
-    def _add_contours_fired(self):
-        self.yt_scene.add_contour()
-
-    def _add_isocontour_fired(self):
-        self.yt_scene.add_isocontour()
-
-    def _add_x_plane_fired(self):
-        self.yt_scene.add_x_plane()
-
-    def _add_y_plane_fired(self):
-        self.yt_scene.add_y_plane()
-
-    def _add_z_plane_fired(self):
-        self.yt_scene.add_z_plane()
-
-    def _center_on_max_fired(self):
-        self.yt_scene.do_center_on_max()
-
-class ParameterFile(HasTraits):
-    pf = Instance(EnzoStaticOutput)
-    data_objects = List(Instance(DataObject))
-    name = Str
-
-    def _name_default(self):
-        return str(self.pf)
-
-    def do_slice(self):
-        cons_view = View(
-                Item('axis'), 
-                Item('center'), 
-                Item('field', editor=EnumEditor(name='field_list')),
-                buttons=OKCancelButtons, title="Slicer: %s" % self.pf)
-        ps = SlicePlotSpec(pf=self.pf)
-        hand = PlotCreationHandler(main_window=mw, pnode=self, model=ps,
-                                   plot_type=SlicePlotTab, format="Slice: %s")
-        ps.edit_traits(cons_view, handler=hand)
-
-    def do_proj(self):
-        cons_view = View(
-                Item('axis'), 
-                Item('field', editor=EnumEditor(name='field_list')),
-                Item('weight_field', editor=EnumEditor(name='none_field_list')),
-                buttons=OKCancelButtons, title="Projector: %s" % self.pf)
-        ps = ProjPlotSpec(pf=self.pf)
-        hand = PlotCreationHandler(main_window=mw, pnode=self, model=ps,
-                                   plot_type=ProjPlotTab, format="Proj: %s")
-        ps.edit_traits(cons_view, handler=hand)
-
-    def do_vtk(self):
-        from tvtk_interface import HierarchyImporter, \
-            HierarchyImportHandler
-        importer = HierarchyImporter(pf=self.pf, max_level=self.pf.h.max_level)
-        importer.edit_traits(handler = VTKSceneCreationHandler(
-            main_window=mw, pnode=self, importer = importer))
-
-class ParameterFileCollection(HasTraits):
-    parameter_files = List(Instance(ParameterFile))
-    name = Str
-    collection = Any
-
-    def _parameter_files_default(self):
-        my_list = []
-        for f in self.collection:
-            try:
-                pf = EnzoStaticOutput(f)
-                my_list.append(
-                    ParameterFile(pf=pf, 
-                            data_objects = []))
-            except IOError: pass
-        return my_list
-
-    def _name_default(self):
-        return str(self.collection)
-
-class ParameterFileCollectionList(HasTraits):
-    parameter_file_collections = List(Instance(ParameterFileCollection))
-
-    def _parameter_file_collections_default(self):
-        return [ParameterFileCollection(collection=c)
-                for c in fido.GrabCollections()]
-
-class DataObjectList(HasTraits):
-    data_objects = List(Str)
-
-    traits_view = View(
-              Item('data_objects', show_label=False,
-                   editor=ListStrEditor())
-               )
-
-    def _data_objects_default(self):
-        return ['a','b','c']
-
-class PlotFrameTab(DataObject):
-    figure = Instance(Figure)
-
-class VMPlotSpec(HasTraits):
-    pf = Instance(EnzoStaticOutput)
-    field = Str('Density')
-    field_list = Property(depends_on = 'pf')
-
-    center = Array(shape=(3,), dtype='float64')
-    axis = Enum(0,1,2)
-
-    @cached_property
-    def _get_field_list(self):
-        fl = self.pf.h.field_list
-        df = self.pf.h.derived_field_list
-        fl.sort(); df.sort()
-        return fl + df
-
-    def _center_default(self):
-        return self.pf.h.find_max("Density")[1]
-
-class SlicePlotSpec(VMPlotSpec):
-    pass
-
-class ProjPlotSpec(VMPlotSpec):
-    weight_field = Str("None")
-    none_field_list = Property(depends_on = 'field_list')
-
-    @cached_property
-    def _get_none_field_list(self):
-        return ["None"] + self.field_list
-
-class VMPlotTab(PlotFrameTab):
-    pf = Instance(EnzoStaticOutput)
-    figure = Instance(Figure, args=())
-    field = DelegatesTo('plot_spec')
-    field_list = DelegatesTo('plot_spec')
-    plot = Instance(VMPlot)
-    axes = Instance(Axes)
-    disp_width = Float(1.0)
-    unit = Str('unitary')
-    min_width = Property(Float, depends_on=['pf','unit'])
-    max_width = Property(Float, depends_on=['pf','unit'])
-    unit_list = Property(depends_on = 'pf')
-    smallest_dx = Property(depends_on = 'pf')
-
-    traits_view = View(VGroup(
-            HGroup(Item('figure', editor=MPLVMPlotEditor(),
-                     show_label=False)),
-            HGroup(Item('disp_width',
-                     editor=RangeEditor(format="%0.2e",
-                        low_name='min_width', high_name='max_width',
-                        mode='logslider', enter_set=True),
-                     show_label=False, width=400.0),
-                   Item('unit',
-                      editor=EnumEditor(name='unit_list')),),
-            HGroup(Item('field',
-                      editor=EnumEditor(name='field_list')),
-                )),
-             resizable=True)
-
-    def __init__(self, **traits):
-        super(VMPlotTab, self).__init__(**traits)
-        self.axes = self.figure.add_subplot(111, aspect='equal')
-
-    def _field_changed(self, old, new):
-        self.plot.switch_z(new)
-        self._redraw()
-
-    @cached_property
-    def _get_min_width(self):
-        return 50.0*self.smallest_dx*self.pf[self.unit]
-
-    @cached_property
-    def _get_max_width(self):
-        return self.pf['unitary']*self.pf[self.unit]
-
-    @cached_property
-    def _get_smallest_dx(self):
-        return self.pf.h.get_smallest_dx()
-
-    @cached_property
-    def _get_unit_list(self):
-        return self.pf.units.keys()
-
-    def _unit_changed(self, old, new):
-        self.disp_width = self.disp_width * self.pf[new]/self.pf[old]
-
-    def _disp_width_changed(self, old, new):
-        self.plot.set_width(new, self.unit)
-        self._redraw()
-
-    def _redraw(self):
-        self.figure.canvas.draw()
-
-    def recenter(self, event):
-        xp, yp = event.xdata, event.ydata
-        dx = abs(self.plot.xlim[0] - self.plot.xlim[1])/self.plot.pix[0]
-        dy = abs(self.plot.ylim[0] - self.plot.ylim[1])/self.plot.pix[1]
-        x = (dx * xp) + self.plot.xlim[0]
-        y = (dy * yp) + self.plot.ylim[0]
-        xi = x_dict[self.axis]
-        yi = y_dict[self.axis]
-        cc = self.center[:]
-        cc[xi] = x; cc[yi] = y
-        self.plot.data.center = cc[:]
-        self.plot.data.set_field_parameter('center', cc.copy())
-        self.center = cc
-
-class SlicePlotTab(VMPlotTab):
-    plot_spec = Instance(SlicePlotSpec)
-
-    axis = DelegatesTo('plot_spec')
-    center = DelegatesTo('plot_spec')
-    
-    plot = Instance(SlicePlot)
-
-    def _plot_default(self):
-        coord = self.center[self.axis]
-        sl = self.pf.h.slice(self.axis, coord, center=self.center[:])
-        sp = SlicePlot(sl, self.field, self.figure, self.axes)
-        self.figure.canvas.draw()
-        return sp
-
-    def _center_changed(self, old, new):
-        #traceback.print_stack()
-        if na.all(na.abs(old - new) == 0.0): return
-        print na.abs(old-new)
-        print "Re-slicing", old, new
-        pp = self.center
-        self.plot.data.reslice(pp[self.axis])
-        self.plot._refresh_display_width()
-        self.figure.canvas.draw()
-
-class ProjPlotTab(VMPlotTab):
-    plot_spec = Instance(ProjPlotSpec)
-
-    axis = DelegatesTo('plot_spec')
-    center = DelegatesTo('plot_spec')
-    weight_field = DelegatesTo('plot_spec')
-
-    plot = Instance(ProjectionPlot)
-
-    def _plot_default(self):
-        self.field = self.field[:]
-        self.weight_field = self.weight_field[:]
-        wf = self.weight_field
-        if str(wf) == "None": wf = None
-        proj = self.pf.h.proj(self.axis, self.field, wf,
-                        center=self.center[:])
-        pp = ProjectionPlot(proj, self.field, self.figure, self.axes)
-        self.figure.canvas.draw()
-        return pp
-
-    def _center_changed(self, old, new):
-        self.plot._refresh_display_width()
-
-class SphereWrapper(DataObject):
-    radius = Float
-    unit = Str
-
-class MainWindow(HasTraits):
-    parameter_file_collections = Instance(ParameterFileCollectionList)
-    parameter_files = Instance(ParameterFileCollection)
-    plot_frame_tabs = List(Instance(DataObject))
-    open_parameterfile = Button
-    shell = PythonValue
-
-    def _shell_default(self):
-        return globals()
-    notebook_editor = ListEditor(editor=InstanceEditor(editable=True),
-                                 use_notebook=True)
-
-    traits_view = View(VSplit(
-                    HSplit(VGroup(
-                       Item('parameter_file_collections', 
-                            width=120.0, height=500.0,
-                            show_label=False,
-                            editor = TreeEditor(editable=False,
-                    nodes=[
-                        TreeNode(node_for=[ParameterFileCollectionList],
-                                 children='parameter_file_collections',
-                                 label="=Data Collections"),
-                        TreeNode(node_for=[ParameterFileCollection],
-                                 children='parameter_files',
-                                 label="name",
-                                 view=View()),
-                        TreeNode(node_for=[ParameterFile],
-                                 children='data_objects',
-                                 label="name",
-                                 menu = Menu(Action(name='Slice',
-                                                    action='object.do_slice'),
-                                             Action(name='Project',
-                                                    action='object.do_proj'),
-                                             Action(name='VTK',
-                                                    action='object.do_vtk')),
-                                 view=View()),
-                        TreeNode(node_for=[DataObject],
-                                 children='',
-                                 label="name"),
-                                ], show_icons=False),),
-                        Item('open_parameterfile', show_label=False)),
-                       Item('plot_frame_tabs', style='custom',
-                            editor = notebook_editor,
-                            show_label=False, height=500.0, width=500.0),
-                    ),
-                    HGroup(
-                       #Item('shell', editor=ShellEditor(share=True),
-                            #show_label=False, height=120.0),
-                    ),
-                ),
-               resizable=True, width=800.0, height=660.0,
-               title="reason v2 [prototype]")
-
-    def _open_parameterfile_fired(self):
-        print "OPENING"
-
-    def _parameter_file_collections_default(self):
-        return ParameterFileCollectionList()
-
-class YTScript(HasTraits):
-    code = Code
-    traits_view = View(Item('code', show_label=False),
-                       height=0.8, width=0.8, resizable=True,
-                       buttons=OKCancelButtons)
-
-class ObjectViewer(HasTraits):
-    to_view=Any
-    traits_view = View(
-            Item('to_view', editor=ValueEditor(), show_label=False),
-                     resizable=True, height=0.8, width=0.8)
-
-def view_object(obj):
-    ObjectViewer(to_view=obj).edit_traits()
-
-def run_script():
-    my_script = YTScript()
-    my_script.edit_traits()
-    return my_script
-
-class event_mock(object):
-    inaxes = True
-    button = 3
-
-dol = DataObjectList()
-mw = MainWindow(plot_frame_tabs = [])
-mw.edit_traits()
-#mw.edit_traits()


--- a/yt/gui/tvtk_interface.py	Wed Mar 02 21:05:10 2011 -0500
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,692 +0,0 @@
-"""
-This is the preliminary interface to VTK.  Note that as of VTK 5.2, it still
-requires a patchset prepared here:
-http://yt.enzotools.org/files/vtk_composite_data.zip
-
-Author: Matthew Turk <matthewturk at gmail.com>
-Affiliation: KIPAC/SLAC/Stanford
-Homepage: http://yt.enzotools.org/
-License:
-  Copyright (C) 2007-2009 Matthew Turk.  All Rights Reserved.
-
-  This file is part of yt.
-
-  yt is free software; you can redistribute it and/or modify
-  it under the terms of the GNU General Public License as published by
-  the Free Software Foundation; either version 3 of the License, or
-  (at your option) any later version.
-
-  This program is distributed in the hope that it will be useful,
-  but WITHOUT ANY WARRANTY; without even the implied warranty of
-  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-  GNU General Public License for more details.
-
-  You should have received a copy of the GNU General Public License
-  along with this program.  If not, see <http://www.gnu.org/licenses/>.
-"""
-
-from enthought.tvtk.tools import ivtk
-from enthought.tvtk.api import tvtk 
-from enthought.traits.api import \
-    Float, HasTraits, Instance, Range, Any, Delegate, Tuple, File, Int, Str, \
-    CArray, List, Button, Bool, Property, cached_property
-from enthought.traits.ui.api import View, Item, HGroup, VGroup, TableEditor, \
-    Handler, Controller, RangeEditor, EnumEditor, InstanceEditor
-from enthought.traits.ui.menu import \
-    Menu, Action, Separator, OKCancelButtons, OKButton
-from enthought.traits.ui.table_column import ObjectColumn
-from enthought.tvtk.pyface.api import DecoratedScene
-
-import enthought.pyface.api as pyface
-
-#from yt.reason import *
-import sys
-import numpy as na
-import time, pickle, os, os.path
-from yt.funcs import *
-from yt.analysis_modules.hierarchy_subset.api import \
-        ExtractedHierarchy, ExtractedParameterFile
-
-#from enthought.tvtk.pyface.ui.wx.wxVTKRenderWindowInteractor \
-     #import wxVTKRenderWindowInteractor
-
-from enthought.mayavi.core.lut_manager import LUTManager
-
-#wxVTKRenderWindowInteractor.USE_STEREO = 1
-
-class TVTKMapperWidget(HasTraits):
-    alpha = Float(1.0)
-    post_call = Any
-    lut_manager = Instance(LUTManager)
-
-    def _alpha_changed(self, old, new):
-        self.lut_manager.lut.alpha_range = (new, new)
-        self.post_call()
-
-class MappingPlane(TVTKMapperWidget):
-    plane = Instance(tvtk.Plane)
-    _coord_redit = editor=RangeEditor(format="%0.2e",
-                              low_name='vmin', high_name='vmax',
-                              auto_set=False, enter_set=True)
-    auto_set = Bool(False)
-    traits_view = View(Item('coord', editor=_coord_redit),
-                       Item('auto_set'),
-                       Item('alpha', editor=RangeEditor(
-                              low=0.0, high=1.0,
-                              enter_set=True, auto_set=False)),
-                       Item('lut_manager', show_label=False,
-                            editor=InstanceEditor(), style='custom'))
-    vmin = Float
-    vmax = Float
-
-    def _auto_set_changed(self, old, new):
-        if new is True:
-            self._coord_redit.auto_set = True
-            self._coord_redit.enter_set = False
-        else:
-            self._coord_redit.auto_set = False
-            self._coord_redit.enter_set = True
-
-    def __init__(self, vmin, vmax, vdefault, **traits):
-        HasTraits.__init__(self, **traits)
-        self.vmin = vmin
-        self.vmax = vmax
-        trait = Range(float(vmin), float(vmax), value=vdefault)
-        self.add_trait("coord", trait)
-        self.coord = vdefault
-
-    def _coord_changed(self, old, new):
-        orig = self.plane.origin[:]
-        orig[self.axis] = new
-        self.plane.origin = orig
-        self.post_call()
-
-class MappingMarchingCubes(TVTKMapperWidget):
-    operator = Instance(tvtk.MarchingCubes)
-    mapper = Instance(tvtk.HierarchicalPolyDataMapper)
-    vmin = Float
-    vmax = Float
-    auto_set = Bool(False)
-    _val_redit = RangeEditor(format="%0.2f",
-                             low_name='vmin', high_name='vmax',
-                             auto_set=False, enter_set=True)
-    traits_view = View(Item('value', editor=_val_redit),
-                       Item('auto_set'),
-                       Item('alpha', editor=RangeEditor(
-                            low=0.0, high=1.0,
-                            enter_set=True, auto_set=False,)),
-                       Item('lut_manager', show_label=False,
-                            editor=InstanceEditor(), style='custom'))
-
-    def __init__(self, vmin, vmax, vdefault, **traits):
-        HasTraits.__init__(self, **traits)
-        self.vmin = vmin
-        self.vmax = vmax
-        trait = Range(float(vmin), float(vmax), value=vdefault)
-        self.add_trait("value", trait)
-        self.value = vdefault
-
-    def _auto_set_changed(self, old, new):
-        if new is True:
-            self._val_redit.auto_set = True
-            self._val_redit.enter_set = False
-        else:
-            self._val_redit.auto_set = False
-            self._val_redit.enter_set = True
-
-    def _value_changed(self, old, new):
-        self.operator.set_value(0, new)
-        self.post_call()
-
-class MappingIsoContour(MappingMarchingCubes):
-    operator = Instance(tvtk.ContourFilter)
-
-class CameraPosition(HasTraits):
-    position = CArray(shape=(3,), dtype='float64')
-    focal_point = CArray(shape=(3,), dtype='float64')
-    view_up = CArray(shape=(3,), dtype='float64')
-    clipping_range = CArray(shape=(2,), dtype='float64')
-    distance = Float
-    num_steps = Int(10)
-    orientation_wxyz = CArray(shape=(4,), dtype='float64')
-
-class CameraControl(HasTraits):
-    # Traits
-    positions = List(CameraPosition)
-    yt_scene = Instance('YTScene')
-    center = Delegate('yt_scene')
-    scene = Delegate('yt_scene')
-    camera = Instance(tvtk.OpenGLCamera)
-    reset_position = Instance(CameraPosition)
-    fps = Float(25.0)
-    export_filename = 'frames'
-    periodic = Bool
-
-    # UI elements
-    snapshot = Button()
-    play = Button()
-    export_frames = Button()
-    reset_path = Button()
-    recenter = Button()
-    save_path = Button()
-    load_path = Button()
-    export_path = Button()
-
-    table_def = TableEditor(
-        columns = [ ObjectColumn(name='position'),
-                    ObjectColumn(name='focal_point'),
-                    ObjectColumn(name='view_up'),
-                    ObjectColumn(name='clipping_range'),
-                    ObjectColumn(name='num_steps') ],
-        reorderable=True, deletable=True,
-        sortable=True, sort_model=True,
-        show_toolbar=True,
-        selection_mode='row',
-        selected = 'reset_position'
-                )
-
-    default_view = View(
-                VGroup(
-                  HGroup(
-                    Item('camera', show_label=False),
-                    Item('recenter', show_label=False),
-                    label='Camera'),
-                  HGroup(
-                    Item('snapshot', show_label=False),
-                    Item('play', show_label=False),
-                    Item('export_frames',show_label=False),
-                    Item('reset_path', show_label=False),
-                    Item('save_path', show_label=False),
-                    Item('load_path', show_label=False),
-                    Item('export_path', show_label=False),
-                    Item('export_filename'),
-                    Item('periodic'),
-                    Item('fps'),
-                    label='Playback'),
-                  VGroup(
-                    Item('positions', show_label=False,
-                        editor=table_def),
-                    label='Camera Path'),
-                 ),
-                resizable=True, title="Camera Path Editor",
-                       )
-
-    def _reset_position_changed(self, old, new):
-        if new is None: return
-        cam = self.scene.camera
-        cam.position = new.position
-        cam.focal_point = new.focal_point
-        cam.view_up = new.view_up
-        cam.clipping_range = new.clipping_range
-        self.scene.render()
-
-    def __init__(self, **traits):
-        HasTraits.__init__(self, **traits)
-
-    def take_snapshot(self):
-        cam = self.scene.camera
-        self.positions.append(CameraPosition(
-                position=cam.position,
-                focal_point=cam.focal_point,
-                view_up=cam.view_up,
-                clipping_range=cam.clipping_range,
-                distance=cam.distance,
-                orientation_wxyz=cam.orientation_wxyz))
-
-    def _export_path_fired(self): 
-        dlg = pyface.FileDialog(
-            action='save as',
-            wildcard="*.cpath",
-        )
-        if dlg.open() == pyface.OK:
-            print "Saving:", dlg.path
-            self.export_camera_path(dlg.path)
-
-    def export_camera_path(self, fn):
-        to_dump = dict(positions=[], focal_points=[],
-                       view_ups=[], clipping_ranges=[],
-                       distances=[], orientation_wxyzs=[])
-        def _write(cam):
-            to_dump['positions'].append(cam.position)
-            to_dump['focal_points'].append(cam.focal_point)
-            to_dump['view_ups'].append(cam.view_up)
-            to_dump['clipping_ranges'].append(cam.clipping_range)
-            to_dump['distances'].append(cam.distance)
-            to_dump['orientation_wxyzs'].append(cam.orientation_wxyz)
-        self.step_through(0.0, callback=_write)
-        pickle.dump(to_dump, open(fn, "wb"))
-
-    def _save_path_fired(self): 
-        dlg = pyface.FileDialog(
-            action='save as',
-            wildcard="*.cpath",
-        )
-        if dlg.open() == pyface.OK:
-            print "Saving:", dlg.path
-            self.dump_camera_path(dlg.path)
-
-    def dump_camera_path(self, fn):
-        to_dump = dict(positions=[], focal_points=[],
-                       view_ups=[], clipping_ranges=[],
-                       distances=[], orientation_wxyzs=[],
-                       num_stepss=[])
-        for p in self.positions:
-            to_dump['positions'].append(p.position)
-            to_dump['focal_points'].append(p.focal_point)
-            to_dump['view_ups'].append(p.view_up)
-            to_dump['clipping_ranges'].append(p.clipping_range)
-            to_dump['distances'].append(p.distance)
-            to_dump['num_stepss'].append(p.num_steps) # stupid s
-            to_dump['orientation_wxyzs'].append(p.orientation_wxyz)
-        pickle.dump(to_dump, open(fn, "wb"))
-
-    def _load_path_fired(self):
-        dlg = pyface.FileDialog(
-            action='open',
-            wildcard="*.cpath",
-        )
-        if dlg.open() == pyface.OK:
-            print "Loading:", dlg.path
-            self.load_camera_path(dlg.path)
-
-    def load_camera_path(self, fn):
-        to_use = pickle.load(open(fn, "rb"))
-        self.positions = []
-        for i in range(len(to_use['positions'])):
-            dd = {}
-            for kw in to_use:
-                # Strip the s
-                dd[kw[:-1]] = to_use[kw][i]
-            self.positions.append(
-                CameraPosition(**dd))
-
-    def _recenter_fired(self):
-        self.camera.focal_point = self.center
-        self.scene.render()
-
-    def _snapshot_fired(self):
-        self.take_snapshot()
-
-    def _play_fired(self):
-        self.step_through()
-
-    def _export_frames_fired(self):
-        self.step_through(save_frames=True)
-
-    def _reset_path_fired(self):
-        self.positions = []
-
-    def step_through(self, pause = 1.0, callback=None, save_frames=False):
-        cam = self.scene.camera
-        frame_counter=0
-        if self.periodic:
-            cyclic_pos = self.positions + [self.positions[0]]
-        else:
-            cyclic_pos = self.positions
-        for i in range(len(cyclic_pos)-1):
-            pos1 = cyclic_pos[i]
-            pos2 = cyclic_pos[i+1]
-            r = pos1.num_steps
-            for p in range(pos1.num_steps):
-                po = _interpolate(pos1.position, pos2.position, p, r)
-                fp = _interpolate(pos1.focal_point, pos2.focal_point, p, r)
-                vu = _interpolate(pos1.view_up, pos2.view_up, p, r)
-                cr = _interpolate(pos1.clipping_range, pos2.clipping_range, p, r)
-                _set_cpos(cam, po, fp, vu, cr)
-                self.scene.render()
-                if callback is not None: callback(cam)
-                if save_frames:
-                    self.scene.save("%s_%0.5d.png" % (self.export_filename,frame_counter))
-                else:
-                    time.sleep(pause * 1.0/self.fps)
-                frame_counter += 1
-
-def _interpolate(q1, q2, p, r):
-    return q1 + p*(q2 - q1)/float(r)
-
-def _set_cpos(cam, po, fp, vu, cr):
-    cam.position = po
-    cam.focal_point = fp
-    cam.view_up = vu
-    cam.clipping_range = cr
-
-class HierarchyImporter(HasTraits):
-    pf = Any
-    min_grid_level = Int(0)
-    max_level = Int(1)
-    number_of_levels = Range(0, 13)
-    max_import_levels = Property(depends_on='min_grid_level')
-    field = Str("Density")
-    field_list = List
-    center_on_max = Bool(True)
-    center = CArray(shape = (3,), dtype = 'float64')
-    cache = Bool(True)
-    smoothed = Bool(True)
-    show_grids = Bool(True)
-
-    def _field_list_default(self):
-        fl = self.pf.h.field_list
-        df = self.pf.h.derived_field_list
-        fl.sort(); df.sort()
-        return fl + df
-    
-    default_view = View(Item('min_grid_level',
-                              editor=RangeEditor(low=0,
-                                                 high_name='max_level')),
-                        Item('number_of_levels', 
-                              editor=RangeEditor(low=1,
-                                                 high_name='max_import_levels')),
-                        Item('field', editor=EnumEditor(name='field_list')),
-                        Item('center_on_max'),
-                        Item('center', enabled_when='not object.center_on_max'),
-                        Item('smoothed'),
-                        Item('cache', label='Pre-load data'),
-                        Item('show_grids'),
-                        buttons=OKCancelButtons)
-
-    def _center_default(self):
-        return [0.5,0.5,0.5]
-
-    @cached_property
-    def _get_max_import_levels(self):
-        return min(13, self.pf.h.max_level - self.min_grid_level + 1)
-
-class HierarchyImportHandler(Controller):
-    importer = Instance(HierarchyImporter)
-    
-
-    def close(self, info, is_ok):
-        if is_ok: 
-            yt_scene = YTScene(
-                importer=self.importer)
-        super(Controller, self).close(info, True)
-        return
-
-
-class YTScene(HasTraits):
-
-    # Traits
-    importer = Instance(HierarchyImporter)
-    pf = Delegate("importer")
-    min_grid_level = Delegate("importer")
-    number_of_levels = Delegate("importer")
-    field = Delegate("importer")
-    center = CArray(shape = (3,), dtype = 'float64')
-    center_on_max = Delegate("importer")
-    smoothed = Delegate("importer")
-    cache = Delegate("importer")
-    show_grids = Delegate("importer")
-
-    camera_path = Instance(CameraControl)
-    #window = Instance(ivtk.IVTKWithCrustAndBrowser)
-    #python_shell = Delegate('window')
-    #scene = Delegate('window')
-    scene = Instance(HasTraits)
-    operators = List(HasTraits)
-
-    # State variables
-    _grid_boundaries_actor = None
-
-    # Views
-    def _window_default(self):
-        # Should experiment with passing in a pipeline browser
-        # that has two root objects -- one for TVTKBases, i.e. the render
-        # window, and one that accepts our objects
-        return ivtk.IVTKWithCrustAndBrowser(size=(800,600), stereo=1)
-
-    def _camera_path_default(self):
-        return CameraControl(yt_scene=self, camera=self.scene.camera)
-
-    def __init__(self, **traits):
-        HasTraits.__init__(self, **traits)
-        max_level = min(self.pf.h.max_level,
-                        self.min_grid_level + self.number_of_levels - 1)
-        self.extracted_pf = ExtractedParameterFile(self.pf,
-                             self.min_grid_level, max_level, offset=None)
-        self.extracted_hierarchy = self.extracted_pf.h
-        self._hdata_set = tvtk.HierarchicalBoxDataSet()
-        self._ugs = []
-        self._grids = []
-        self._min_val = 1e60
-        self._max_val = -1e60
-        gid = 0
-        if self.cache:
-            for grid_set in self.extracted_hierarchy.get_levels():
-                for grid in grid_set:
-                    grid[self.field]
-        for l, grid_set in enumerate(self.extracted_hierarchy.get_levels()):
-            gid = self._add_level(grid_set, l, gid)
-        if self.show_grids:
-            self.toggle_grid_boundaries()
-            
-    def _center_default(self):
-        return self.extracted_hierarchy._convert_coords(
-                [0.5, 0.5, 0.5])
-
-    def do_center_on_max(self):
-        self.center = self.extracted_hierarchy._convert_coords(
-            self.pf.h.find_max("Density")[1])
-        self.scene.camera.focal_point = self.center
-
-    def _add_level(self, grid_set, level, gid):
-        for grid in grid_set:
-            self._hdata_set.set_refinement_ratio(level, 2)
-            gid = self._add_grid(grid, gid, level)
-        return gid
-
-    def _add_grid(self, grid, gid, level=0):
-        mylog.debug("Adding grid %s on level %s (%s)",
-                    grid.id, level, grid.Level)
-        if grid in self._grids: return
-        self._grids.append(grid)
-
-        scalars = grid.get_vertex_centered_data(self.field, smoothed=self.smoothed)
-
-        left_index = grid.get_global_startindex()
-        origin = grid.LeftEdge
-        dds = grid.dds
-        right_index = left_index + scalars.shape - 1
-        ug = tvtk.UniformGrid(origin=origin, spacing=dds,
-                              dimensions=grid.ActiveDimensions+1)
-        if self.field not in self.pf.field_info or \
-            self.pf.field_info[self.field].take_log:
-            scalars = na.log10(scalars)
-        ug.point_data.scalars = scalars.transpose().ravel()
-        ug.point_data.scalars.name = self.field
-        if grid.Level != self.min_grid_level + self.number_of_levels - 1:
-            ug.cell_visibility_array = grid.child_mask.transpose().ravel()
-        else:
-            ug.cell_visibility_array = na.ones(
-                    grid.ActiveDimensions, dtype='int').ravel()
-        self._ugs.append((grid,ug))
-        self._hdata_set.set_data_set(level, gid, left_index, right_index, ug)
-
-        self._min_val = min(self._min_val, scalars.min())
-        self._max_val = max(self._max_val, scalars.max())
-
-        gid += 1
-        return gid
-
-    def _add_data_to_ug(self, field):
-        for g, ug in self._ugs:
-            scalars_temp = grid.get_vertex_centered_data(field, smoothed=self.smoothed)
-            ii = ug.point_data.add_array(scalars_temp.transpose().ravel())
-            ug.point_data.get_array(ii).name = field
-
-    def zoom(self, dist, unit='1'):
-        vec = self.scene.camera.focal_point - \
-              self.scene.camera.position
-        self.scene.camera.position += \
-            vec * dist/self._grids[0].pf[unit]
-        self.scene.render()
-
-    def toggle_grid_boundaries(self):
-        if self._grid_boundaries_actor is None:
-            # We don't need to track this stuff right now.
-            ocf = tvtk.OutlineCornerFilter(
-                    executive=tvtk.CompositeDataPipeline(),
-                    corner_factor = 0.5)
-            ocf.input = self._hdata_set
-            ocm = tvtk.HierarchicalPolyDataMapper(
-                input_connection = ocf.output_port)
-            self._grid_boundaries_actor = tvtk.Actor(mapper = ocm)
-            self.scene.add_actor(self._grid_boundaries_actor)
-        else:
-            self._grid_boundaries_actor.visibility = \
-            (not self._grid_boundaries_actor.visibility)
-
-    def _add_sphere(self, origin=(0.0,0.0,0.0), normal=(0,1,0)):
-        sphere = tvtk.Sphere(center=origin, radius=0.25)
-        cutter = tvtk.Cutter(executive = tvtk.CompositeDataPipeline(),
-                             cut_function = sphere)
-        cutter.input = self._hdata_set
-        lut_manager = LUTManager(data_name=self.field, scene=self.scene)
-        smap = tvtk.HierarchicalPolyDataMapper(
-                        scalar_range=(self._min_val, self._max_val),
-                        lookup_table=lut_manager.lut,
-                        input_connection = cutter.output_port)
-        sactor = tvtk.Actor(mapper=smap)
-        self.scene.add_actors(sactor)
-        return sphere, lut_manager
-
-    def _add_plane(self, origin=(0.0,0.0,0.0), normal=(0,1,0)):
-        plane = tvtk.Plane(origin=origin, normal=normal)
-        cutter = tvtk.Cutter(executive = tvtk.CompositeDataPipeline(),
-                             cut_function = plane)
-        cutter.input = self._hdata_set
-        lut_manager = LUTManager(data_name=self.field, scene=self.scene)
-        smap = tvtk.HierarchicalPolyDataMapper(
-                        scalar_range=(self._min_val, self._max_val),
-                        lookup_table=lut_manager.lut,
-                        input_connection = cutter.output_port)
-        sactor = tvtk.Actor(mapper=smap)
-        self.scene.add_actors(sactor)
-        return plane, lut_manager
-
-    def add_plane(self, origin=(0.0,0.0,0.0), normal=(0,1,0)):
-        self.operators.append(self._add_plane(origin, normal))
-        return self.operators[-1]
-
-    def _add_axis_plane(self, axis):
-        normal = [0,0,0]
-        normal[axis] = 1
-        np, lut_manager = self._add_plane(self.center, normal=normal)
-        LE = self.extracted_hierarchy.min_left_edge
-        RE = self.extracted_hierarchy.max_right_edge
-        self.operators.append(MappingPlane(
-                vmin=LE[axis], vmax=RE[axis],
-                vdefault = self.center[axis],
-                post_call = self.scene.render,
-                plane = np, axis=axis, coord=0.0,
-                lut_manager = lut_manager,
-                scene=self.scene))
-
-    def add_x_plane(self):
-        self._add_axis_plane(0)
-        return self.operators[-1]
-
-    def add_y_plane(self):
-        self._add_axis_plane(1)
-        return self.operators[-1]
-
-    def add_z_plane(self):
-        self._add_axis_plane(2)
-        return self.operators[-1]
-
-    def add_contour(self, val=None):
-        if val is None: 
-            if self._min_val != self._min_val:
-                self._min_val = 1.0
-            val = (self._max_val+self._min_val) * 0.5
-        cubes = tvtk.MarchingCubes(
-                    executive = tvtk.CompositeDataPipeline())
-        cubes.input = self._hdata_set
-        cubes.set_value(0, val)
-        lut_manager = LUTManager(data_name=self.field, scene=self.scene)
-        cube_mapper = tvtk.HierarchicalPolyDataMapper(
-                                input_connection = cubes.output_port,
-                                lookup_table=lut_manager.lut)
-        cube_mapper.color_mode = 'map_scalars'
-        cube_mapper.scalar_range = (self._min_val, self._max_val)
-        cube_actor = tvtk.Actor(mapper=cube_mapper)
-        self.scene.add_actors(cube_actor)
-        self.operators.append(MappingMarchingCubes(operator=cubes,
-                    vmin=self._min_val, vmax=self._max_val,
-                    vdefault=val,
-                    mapper = cube_mapper,
-                    post_call = self.scene.render,
-                    lut_manager = lut_manager,
-                    scene=self.scene))
-        return self.operators[-1]
-
-    def add_isocontour(self, val=None):
-        if val is None: val = (self._max_val+self._min_val) * 0.5
-        isocontour = tvtk.ContourFilter(
-                    executive = tvtk.CompositeDataPipeline())
-        isocontour.input = self._hdata_set
-        isocontour.generate_values(1, (val, val))
-        lut_manager = LUTManager(data_name=self.field, scene=self.scene)
-        isocontour_normals = tvtk.PolyDataNormals(
-            executive=tvtk.CompositeDataPipeline())
-        isocontour_normals.input_connection = isocontour.output_port
-        iso_mapper = tvtk.HierarchicalPolyDataMapper(
-                                input_connection = isocontour_normals.output_port,
-                                lookup_table=lut_manager.lut)
-        iso_mapper.scalar_range = (self._min_val, self._max_val)
-        iso_actor = tvtk.Actor(mapper=iso_mapper)
-        self.scene.add_actors(iso_actor)
-        self.operators.append(MappingIsoContour(operator=isocontour,
-                    vmin=self._min_val, vmax=self._max_val,
-                    vdefault=val,
-                    mapper = iso_mapper,
-                    post_call = self.scene.render,
-                    lut_manager = lut_manager,
-                    scene=self.scene))
-        return self.operators[-1]
-
-    def display_points(self):
-        dd = self.pf.h.all_data()
-        points = tvtk.Points()
-        good = (dd["creation_time"] > 0.0)
-        points.data = na.array([ dd["particle_position_%s" % ax][good] for ax in 'xyz' ]).transpose()
-        mass = na.log10(dd["ParticleAge"][good])
-        self.conn = tvtk.CellArray()
-        for i in xrange(mass.shape[0]):
-            self.conn.insert_next_cell(1)
-            self.conn.insert_cell_point(i)
-        self.points = points
-        self.pd = tvtk.PolyData(points = self.points, verts = self.conn)
-        self.pd.point_data.scalars = mass
-        lut = tvtk.LookupTable()
-        self.pdm = tvtk.PolyDataMapper(input = self.pd,
-                                       lookup_table = lut)
-        self.pdm.scalar_range = (mass.min(), mass.max())
-        self.pdm.scalar_mode = 'use_point_data'
-        self.point_actor = tvtk.Actor(mapper = self.pdm)
-        self.scene.add_actor(self.point_actor)
-
-def get_all_parents(grid):
-    parents = []
-    if len(grid.Parents) == 0: return grid
-    for parent in grid.Parents: parents.append(get_all_parents(parent))
-    return list(set(parents))
-
-def run_vtk():
-    gui = pyface.GUI()
-    importer = HierarchyImporter()
-    importer.edit_traits(handler = HierarchyImportHandler(
-            importer = importer))
-    #ehds.edit_traits()
-    gui.start_event_loop()
-
-
-if __name__=="__main__":
-    print "This code probably won't work.  But if you want to give it a try,"
-    print "you need:"
-    print
-    print "VTK (CVS)"
-    print "Mayavi2 (from Enthought)"
-    print
-    print "If you have 'em, give it a try!"
-    print
-    run_vtk()


http://bitbucket.org/yt_analysis/yt/changeset/b40c189a8945/
changeset:   r3840:b40c189a8945
branch:      yt
user:        MatthewTurk
date:        2011-03-11 04:06:41
summary:     Merging with recent changes
affected #:  2 files (2.8 KB)

--- a/README	Wed Mar 02 21:36:02 2011 -0500
+++ b/README	Thu Mar 10 19:06:41 2011 -0800
@@ -1,9 +1,12 @@
-Hi there!  You've just downloaded yt, an analysis tool for 3D Enzo adaptive
-mesh refinement datasets.  It's written in python and heavily leverages both
-NumPy and Matplotlib for fast arrays and visualization, respectively.
+Hi there!  You've just downloaded yt, an analysis tool for astrophysical
+simulation datasets, generated by simulation platforms like Enzo, Orion,
+CASTRO, MAESTRO, ART and Ramses.  It's written in python and heavily leverages
+both NumPy and Matplotlib for fast arrays and visualization, respectively.
 
-Full documentation and a user community can be found at
-http://yt.enzotools.org/ .
+Full documentation and a user community can be found at:
+
+http://yt.enzotools.org/
+http://yt.enzotools.org/doc/
 
 If you have used Python before, and are comfortable with installing packages,
 you should find the setup.py script fairly straightforward: simply execute
@@ -13,10 +16,10 @@
 doc/install_script.sh .  You will have to set the destination directory, and
 there are options available, but it should be straightforward.
 
-Please feel free to help out, and if you make any cool changes or enhancements,
-let me know!  The Trac site at http://yt.enzotools.org/ has mechanisms for
-ticket and problem submission, as well as an ever-growing Wiki.  I also
-encourage you to sign up for the yt-users mailing list, where there is a
-growing community of users ready to help out with problems and ideas!
+In case of any problems, please email the yt-users mailing list, and if you're
+interested in helping out, see the developer documentation in
+doc/how_to_develop_yt.txt or at:
 
-        -- Matthew Turk, matthewturk at gmail.com
+http://yt.enzotools.org/doc/advanced/developing.html
+
+Enjoy!


--- a/doc/how_to_develop_yt.txt	Wed Mar 02 21:36:02 2011 -0500
+++ b/doc/how_to_develop_yt.txt	Thu Mar 10 19:06:41 2011 -0800
@@ -30,8 +30,9 @@
 How To Get The Source Code
 --------------------------
 
-With the yt installation script you should have a copy of Mercurial.  You can
-clone the repository like so:
+yt is hosted on BitBucket, and you can see all of the yt repositories at
+http://hg.enzotools.org/ .  With the yt installation script you should have a
+copy of Mercurial.  You can clone the repository like so:
 
    $ hg clone http://hg.enzotools.org/yt/
 
@@ -47,6 +48,27 @@
 
 This will rebuild all C modules as well.
 
+How To Submit Changes
+---------------------
+
+You can submit changes in a couple of different ways, but the easiest is to
+use the "fork" mechanism on BitBucket.  Just go here:
+
+http://hg.enzotools.org/yt/fork
+
+and you're all set.  Note that before you start work you'll have to either
+clone a new copy of the repository or edit .hg/hgrc to point to the location
+of your new fork.
+
+When you're ready to submit them to the main repository, simply go to:
+
+http://hg.enzotools.org/yt/fork
+
+Make sure you notify "yt_analysis" and put in a little description.  That'll
+notify the core developers that you've got something ready to submit, and we
+will review it and (hopefully!) merge it in.  If it goes well, you may end up
+with push access to the main repository.
+
 How To Read The Source Code
 ---------------------------
 
@@ -133,21 +155,3 @@
 
 Once your branch has been merged in, mark it as closed on the wiki page.
 
-How To Submit Changes
----------------------
-
-If you do not have "push" rights on the primary mercurial repository, set up
-and use the "patchbomb" extension in mercurial to email a bundle of changes to
-the developer mailing list, yt-dev at lists.spacepope.org .
-
-The patchbomb extension is documented here:
-
-http://mercurial.selenic.com/wiki/PatchbombExtension
-
-Please be sure to specify that you wish to send a bundle.  This can be
-accomplished by setting up your hgrc to email the yt-dev mailing list and
-executing the command:
-
-   $ hg email -b
-
-Be sure to read the output of "hg help email" before doing this.


--- a/doc/install_script.sh	Wed Mar 02 21:36:02 2011 -0500
+++ b/doc/install_script.sh	Thu Mar 10 19:06:41 2011 -0800
@@ -39,7 +39,8 @@
 INST_PNG=1      # Install a local libpng?  Same things apply as with zlib.
 INST_FTYPE=1    # Install FreeType2 locally?
 INST_ENZO=0     # Clone a copy of Enzo?
-INST_FORTHON=1
+INST_SQLITE3=1  # Install a local version of SQLite3?
+INST_FORTHON=1  # Install Forthon?
 
 # If you've got YT some other place, set this to point to it.
 YT_DIR=""
@@ -179,6 +180,10 @@
 get_willwont ${INST_FTYPE}
 echo "be installing freetype2"
 
+printf "%-15s = %s so I " "INST_SQLITE3" "${INST_SQLITE3}"
+get_willwont ${INST_SQLITE3}
+echo "be installing SQLite3"
+
 printf "%-15s = %s so I " "INST_FORTHON" "${INST_FORTHON}"
 get_willwont ${INST_FORTHON}
 echo "be installing Forthon (for Halo Finding, etc)"
@@ -276,6 +281,7 @@
 [ $INST_BZLIB -eq 1 ] && get_enzotools bzip2-1.0.5.tar.gz
 [ $INST_PNG -eq 1 ] && get_enzotools libpng-1.2.43.tar.gz
 [ $INST_FTYPE -eq 1 ] && get_enzotools freetype-2.4.4.tar.gz
+[ $INST_SQLITE3 -eq 1 ] && get_enzotools sqlite-autoconf-3070500.tar.gz
 get_enzotools Python-2.6.3.tgz
 get_enzotools numpy-1.5.1.tar.gz
 get_enzotools matplotlib-1.0.0.tar.gz
@@ -374,6 +380,20 @@
 fi
 export HDF5_API=16
 
+if [ $INST_SQLITE3 -eq 1 ]
+then
+    if [ ! -e sqlite-autoconf-3070500/done ]
+    then
+        [ ! -e sqlite-autoconf-3070500 ] && tar xfz sqlite-autoconf-3070500.tar.gz
+        echo "Installing SQLite3"
+        cd sqlite-autoconf-3070500
+        ( ./configure --prefix=${DEST_DIR}/ 2>&1 ) 1>> ${LOG_FILE} || do_exit
+        ( make install 2>&1 ) 1>> ${LOG_FILE} || do_exit
+        touch done
+        cd ..
+    fi
+fi
+
 if [ ! -e Python-2.6.3/done ]
 then
     echo "Installing Python.  This may take a while, but don't worry.  YT loves you."


--- a/yt/analysis_modules/api.py	Wed Mar 02 21:36:02 2011 -0500
+++ b/yt/analysis_modules/api.py	Thu Mar 10 19:06:41 2011 -0800
@@ -35,16 +35,19 @@
     Halo, \
     HOPHalo, \
     parallelHOPHalo, \
+    LoadedHalo, \
     FOFHalo, \
     HaloList, \
     HOPHaloList, \
     FOFHaloList, \
     parallelHOPHaloList, \
+    LoadedHaloList, \
     GenericHaloFinder, \
     parallelHF, \
     HOPHaloFinder, \
     FOFHaloFinder, \
-    HaloFinder
+    HaloFinder, \
+    LoadHaloes
 
 from .halo_mass_function.api import \
     HaloMassFcn, \


--- a/yt/analysis_modules/halo_finding/api.py	Wed Mar 02 21:36:02 2011 -0500
+++ b/yt/analysis_modules/halo_finding/api.py	Thu Mar 10 19:06:41 2011 -0800
@@ -32,13 +32,16 @@
     Halo, \
     HOPHalo, \
     parallelHOPHalo, \
+    LoadedHalo, \
     FOFHalo, \
     HaloList, \
     HOPHaloList, \
     FOFHaloList, \
     parallelHOPHaloList, \
+    LoadedHaloList, \
     GenericHaloFinder, \
     parallelHF, \
     HOPHaloFinder, \
     FOFHaloFinder, \
-    HaloFinder
+    HaloFinder, \
+    LoadHaloes


--- a/yt/analysis_modules/halo_finding/halo_objects.py	Wed Mar 02 21:36:02 2011 -0500
+++ b/yt/analysis_modules/halo_finding/halo_objects.py	Thu Mar 10 19:06:41 2011 -0800
@@ -32,12 +32,14 @@
 import numpy as na
 import random
 import sys
+from collections import defaultdict
 
 from yt.funcs import *
 
 from yt.config import ytcfg
 from yt.utilities.performance_counters import \
     yt_counters, time_function
+from yt.utilities.math_utils import periodic_dist
 
 from .hop.EnzoHop import RunHOP
 from .fof.EnzoFOF import RunFOF
@@ -73,6 +75,7 @@
         self._max_dens = halo_list._max_dens
         self.id = id
         self.data = halo_list._data_source
+        self.pf = self.data.pf
         if indices is not None:
             self.indices = halo_list._base_indices[indices]
         else:
@@ -360,25 +363,26 @@
             return None
         self.bin_count = bins
         # Cosmology
-        h = self.data.pf.hubble_constant
-        Om_matter = self.data.pf.omega_matter
-        z = self.data.pf.current_redshift
+        h = self.pf.hubble_constant
+        Om_matter = self.pf.omega_matter
+        z = self.pf.current_redshift
+        period = self.pf.domain_right_edge - \
+            self.pf.domain_left_edge
+        cm = self.pf["cm"]
+        thissize = max(self.size, self.indices.size)
         rho_crit_now = 1.8788e-29 * h**2.0 * Om_matter # g cm^-3
         Msun2g = 1.989e33
         rho_crit = rho_crit_now * ((1.0 + z)**3.0)
-        
         # Get some pertinent information about the halo.
         self.mass_bins = na.zeros(self.bin_count+1, dtype='float64')
-        dist = na.empty(self.indices.size, dtype='float64')
+        dist = na.empty(thissize, dtype='float64')
         cen = self.center_of_mass()
-        period = self.data.pf.domain_right_edge - \
-            self.data.pf.domain_left_edge
         mark = 0
         # Find the distances to the particles. I don't like this much, but I
         # can't see a way to eliminate a loop like this, either here or in
         # yt.math.
-        for pos in izip(self["particle_position_x"], self["particle_position_y"],
-                self["particle_position_z"]):
+        for pos in itertools.izip(self["particle_position_x"],
+                self["particle_position_y"], self["particle_position_z"]):
             dist[mark] = periodic_dist(cen, pos, period)
             mark += 1
         # Set up the radial bins.
@@ -397,7 +401,7 @@
         # Calculate the over densities in the bins.
         self.overdensity = self.mass_bins * Msun2g / \
         (4./3. * math.pi * rho_crit * \
-        (self.radial_bins * self.data.pf["cm"])**3.0)
+        (self.radial_bins * cm)**3.0)
         
 
 class HOPHalo(Halo):
@@ -720,8 +724,8 @@
             # Find the distances to the particles. I don't like this much, but I
             # can't see a way to eliminate a loop like this, either here or in
             # yt.math.
-            for pos in izip(self["particle_position_x"], self["particle_position_y"],
-                    self["particle_position_z"]):
+            for pos in itertools.izip(self["particle_position_x"],
+                    self["particle_position_y"], self["particle_position_z"]):
                 dist[mark] = periodic_dist(cen, pos, period)
                 mark += 1
             dist_min, dist_max = min(dist), max(dist)
@@ -785,6 +789,186 @@
         r"""Not implemented."""
         return self.center_of_mass()
 
+class LoadedHalo(Halo):
+    def __init__(self, pf, id, size=None, CoM=None,
+        max_dens_point=None, group_total_mass=None, max_radius=None, bulk_vel=None,
+        rms_vel=None, fnames=None):
+        self.pf = pf
+        self.id = id
+        self.size = size
+        self.CoM = CoM
+        self.max_dens_point = max_dens_point
+        self.group_total_mass = group_total_mass
+        self.max_radius = max_radius
+        self.bulk_vel = bulk_vel
+        self.rms_vel = rms_vel
+        # fnames = the names of the h5 files that have particle data for this halo
+        self.fnames = fnames
+        self.bin_count = None
+        self.overdensity = None
+        self.saved_fields = {}
+        self.particle_mask = None
+        self.ds_sort = None
+        self.indices = na.array([]) # Never used for a LoadedHalo.
+
+    def __getitem__(self, key):
+        # This function will try to get particle data in one of three ways,
+        # in descending preference.
+        # 1. From saved_fields, e.g. we've already got it.
+        # 2. From the halo h5 files off disk.
+        # 3. Use the unique particle indexes of the halo to select a missing
+        # field from an AMR Sphere.
+        try:
+            # We've already got it.
+            return self.saved_fields[key]
+        except KeyError:
+            # Gotta go get it from the halo h5 files.
+            field_data = self._get_particle_data(self.id, self.fnames,
+                self.size, key)
+            #if key == 'particle_position_x': field_data = None
+            if field_data is not None:
+                self.saved_fields[key] = field_data
+                return self.saved_fields[key]
+            else:
+                # Dynamically create the masking array for particles, and get
+                # the data using standard yt methods. The 1.05 is there to
+                # account for possible silliness having to do with whether
+                # the maximum density or center of mass was used to calculate
+                # the maximum radius.
+                ds = self.pf.h.sphere(self.CoM, 1.05 * self.max_radius)
+                if self.particle_mask is None:
+                    pid = self.__getitem__('particle_index')
+                    sp_pid = ds['particle_index']
+                    self.ds_sort = sp_pid.argsort()
+                    sp_pid = sp_pid[self.ds_sort]
+                    # searchsorted gives, for each element of pid, the
+                    # position of that index within the sorted sp_pid. This
+                    # works because each element of pid appears in sp_pid
+                    # exactly once.
+                    self.particle_mask = na.searchsorted(sp_pid, pid)
+                # We won't store this field below in saved_fields because
+                # that would mean keeping two copies of it, one in the yt
+                # machinery and one here.
+                return ds[key][self.ds_sort][self.particle_mask]
+
+    def _get_particle_data(self, halo, fnames, size, field):
+        # Given a list of file names, a halo, its size, and the desired field,
+        # this returns the particle data for that halo.
+        # First get the list of fields from the first file. Not all fields
+        # are saved all the time (e.g. creation_time, particle_type).
+        mylog.info("Getting field %s from hdf5 halo particle files." % field)
+        f = h5py.File(fnames[0])
+        fields = f["Halo%08d" % halo].keys()
+        # If we don't have this field, we can give up right now.
+        if field not in fields: return None
+        if field == 'particle_index' or field == 'particle_type':
+            # the only integer fields
+            field_data = na.empty(size, dtype='int64')
+        else:
+            field_data = na.empty(size, dtype='float64')
+        f.close()
+        offset = 0
+        for fname in fnames:
+            f = h5py.File(fname)
+            this = f["Halo%08d" % halo][field][:]
+            s = this.size
+            field_data[offset:offset+s] = this
+            offset += s
+            f.close()
+        return field_data
+        
+    def center_of_mass(self):
+        r"""Calculate and return the center of mass.
+
+        The center of mass of the halo is directly calculated and returned.
+        
+        Examples
+        --------
+        >>> com = halos[0].center_of_mass()
+        """
+        return self.CoM
+    
+    def maximum_density_location(self):
+        r"""Return the location HOP identified as maximally dense.
+        
+        Return the location HOP identified as maximally dense.
+
+        Examples
+        --------
+        >>> max_dens_loc = halos[0].maximum_density_location()
+        """
+        return self.max_dens_point[1:]
+
+    def maximum_density(self):
+        r"""Return the HOP-identified maximum density.
+
+        Return the HOP-identified maximum density.
+
+        Examples
+        --------
+        >>> max_dens = halos[0].maximum_density()
+        """
+        return self.max_dens_point[0]
+
+    def total_mass(self):
+        r"""Returns the total mass in solar masses of the halo.
+        
+        Returns the total mass in solar masses of just the particles in the
+        halo.
+
+        Examples
+        --------
+        >>> halos[0].total_mass()
+        """
+        return self.group_total_mass
+
+    def bulk_velocity(self):
+        r"""Returns the mass-weighted average velocity in cm/s.
+
+        This calculates and returns the mass-weighted average velocity of just
+        the particles in the halo in cm/s.
+        
+        Examples
+        --------
+        >>> bv = halos[0].bulk_velocity()
+        """
+        return self.bulk_vel
+
+    def rms_velocity(self):
+        r"""Returns the mass-weighted RMS velocity for the halo
+        particles in cgs units.
+
+        Calculate and return the mass-weighted RMS velocity for just the
+        particles in the halo.  The bulk velocity of the halo is subtracted
+        before computation.
+        
+        Examples
+        --------
+        >>> rms_vel = halos[0].rms_velocity()
+        """
+        return self.rms_vel
+
+    def maximum_radius(self):
+        r"""Returns the maximum radius in the halo for all particles,
+        either from the point of maximum density or from the
+        center of mass.
+
+        The radius was computed (with periodicity taken into account) when
+        the halo was saved, so unlike `Halo.maximum_radius` this method
+        takes no arguments and simply returns the stored value.
+        
+        Examples
+        --------
+        >>> radius = halos[0].maximum_radius()
+        """
+        return self.max_radius
+
 class HaloList(object):
 
     _fields = ["particle_position_%s" % ax for ax in 'xyz']
@@ -1106,6 +1290,52 @@
         """
         HaloList.write_out(self, filename)
 
+class LoadedHaloList(HaloList):
+    _name = "Loaded"
+    
+    def __init__(self, pf, basename):
+        self.pf = pf
+        self._groups = []
+        self.basename = basename
+        self._retrieve_halos()
+    
+    def _retrieve_halos(self):
+        # First get the halo particulars.
+        lines = file("%s.out" % self.basename)
+        # The location of particle data for each halo.
+        locations = self._collect_halo_data_locations()
+        halo = 0
+        for line in lines:
+            # Skip the comment lines at top.
+            if line[0] == "#": continue
+            line = line.split()
+            # get the particle data
+            size = int(line[2])
+            fnames = locations[halo]
+            # Everything else
+            CoM = na.array([float(line[7]), float(line[8]), float(line[9])])
+            max_dens_point = na.array([float(line[3]), float(line[4]),
+                float(line[5]), float(line[6])])
+            group_total_mass = float(line[1])
+            max_radius = float(line[13])
+            bulk_vel = na.array([float(line[10]), float(line[11]),
+                float(line[12])])
+            rms_vel = float(line[14])
+            self._groups.append(LoadedHalo(self.pf, halo, size, CoM,
+                max_dens_point, group_total_mass, max_radius, bulk_vel,
+                rms_vel, fnames))
+            halo += 1
+    
+    def _collect_halo_data_locations(self):
+        # The halos are listed in order in the file.
+        lines = file("%s.txt" % self.basename)
+        locations = []
+        for line in lines:
+            line = line.split()
+            locations.append(line[1:])
+        lines.close()
+        return locations
+
 class parallelHOPHaloList(HaloList,ParallelAnalysisInterface):
     _name = "parallelHOP"
     _halo_class = parallelHOPHalo
@@ -1481,6 +1711,32 @@
             if not self._is_mine(halo): continue
             halo.write_particle_list(f)
 
+    def dump(self, basename="HopAnalysis"):
+        r"""Save the full halo data to disk.
+        
+        This function will save the halo data in such a manner that it can be
+        easily re-loaded later using `GenericHaloFinder.load`.
+        This is similar in concept to pickling the data, but outputs it in
+        the already-established data formats. The summary halo data is
+        written to a text file (e.g. "HopAnalysis.out") using write_out(),
+        the particle data to hdf5 files (e.g. "HopAnalysis.h5") using
+        write_particle_lists(), and the mapping of halos to their particle
+        files to a text file (e.g. "HopAnalysis.txt") using
+        write_particle_lists_txt().
+        
+        Parameters
+        ----------
+        basename : String
+            The base name for the files the data will be written to. Default = 
+            "HopAnalysis".
+        
+        Examples
+        --------
+        >>> halos.dump("MyHalos")
+        """
+        self.write_out("%s.out" % basename)
+        self.write_particle_lists(basename)
+        self.write_particle_lists_txt(basename)
+
 class parallelHF(GenericHaloFinder, parallelHOPHaloList):
     def __init__(self, pf, subvolume=None,threshold=160, dm_only=True, \
         resize=True, rearrange=True,\
@@ -1927,3 +2183,32 @@
         self._join_halolists()
 
 HaloFinder = HOPHaloFinder
+
+class LoadHaloes(GenericHaloFinder, LoadedHaloList):
+    def __init__(self, pf, basename):
+        r"""Load the full halo data into memory.
+        
+        This class takes the output of `GenericHaloFinder.dump` and
+        re-establishes the list of halos in memory. This enables the full set
+        of halo analysis features without running the halo finder again. To
+        be precise, the particle data for each halo is only read in when
+        necessary, so examining a single halo will not require as much memory
+        as is required for halo finding.
+        
+        Parameters
+        ----------
+        basename : String
+            The base name of the files that will be read in. This should match
+            what was used when `GenericHaloFinder.dump` was called.
+        
+        Examples
+        --------
+        >>> pf = load("data0005")
+        >>> halos = LoadHaloes(pf, "HopAnalysis")
+        """
+        self.basename = basename
+        LoadedHaloList.__init__(self, pf, self.basename)
+
+
+        
\ No newline at end of file
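
A standalone sketch of the argsort/searchsorted matching used in
LoadedHalo.__getitem__ above, with invented data (not part of the commit):
given the particle indices saved for the halo and the indices of every
particle in an enclosing sphere, it recovers the halo's rows in any field
of the sphere.

    import numpy as na

    sp_pid = na.array([9, 2, 7, 4, 1])   # particle_index for the whole sphere
    pid = na.array([4, 9, 1])            # particle_index saved for the halo

    ds_sort = sp_pid.argsort()           # sort order for the sphere's indices
    sorted_pid = sp_pid[ds_sort]         # [1, 2, 4, 7, 9]
    # Each element of pid occurs exactly once in sorted_pid, so searchsorted
    # returns its exact position there.
    particle_mask = na.searchsorted(sorted_pid, pid)

    # Any sphere field, reordered the same way, yields halo-only values:
    field = na.array([90., 20., 70., 40., 10.])  # aligned with sp_pid
    print(field[ds_sort][particle_mask])         # [ 40.  90.  10.]

The mask is computed once per halo and then reused for every later field
lookup, which is why only ds_sort and particle_mask are kept on the object.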


--- a/yt/analysis_modules/halo_profiler/multi_halo_profiler.py	Wed Mar 02 21:36:02 2011 -0500
+++ b/yt/analysis_modules/halo_profiler/multi_halo_profiler.py	Thu Mar 10 19:06:41 2011 -0800
@@ -34,8 +34,7 @@
     load
 from yt.data_objects.profiles import \
     BinnedProfile1D, EmptyProfileData
-from yt.analysis_modules.halo_finding.api import \
-    HaloFinder
+from yt.analysis_modules.halo_finding.api import *
 from .halo_filters import \
     VirialFilter
 from yt.data_objects.field_info_container import \
@@ -54,15 +53,23 @@
 
 class HaloProfiler(ParallelAnalysisInterface):
     "Radial profiling, filtering, and projections for halos in cosmological simulations."
-    def __init__(self, dataset, halos='multiple', halo_list_file='HopAnalysis.out', halo_list_format='yt_hop',
-                 halo_finder_function=HaloFinder, halo_finder_args=None, halo_finder_kwargs=None,
-                 use_density_center=False, density_center_exponent=1.0, use_field_max_center=None,
+    def __init__(self, dataset, output_dir=None,
+                 halos='multiple', halo_list_file='HopAnalysis.out', 
+                 halo_list_format='yt_hop', halo_finder_function=parallelHF, 
+                 halo_finder_args=None, 
+                 halo_finder_kwargs=dict(threshold=160.0, safety=1.5, 
+                                         dm_only=False, resize=True, 
+                                         fancy_padding=True, rearrange=True),
+                 use_density_center=False, density_center_exponent=1.0,
+                 use_field_max_center=None,
                  halo_radius=0.1, radius_units='1', n_profile_bins=50,
                  profile_output_dir='radial_profiles', projection_output_dir='projections',
                  projection_width=8.0, projection_width_units='mpc', project_at_level='max',
                  velocity_center=['bulk', 'halo'], filter_quantities=['id','center']):
         """
         Initialize a HaloProfiler object.
+        :param output_dir (str): if specified, all output will be put into this path instead of 
+               in the dataset directories.  Default: None.
         :param halos (str): "multiple" for profiling more than one halo.  In this mode halos are read in 
                from a list or identified with a halo finder.  In "single" mode, the one and only halo 
                center is identified automatically as the location of the peak in the density field.  
@@ -114,7 +121,7 @@
         """
 
         self.dataset = dataset
-
+        self.output_dir = output_dir
         self.profile_output_dir = profile_output_dir
         self.projection_output_dir = projection_output_dir
         self.n_profile_bins = n_profile_bins
@@ -132,6 +139,10 @@
         self.filtered_halos = []
         self._projection_halo_list = []
 
+        # Create output directory if specified
+        if self.output_dir is not None:
+            self.__check_directory(self.output_dir)
+
         # Set halo finder function and parameters, if needed.
         self.halo_finder_function = halo_finder_function
         self.halo_finder_args = halo_finder_args
@@ -153,7 +164,8 @@
         # dictionary: a dictionary containing fields and their corresponding columns.
         self.halo_list_file = halo_list_file
         if halo_list_format == 'yt_hop':
-            self.halo_list_format = {'id':0, 'mass':1, 'center':[7, 8, 9], 'velocity':[10, 11, 12], 'r_max':13}
+            self.halo_list_format = {'id':0, 'mass':1, 'np': 2, 
+                                     'center':[7, 8, 9], 'velocity':[10, 11, 12], 'r_max':13}
         elif halo_list_format == 'enzo_hop':
             self.halo_list_format = {'id':0, 'center':[4, 5, 6]}
         elif halo_list_format == 'p-groupfinder':
@@ -169,7 +181,8 @@
         self.density_center_exponent = density_center_exponent
         if self.use_density_center:
             def _MatterDensityXTotalMass(field, data):
-                return na.power((data['Matter_Density'] * data['TotalMassMsun']), self.density_center_exponent)
+                return na.power((data['Matter_Density'] * data['TotalMassMsun']), 
+                                self.density_center_exponent)
             def _Convert_MatterDensityXTotalMass(data):
                 return 1
             add_field("MatterDensityXTotalMass", units=r"",
@@ -288,8 +301,14 @@
         # Add profile fields necessary for calculating virial quantities.
         if virial_filter: self._check_for_needed_profile_fields()
 
-        outputDir = "%s/%s" % (self.pf.fullpath, self.profile_output_dir)
-        self.__check_directory(outputDir)
+        # Create output directory.
+        if self.output_dir is not None:
+            self.__check_directory("%s/%s" % (self.output_dir, self.pf.directory))
+            my_output_dir = "%s/%s/%s" % (self.output_dir, self.pf.directory, 
+                                          self.profile_output_dir)
+        else:
+            my_output_dir = "%s/%s" % (self.pf.fullpath, self.profile_output_dir)
+        self.__check_directory(my_output_dir)
 
         # Profile all halos.
         for halo in self._get_objs('all_halos', round_robin=True):
@@ -305,7 +324,7 @@
 
             if filter_result and len(self.profile_fields) > 0:
 
-                profile_filename = "%s/Halo_%04d_profile.dat" % (outputDir, halo['id'])
+                profile_filename = "%s/Halo_%04d_profile.dat" % (my_output_dir, halo['id'])
 
                 profiledHalo = self._get_halo_profile(halo, profile_filename, virial_filter=virial_filter)
 
@@ -456,8 +475,14 @@
                 (self.pf.parameters['RefineBy']**proj_level)
             projectionResolution = int(self.projection_width / proj_dx)
 
-        outputDir = "%s/%s" % (self.pf.fullpath, self.projection_output_dir)
-        self.__check_directory(outputDir)
+        # Create output directory.
+        if self.output_dir is not None:
+            self.__check_directory("%s/%s" % (self.output_dir, self.pf.directory))
+            my_output_dir = "%s/%s/%s" % (self.output_dir, self.pf.directory, 
+                                          self.projection_output_dir)
+        else:
+            my_output_dir = "%s/%s" % (self.pf.fullpath, self.projection_output_dir)
+        self.__check_directory(my_output_dir)
 
         center = [0.5 * (self.pf.parameters['DomainLeftEdge'][w] + self.pf.parameters['DomainRightEdge'][w])
                   for w in range(self.pf.parameters['TopGridRank'])]
@@ -519,7 +544,7 @@
                 if save_cube:
                     axis_labels = ['x', 'y', 'z']
                     dataFilename = "%s/Halo_%04d_%s_data.h5" % \
-                            (outputDir, halo['id'], axis_labels[w])
+                            (my_output_dir, halo['id'], axis_labels[w])
                     mylog.info("Saving projection data to %s." % dataFilename)
 
                     output = h5py.File(dataFilename, "a")
@@ -535,7 +560,7 @@
                     output.close()
 
                 if save_images:
-                    pc.save("%s/Halo_%04d" % (outputDir, halo['id']), force_save=True)
+                    pc.save("%s/Halo_%04d" % (my_output_dir, halo['id']), force_save=True)
 
             del region
 
@@ -573,13 +598,17 @@
         if filename is None:
             filename = self.halo_list_file
 
-        hopFile = "%s/%s" % (self.pf.fullpath, filename)
+        if self.output_dir is not None:
+            self.__check_directory("%s/%s" % (self.output_dir, self.pf.directory))
+            hop_file = "%s/%s/%s" % (self.output_dir, self.pf.directory, filename)
+        else:
+            hop_file = "%s/%s" % (self.pf.fullpath, filename)
 
-        if not(os.path.exists(hopFile)):
-            mylog.info("Hop file not found, running hop to get halos.")
-            self._run_hop(hopFile)
+        if not(os.path.exists(hop_file)):
+            mylog.info("Halo finder file not found, running halo finder to get halos.")
+            self._run_hop(hop_file)
 
-        self.all_halos = self._read_halo_list(hopFile)
+        self.all_halos = self._read_halo_list(hop_file)
 
     def _read_halo_list(self, listFile):
         """
@@ -685,11 +714,11 @@
             return None
 
     @parallel_blocking_call
-    def _run_hop(self, hopFile):
+    def _run_hop(self, hop_file):
         "Run hop to get halos."
 
         hop_results = self.halo_finder_function(self.pf, *self.halo_finder_args, **self.halo_finder_kwargs)
-        hop_results.write_out(hopFile)
+        hop_results.write_out(hop_file)
 
         del hop_results
         self.pf.h.clear_all_data()
@@ -752,13 +781,13 @@
         fid.close()
 
     @parallel_root_only
-    def __check_directory(self, outputDir):
-        if (os.path.exists(outputDir)):
-            if not(os.path.isdir(outputDir)):
-                mylog.error("Output directory exists, but is not a directory: %s." % outputDir)
-                raise IOError(outputDir)
+    def __check_directory(self, my_output_dir):
+        if (os.path.exists(my_output_dir)):
+            if not(os.path.isdir(my_output_dir)):
+                mylog.error("Output directory exists, but is not a directory: %s." % my_output_dir)
+                raise IOError(my_output_dir)
         else:
-            os.mkdir(outputDir)
+            os.mkdir(my_output_dir)
 
 def shift_projections(pf, pc, oldCenter, newCenter, axis):
     """


--- a/yt/analysis_modules/level_sets/contour_finder.py	Wed Mar 02 21:36:02 2011 -0500
+++ b/yt/analysis_modules/level_sets/contour_finder.py	Thu Mar 10 19:06:41 2011 -0800
@@ -291,6 +291,7 @@
         total_contours += na.unique(grid["tempContours"][grid["tempContours"] > -1]).size
         new_contours = na.unique(grid["tempContours"][grid["tempContours"] > -1]).tolist()
         tree += zip(new_contours, new_contours)
+    tree = set(tree)
     pbar.finish()
     pbar = get_pbar("Calculating joins ", len(data_source._grids))
     grid_set = set()
@@ -299,9 +300,10 @@
         cg = grid.retrieve_ghost_zones(1, "tempContours", smoothed=False)
         grid_set.update(set(cg._grids))
         fd = cg["tempContours"].astype('int64')
-        tree += amr_utils.construct_boundary_relationships(fd)
+        boundary_tree = amr_utils.construct_boundary_relationships(fd)
+        tree.update(((a, b) for a, b in boundary_tree))
     pbar.finish()
-    sort_new = na.array(list(set(tree)), dtype='int64')
+    sort_new = na.array(list(tree), dtype='int64')
     mylog.info("Coalescing %s joins", sort_new.shape[0])
     joins = coalesce_join_tree(sort_new)
     #joins = [(i, na.array(list(j), dtype="int64")) for i, j in sorted(joins.items())]
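
The switch from a list to a set matters because the same (parent, child)
join can be generated once per boundary cell; a toy illustration with
invented contour ids:

    tree = set()
    tree.update([(5, 3), (5, 3), (7, 2), (5, 3)])  # duplicates collapse
    print(sorted(tree))                            # [(5, 3), (7, 2)]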


--- a/yt/analysis_modules/two_point_functions/two_point_functions.py	Wed Mar 02 21:36:02 2011 -0500
+++ b/yt/analysis_modules/two_point_functions/two_point_functions.py	Thu Mar 10 19:06:41 2011 -0800
@@ -29,7 +29,7 @@
 from yt.utilities.parallel_tools.parallel_analysis_interface import ParallelAnalysisInterface, parallel_blocking_call, parallel_root_only
 
 try:
-    from yt.extensions.kdtree import *
+    from yt.utilities.kdtree import *
 except ImportError:
     mylog.debug("The Fortran kD-Tree did not import correctly.")
 
@@ -42,7 +42,7 @@
     def __init__(self, pf, fields, left_edge=None, right_edge=None,
             total_values=1000000, comm_size=10000, length_type="lin",
             length_number=10, length_range=None, vol_ratio = 1,
-            salt=0):
+            salt=0, theta=None, phi=None):
         r""" Initialize a two point functions object.
         
         Parameters
@@ -81,6 +81,15 @@
             keeping everything else constant from this set: (MPI task count, 
             number of ruler lengths, ruler min/max, number of functions,
             number of point pairs per ruler length). Default = 0.
+        theta : Float
+            For random pairs of points, the second point is found by traversing
+            a distance along a ray set by the angle (phi, theta) from the first
+            point. To keep this angle constant, set ``theta`` to a value in the
+            range [0, pi]. Default = None, which will randomize theta for
+            every pair of points.
+        phi : Float
+            Similar to theta above, but the range of values is [0, 2*pi).
+            Default = None, which will randomize phi for every pair of points.
         
         Examples
         --------
@@ -95,6 +104,8 @@
             raise ImportError("You need to install the Forthon kD-Tree")
         self._fsets = []
         self.fields = fields
+        self.constant_theta = theta
+        self.constant_phi = phi
         # MPI stuff.
         self.size = self._mpi_get_size()
         self.mine = self._mpi_get_rank()
@@ -462,13 +473,20 @@
             r1[:,dim] = self.mt.uniform(low=self.ds.left_edge[dim],
                 high=self.ds.right_edge[dim], size=size)
         # Next we find the second point, determined by a random
-        # theta, phi angle.
-        theta = self.mt.uniform(low=0, high=2.*math.pi, size=size)
-        phi = self.mt.uniform(low=-math.pi/2., high=math.pi/2., size=size)
+        # theta, phi angle. See Eqns. 1 & 2 from 
+        # http://mathworld.wolfram.com/SpherePointPicking.html,
+        # but phi and theta are switched to the Physics convention.
+        if self.constant_phi is None:
+            phi = self.mt.uniform(low=0, high=2.*math.pi, size=size)
+        else: phi = self.constant_phi * na.ones(size, dtype='float64')
+        if self.constant_theta is None:
+            v = self.mt.uniform(low=0., high=1, size=size)
+            theta = na.arccos(2 * v - 1)
+        else: theta = self.constant_theta * na.ones(size, dtype='float64')
         r2 = na.empty((size,3), dtype='float64')
-        r2[:,0] = r1[:,0] + length * na.cos(theta) * na.cos(phi)
-        r2[:,1] = r1[:,1] + length * na.sin(theta) * na.cos(phi)
-        r2[:,2] = r1[:,2] + length * na.sin(phi)
+        r2[:,0] = r1[:,0] + length * na.cos(phi) * na.sin(theta)
+        r2[:,1] = r1[:,1] + length * na.sin(phi) * na.sin(theta)
+        r2[:,2] = r1[:,2] + length * na.cos(theta)
         # Reflect so it's inside the (full) volume.
         r2 %= self.period
         return (r1, r2)
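
For reference, drawing theta itself uniformly would cluster points near the
poles; taking theta = arccos(2v - 1) with v uniform on [0, 1) makes
cos(theta) uniform, which is the sphere point picking result cited above.
A standalone sketch with an invented seed and ruler length:

    import math
    import numpy as na

    mt = na.random.RandomState(42)
    size, length = 5, 0.1
    phi = mt.uniform(low=0., high=2. * math.pi, size=size)
    v = mt.uniform(low=0., high=1., size=size)
    theta = na.arccos(2 * v - 1)        # uniform in cos(theta)
    dx = length * na.cos(phi) * na.sin(theta)
    dy = length * na.sin(phi) * na.sin(theta)
    dz = length * na.cos(theta)
    # dx**2 + dy**2 + dz**2 == length**2 for every point pair.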


--- a/yt/data_objects/data_containers.py	Wed Mar 02 21:36:02 2011 -0500
+++ b/yt/data_objects/data_containers.py	Thu Mar 10 19:06:41 2011 -0800
@@ -136,13 +136,19 @@
         self.real_grid = grid
         self.child_mask = 1
         self.ActiveDimensions = self.data['x'].shape
+        self.DW = grid.pf.domain_right_edge - grid.pf.domain_left_edge
+        
     def __getitem__(self, field):
         if field not in self.data.keys():
             if field == "RadiusCode":
                 center = self.field_parameters['center']
-                tr = na.sqrt( (self['x'] - center[0])**2.0 +
-                              (self['y'] - center[1])**2.0 +
-                              (self['z'] - center[2])**2.0 )
+                tempx = na.abs(self['x'] - center[0])
+                tempx = na.minimum(tempx, self.DW[0] - tempx)
+                tempy = na.abs(self['y'] - center[1])
+                tempy = na.minimum(tempy, self.DW[1] - tempy)
+                tempz = na.abs(self['z'] - center[2])
+                tempz = na.minimum(tempz, self.DW[2] - tempz)
+                tr = na.sqrt( tempx**2.0 + tempy**2.0 + tempz**2.0 )
             else:
                 raise KeyError(field)
         else: tr = self.data[field]
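
A standalone sketch of the minimum-image rule now used for RadiusCode, with
invented positions: along each axis the separation is never allowed to
exceed half the domain width.

    import numpy as na

    def periodic_radius(pos, center, DW):
        d = na.abs(pos - center)
        d = na.minimum(d, DW - d)    # take the shorter way around the box
        return na.sqrt((d * d).sum())

    DW = na.ones(3)
    print(periodic_radius(na.array([0.95, 0.5, 0.5]),
                          na.array([0.05, 0.5, 0.5]), DW))  # 0.1, not 0.9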


--- a/yt/data_objects/particle_io.py	Wed Mar 02 21:36:02 2011 -0500
+++ b/yt/data_objects/particle_io.py	Thu Mar 10 19:06:41 2011 -0800
@@ -129,7 +129,10 @@
         ParticleIOHandler.__init__(self, pf, source)
 
     def _get_args(self):
-        return (1, (na.array(self.center, dtype='float64'), self.radius))
+        DLE = na.array(self.pf.domain_left_edge, dtype='float64')
+        DRE = na.array(self.pf.domain_right_edge, dtype='float64')
+        return (1, (na.array(self.center, dtype='float64'), self.radius,
+            1, DLE, DRE))
 
 class ParticleIOHandlerDisk(ParticleIOHandlerImplemented):
     _source_type = "disk"


--- a/yt/data_objects/universal_fields.py	Wed Mar 02 21:36:02 2011 -0500
+++ b/yt/data_objects/universal_fields.py	Thu Mar 10 19:06:41 2011 -0800
@@ -289,9 +289,9 @@
 add_field("Pressure", function=_Pressure, units=r"\rm{dyne}/\rm{cm}^{2}")
 
 def _Entropy(field, data):
-    return (kboltz/mh) * data["Temperature"] / \
-           (data["MeanMolecularWeight"] * data["Density"]**(2./3.))
-add_field("Entropy", units=r"\rm{ergs}\/\rm{cm}^{2}",
+    return kboltz * data["Temperature"] / \
+           (data["NumberDensity"]**(data.pf["Gamma"] - 1.0))
+add_field("Entropy", units=r"\rm{ergs}\ \rm{cm}^{2}",
           function=_Entropy)
 
 def _Height(field, data):
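
The revised field is the adiabatic entropy-like quantity
K = kboltz * T / n**(Gamma - 1) used in the cluster literature (erg cm^2
for Gamma = 5/3), replacing the old (kboltz/mh) * T / (mu * rho**(2/3))
form. A sketch with assumed cgs values:

    kboltz, Gamma = 1.3806504e-16, 5. / 3.  # erg/K, adiabatic index
    T, n = 1.0e7, 1.0e-3                    # K, cm^-3
    K = kboltz * T / n ** (Gamma - 1.0)
    print(K)                                # ~1.4e-7 erg cm^2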


--- a/yt/frontends/chombo/data_structures.py	Wed Mar 02 21:36:02 2011 -0500
+++ b/yt/frontends/chombo/data_structures.py	Thu Mar 10 19:06:41 2011 -0800
@@ -24,13 +24,34 @@
   along with this program.  If not, see <http://www.gnu.org/licenses/>.
 """
 
+import h5py
+import re
+import os
+import weakref
+import numpy as na
+
+from collections import \
+     defaultdict
+from string import \
+     strip, \
+     rstrip
+from stat import \
+     ST_CTIME
+
+from .definitions import \
+     pluto2enzoDict, \
+     yt2plutoFieldsDict, \
+     parameterDict
+
 from yt.funcs import *
 from yt.data_objects.grid_patch import \
-           AMRGridPatch
+     AMRGridPatch
 from yt.data_objects.hierarchy import \
-           AMRHierarchy
+     AMRHierarchy
 from yt.data_objects.static_output import \
-           StaticOutput
+     StaticOutput
+from yt.utilities.definitions import \
+     mpc_conversion
 
 from .fields import ChomboFieldContainer
 
@@ -66,12 +87,15 @@
     grid = ChomboGrid
     
     def __init__(self,pf,data_style='chombo_hdf5'):
+        self.domain_left_edge = pf.domain_left_edge # need these to determine absolute grid locations
+        self.domain_right_edge = pf.domain_right_edge # need these to determine absolute grid locations
         self.data_style = data_style
         self.field_info = ChomboFieldContainer()
         self.field_indexes = {}
         self.parameter_file = weakref.proxy(pf)
         # for now, the hierarchy file is the parameter file!
         self.hierarchy_filename = self.parameter_file.parameter_filename
+        self.hierarchy = os.path.abspath(self.hierarchy_filename)
         self.directory = os.path.dirname(self.hierarchy_filename)
         self._fhandle = h5py.File(self.hierarchy_filename)
 
@@ -117,8 +141,8 @@
                                start = si, stop = ei)
                 self.grids.append(pg)
                 self.grids[-1]._level_id = level_id
-                self.grid_left_edge[i] = dx*si.astype(self.float_type)
-                self.grid_right_edge[i] = dx*(ei.astype(self.float_type) + 1)
+                self.grid_left_edge[i] = dx*si.astype(self.float_type) + self.domain_left_edge
+                self.grid_right_edge[i] = dx*(ei.astype(self.float_type)+1) + self.domain_left_edge
                 self.grid_particle_count[i] = 0
                 self.grid_dimensions[i] = ei - si + 1
                 i += 1
@@ -152,18 +176,13 @@
     _fieldinfo_class = ChomboFieldContainer
     
     def __init__(self, filename, data_style='chombo_hdf5',
-                 storage_filename = None):
+                 storage_filename = None, ini_filename = None):
+        # hardcoded for now 
+        self.current_time = 0.0
+        self.ini_filename = ini_filename
         StaticOutput.__init__(self,filename,data_style)
         self.storage_filename = storage_filename
-
         self.field_info = self._fieldinfo_class()
-        # hardcoded for now
-        self.current_time = 0.0
-        # These should be explicitly obtained from the file, but for now that
-        # will wait until a reorganization of the source tree and better
-        # generalization.
-        self.dimensionality = 3
-        self.refine_by = 2
         
     def _set_units(self):
         """
@@ -177,11 +196,11 @@
         self.conversion_factors = defaultdict(lambda: 1.0)
         self.time_units['1'] = 1
         self.units['1'] = 1.0
-        self.units['unitary'] = 1.0 / (self["DomainRightEdge"] - self["DomainLeftEdge"]).max()
+        self.units['unitary'] = 1.0 / (self.domain_right_edge - self.domain_left_edge).max()
         seconds = 1 #self["Time"]
         self.time_units['years'] = seconds / (365*3600*24.0)
         self.time_units['days']  = seconds / (3600*24.0)
-        for key in yt2orionFieldsDict:
+        for key in yt2plutoFieldsDict:
             self.conversion_factors[key] = 1.0
 
     def _setup_nounits_units(self):
@@ -194,13 +213,81 @@
             self.units[unit] = mpc_conversion[unit] / mpc_conversion["cm"]
 
 
+    def _localize(self, f, default):
+        if f is None:
+            return os.path.join(self.directory, default)
+        return f
+
     def _parse_parameter_file(self):
+        """
+        Check to see whether a 'pluto.ini' or 'orion2.ini' file
+        exists in the plot file directory. If one does, attempt to parse it.
+        Otherwise, assume the left edge starts at 0 and get the right edge
+        from the hdf5 file.
+        """
+        if os.path.isfile('pluto.ini'):
+            self._parse_pluto_file('pluto.ini')
+        elif os.path.isfile('orion2.ini'):
+            self._parse_pluto_file('orion2.ini')
+        else:
+            self.unique_identifier = \
+                                   int(os.stat(self.parameter_filename)[ST_CTIME])
+            self.domain_left_edge = na.array([0.,0.,0.])
+            self.domain_right_edge = self.__calc_right_edge()
+            self.dimensionality = 3
+            self.refine_by = 2
+
+    def _parse_pluto_file(self, ini_filename):
+        """
+        Reads in an inputs file in the 'pluto.ini' format. Probably not
+        especially robust at the moment.
+        """
+        self.fullplotdir = os.path.abspath(self.parameter_filename)
+        self.ini_filename = self._localize( \
+            self.ini_filename, ini_filename)
         self.unique_identifier = \
-            int(os.stat(self.parameter_filename)[ST_CTIME])
-        self.domain_left_edge = na.array([0.,0.,0.])
-        self.domain_right_edge = self.__calc_right_edge()
-        
+                               int(os.stat(self.parameter_filename)[ST_CTIME])
+        lines = open(self.ini_filename).readlines()
+        # read the file line by line, storing important parameters
+        for lineI, line in enumerate(lines):
+            try: 
+                param, sep, vals = map(rstrip,line.partition(' '))
+            except ValueError:
+                mylog.error("ValueError: '%s'", line)
+            if pluto2enzoDict.has_key(param):
+                paramName = pluto2enzoDict[param]
+                t = map(parameterDict[paramName], vals.split())
+                if len(t) == 1:
+                    self.parameters[paramName] = t[0]
+                else:
+                    if paramName == "RefineBy":
+                        self.parameters[paramName] = t[0]
+                    else:
+                        self.parameters[paramName] = t
 
+            # assumes 3D for now
+            elif param.startswith("X1-grid"):
+                t = vals.split()
+                low1 = float(t[1])
+                high1 = float(t[4])
+                N1 = int(t[2])
+            elif param.startswith("X2-grid"):
+                t = vals.split()
+                low2 = float(t[1])
+                high2 = float(t[4])
+                N2 = int(t[2])
+            elif param.startswith("X3-grid"):
+                t = vals.split()
+                low3 = float(t[1])
+                high3 = float(t[4])
+                N3 = int(t[2])
+
+        self.dimensionality = 3
+        self.domain_left_edge = na.array([low1,low2,low3])
+        self.domain_right_edge = na.array([high1,high2,high3])
+        self.domain_dimensions = na.array([N1,N2,N3])
+        self.refine_by = self.parameters["RefineBy"]
+            
     def __calc_right_edge(self):
         fileh = h5py.File(self.parameter_filename,'r')
         dx0 = fileh['/level_0'].attrs['dx']
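
Assuming a grid line of the usual pluto.ini form (the sample line below is
an assumption, not taken from the commit), the parsing above picks out the
left edge, the number of points, and the right edge:

    line = "X1-grid    1    0.0    256    u    1.0"
    param, sep, vals = line.partition(' ')
    t = vals.split()
    low1, N1, high1 = float(t[1]), int(t[2]), float(t[4])
    # left edge 0.0, 256 points, right edge 1.0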


--- a/yt/frontends/chombo/definitions.py	Wed Mar 02 21:36:02 2011 -0500
+++ b/yt/frontends/chombo/definitions.py	Thu Mar 10 19:06:41 2011 -0800
@@ -23,3 +23,43 @@
   along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
 """
+
+parameterDict = {"CosmologyCurrentRedshift": float,
+                 "CosmologyComovingBoxSize": float,
+                 "CosmologyOmegaMatterNow": float,
+                 "CosmologyOmegaLambdaNow": float,
+                 "CosmologyHubbleConstantNow": float,
+                 "CosmologyInitialRedshift": float,
+                 "DualEnergyFormalismEta1": float,
+                 "DualEnergyFormalismEta2": float,
+                 "MetaDataString": str,
+                 "HydroMethod": int,
+                 "DualEnergyFormalism": int,
+                 "InitialTime": float,
+                 "ComovingCoordinates": int,
+                 "DensityUnits": float,
+                 "LengthUnits": float,
+                 "LengthUnit": float,
+                 "TemperatureUnits": float,
+                 "TimeUnits": float,
+                 "GravitationalConstant": float,
+                 "Gamma": float,
+                 "MultiSpecies": int,
+                 "CompilerPrecision": str,
+                 "CurrentTimeIdentifier": int,
+                 "RefineBy": int,
+                 "BoundaryConditionName": str,
+                 "TopGridRank": int,
+                 "TopGridDimensions": int,
+                 "EOSSoundSpeed": float,
+                 "EOSType": int,
+                 "NumberOfParticleAttributes": int,
+                                 }
+
+pluto2enzoDict = {"GAMMA": "Gamma",
+                  "Ref_ratio": "RefineBy"
+                                    }
+
+yt2plutoFieldsDict = {}
+pluto2ytFieldsDict = {}
+


--- a/yt/frontends/chombo/io.py	Wed Mar 02 21:36:02 2011 -0500
+++ b/yt/frontends/chombo/io.py	Thu Mar 10 19:06:41 2011 -0800
@@ -23,6 +23,9 @@
   You should have received a copy of the GNU General Public License
   along with this program.  If not, see <http://www.gnu.org/licenses/>.
 """
+import h5py
+import re
+
 from yt.utilities.io_handler import \
            BaseIOHandler
 


--- a/yt/frontends/enzo/data_structures.py	Wed Mar 02 21:36:02 2011 -0500
+++ b/yt/frontends/enzo/data_structures.py	Thu Mar 10 19:06:41 2011 -0800
@@ -226,7 +226,9 @@
         si, ei, LE, RE, fn, np = [], [], [], [], [], []
         all = [si, ei, LE, RE, fn]
         f.readline() # Blank at top
+        pbar = get_pbar("Parsing Hierarchy", self.num_grids)
         for grid_id in xrange(self.num_grids):
+            pbar.update(grid_id)
             # We will unroll this list
             si.append(_next_token_line("GridStartIndex", f))
             ei.append(_next_token_line("GridEndIndex", f))
@@ -246,6 +248,7 @@
                     continue
                 params = line.split()
                 line = f.readline()
+        pbar.finish()
         self._fill_arrays(ei, si, LE, RE, np)
         self.grids = na.array(self.grids, dtype='object')
         self.filenames = fn
@@ -559,6 +562,18 @@
     def save_data(self, *args, **kwargs):
         pass
 
+    _cached_field_list = None
+    _cached_derived_field_list = None
+
+    def _detect_fields(self):
+        if self.__class__._cached_field_list is None:
+            EnzoHierarchy._detect_fields(self)
+            self.__class__._cached_field_list = self.field_list
+            self.__class__._cached_derived_field_list = self.derived_field_list
+        else:
+            self.field_list = self.__class__._cached_field_list
+            self.derived_field_list = self.__class__._cached_derived_field_list
+
     def _generate_random_grids(self):
         my_rank = self._mpi_get_rank()
         my_grids = self.grids[self.grid_procs.ravel() == my_rank]
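
The new _detect_fields caches at the class level, so the first in-memory
hierarchy pays for field detection and every later instance reuses the
lists. The pattern in isolation, with hypothetical names:

    class CachedDetector(object):
        _cached_field_list = None

        def detect_fields(self):
            if self.__class__._cached_field_list is None:
                # Expensive scan, performed once per class, not per instance.
                self.__class__._cached_field_list = self._scan()
            self.field_list = self.__class__._cached_field_list

        def _scan(self):
            return ["Density", "Temperature"]  # stand-in for the real work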


--- a/yt/mods.py	Wed Mar 02 21:36:02 2011 -0500
+++ b/yt/mods.py	Thu Mar 10 19:06:41 2011 -0800
@@ -86,7 +86,7 @@
     HaloFinder
 
 from yt.utilities.definitions import \
-    axis_names, x_dict, y_dict
+    axis_names, x_dict, y_dict, inv_axis_names
 
 # Now individual component imports from the visualization API
 from yt.visualization.api import \


--- a/yt/utilities/_amr_utils/ContourFinding.pyx	Wed Mar 02 21:36:02 2011 -0500
+++ b/yt/utilities/_amr_utils/ContourFinding.pyx	Thu Mar 10 19:06:41 2011 -0800
@@ -38,24 +38,27 @@
     if i0 < i1: return i0
     return i1
 
-@cython.boundscheck(False)
-@cython.wraparound(False)
+#@cython.boundscheck(False)
+#@cython.wraparound(False)
 def construct_boundary_relationships(
         np.ndarray[dtype=np.int64_t, ndim=3] contour_ids):
     # We only look at the boundary and one cell in
     cdef int i, j, nx, ny, nz, offset_i, offset_j, oi, oj
     cdef np.int64_t c1, c2
-    tree = []
     nx = contour_ids.shape[0]
     ny = contour_ids.shape[1]
     nz = contour_ids.shape[2]
+    # We allocate an array of fixed (maximum) size
+    cdef int s = (ny*nz + nx*nz + nx*ny) * 18
+    cdef np.ndarray[np.int64_t, ndim=2] tree = np.zeros((s, 2), dtype="int64")
+    cdef int ti = 0
     # First x-pass
     for i in range(ny):
         for j in range(nz):
             for offset_i in range(3):
                 oi = offset_i - 1
                 if i == 0 and oi == -1: continue
-                if i == ny - 1 and oj == 1: continue
+                if i == ny - 1 and oi == 1: continue
                 for offset_j in range(3):
                     oj = offset_j - 1
                     if j == 0 and oj == -1: continue
@@ -63,18 +66,22 @@
                     c1 = contour_ids[0, i, j]
                     c2 = contour_ids[1, i + oi, j + oj]
                     if c1 > -1 and c2 > -1:
-                        tree.append((i64max(c1,c2), i64min(c1,c2)))
+                        tree[ti,0] = i64max(c1,c2)
+                        tree[ti,1] = i64min(c1,c2)
+                        ti += 1
                     c1 = contour_ids[nx-1, i, j]
                     c2 = contour_ids[nx-2, i + oi, j + oj]
                     if c1 > -1 and c2 > -1:
-                        tree.append((i64max(c1,c2), i64min(c1,c2)))
+                        tree[ti,0] = i64max(c1,c2)
+                        tree[ti,1] = i64min(c1,c2)
+                        ti += 1
     # Now y-pass
     for i in range(nx):
         for j in range(nz):
             for offset_i in range(3):
                 oi = offset_i - 1
                 if i == 0 and oi == -1: continue
-                if i == nx - 1 and oj == 1: continue
+                if i == nx - 1 and oi == 1: continue
                 for offset_j in range(3):
                     oj = offset_j - 1
                     if j == 0 and oj == -1: continue
@@ -82,17 +89,21 @@
                     c1 = contour_ids[i, 0, j]
                     c2 = contour_ids[i + oi, 1, j + oj]
                     if c1 > -1 and c2 > -1:
-                        tree.append((i64max(c1,c2), i64min(c1,c2)))
+                        tree[ti,0] = i64max(c1,c2)
+                        tree[ti,1] = i64min(c1,c2)
+                        ti += 1
                     c1 = contour_ids[i, ny-1, j]
                     c2 = contour_ids[i + oi, ny-2, j + oj]
                     if c1 > -1 and c2 > -1:
-                        tree.append((i64max(c1,c2), i64min(c1,c2)))
+                        tree[ti,0] = i64max(c1,c2)
+                        tree[ti,1] = i64min(c1,c2)
+                        ti += 1
     for i in range(nx):
         for j in range(ny):
             for offset_i in range(3):
                 oi = offset_i - 1
                 if i == 0 and oi == -1: continue
-                if i == nx - 1 and oj == 1: continue
+                if i == nx - 1 and oi == 1: continue
                 for offset_j in range(3):
                     oj = offset_j - 1
                     if j == 0 and oj == -1: continue
@@ -100,12 +111,16 @@
                     c1 = contour_ids[i, j, 0]
                     c2 = contour_ids[i + oi, j + oj, 1]
                     if c1 > -1 and c2 > -1:
-                        tree.append((i64max(c1,c2), i64min(c1,c2)))
+                        tree[ti,0] = i64max(c1,c2)
+                        tree[ti,1] = i64min(c1,c2)
+                        ti += 1
                     c1 = contour_ids[i, j, nz-1]
                     c2 = contour_ids[i + oi, j + oj, nz-2]
                     if c1 > -1 and c2 > -1:
-                        tree.append((i64max(c1,c2), i64min(c1,c2)))
-    return tree
+                        tree[ti,0] = i64max(c1,c2)
+                        tree[ti,1] = i64min(c1,c2)
+                        ti += 1
+    return tree[:ti,:]
 
 cdef inline int are_neighbors(
             np.float64_t x1, np.float64_t y1, np.float64_t z1,
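
The Cython change above swaps a growing Python list of tuples for a
preallocated int64 array; the same pattern, reduced to plain numpy with
invented sizes:

    import numpy as na

    s = 16                              # worst-case number of join pairs
    tree = na.zeros((s, 2), dtype="int64")
    ti = 0
    for c1, c2 in [(3, 5), (7, 2)]:     # stand-in for the boundary scan
        tree[ti, 0] = max(c1, c2)
        tree[ti, 1] = min(c1, c2)
        ti += 1
    tree = tree[:ti, :]                 # keep only the filled rows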


--- a/yt/utilities/_amr_utils/VolumeIntegrator.pyx	Wed Mar 02 21:36:02 2011 -0500
+++ b/yt/utilities/_amr_utils/VolumeIntegrator.pyx	Thu Mar 10 19:06:41 2011 -0800
@@ -707,6 +707,75 @@
             gaussian = self.star_coeff * exp(-gexp/self.star_sigma_num)
             for i in range(3): rgba[i] += gaussian*dt*colors[i]
         kdtree_utils.kd_res_rewind(ballq)
+        
+    @cython.boundscheck(False)
+    @cython.wraparound(False)
+    @cython.cdivision(True)
+    def integrate_streamline(self, pos, np.float64_t h):
+        cdef np.float64_t k1[3], k2[3], k3[3], k4[3]
+        cdef np.float64_t newpos[3], oldpos[3]
+        for i in range(3):
+            newpos[i] = oldpos[i] = pos[i]
+        self.get_vector_field(newpos, k1)
+        for i in range(3):
+            newpos[i] = oldpos[i] + 0.5*k1[i]*h
+
+        if not (self.left_edge[0] < newpos[0] and newpos[0] < self.right_edge[0] and \
+                self.left_edge[1] < newpos[1] and newpos[1] < self.right_edge[1] and \
+                self.left_edge[2] < newpos[2] and newpos[2] < self.right_edge[2]):
+            for i in range(3):
+                pos[i] = newpos[i]
+            return
+        
+        self.get_vector_field(newpos, k2)
+        for i in range(3):
+            newpos[i] = oldpos[i] + 0.5*k2[i]*h
+
+        if not (self.left_edge[0] <= newpos[0] and newpos[0] <= self.right_edge[0] and \
+                self.left_edge[1] <= newpos[1] and newpos[1] <= self.right_edge[1] and \
+                self.left_edge[2] <= newpos[2] and newpos[2] <= self.right_edge[2]):
+            for i in range(3):
+                pos[i] = newpos[i]
+            return
+
+        self.get_vector_field(newpos, k3)
+        for i in range(3):
+            newpos[i] = oldpos[i] + k3[i]*h
+            
+        if not (self.left_edge[0] <= newpos[0] and newpos[0] <= self.right_edge[0] and \
+                self.left_edge[1] <= newpos[1] and newpos[1] <= self.right_edge[1] and \
+                self.left_edge[2] <= newpos[2] and newpos[2] <= self.right_edge[2]):
+            for i in range(3):
+                pos[i] = newpos[i]
+            return
+
+        self.get_vector_field(newpos, k4)
+
+        for i in range(3):
+            pos[i] = oldpos[i] + h*(k1[i]/6.0 + k2[i]/3.0 + k3[i]/3.0 + k4[i]/6.0)
+    
+    @cython.boundscheck(False)
+    @cython.wraparound(False)
+    @cython.cdivision(True)
+    cdef void get_vector_field(self, np.float64_t pos[3],
+                               np.float64_t *vel):
+        cdef np.float64_t dp[3]
+        cdef int ci[3] 
+        cdef np.float64_t vel_mag = 0.0
+        for i in range(3):
+            ci[i] = (int)((pos[i]-self.left_edge[i])/self.dds[i])
+            dp[i] = (pos[i] - self.left_edge[i])%(self.dds[i])
+
+        cdef int offset = ci[0] * (self.dims[1] + 1) * (self.dims[2] + 1) \
+                          + ci[1] * (self.dims[2] + 1) + ci[2]
+        
+        for i in range(3):
+            vel[i] = offset_interpolate(self.dims, dp, self.data[i] + offset)
+            vel_mag += vel[i]*vel[i]
+        vel_mag = np.sqrt(vel_mag)
+        if vel_mag != 0.0:
+            for i in range(3):
+                vel[i] /= vel_mag
 
 cdef class GridFace:
     cdef int direction
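
integrate_streamline above is a classical fourth-order Runge-Kutta step on
the normalized vector field, bailing out early whenever an intermediate
point leaves the brick. The update rule, stripped of the bounds checks and
with an invented field function:

    import numpy as na

    def rk4_step(pos, h, field):
        k1 = field(pos)
        k2 = field(pos + 0.5 * h * k1)
        k3 = field(pos + 0.5 * h * k2)
        k4 = field(pos + h * k3)
        return pos + h * (k1 / 6. + k2 / 3. + k3 / 3. + k4 / 6.)

    # Unit-speed rotation about z; streamlines should stay on circles.
    f = lambda p: na.array([-p[1], p[0], 0.]) / na.hypot(p[0], p[1])
    print(rk4_step(na.array([1., 0., 0.]), 0.1, f))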


--- a/yt/utilities/amr_kdtree/amr_kdtree.py	Wed Mar 02 21:36:02 2011 -0500
+++ b/yt/utilities/amr_kdtree/amr_kdtree.py	Thu Mar 10 19:06:41 2011 -0800
@@ -126,7 +126,7 @@
 class AMRKDTree(HomogenizedVolume):
     def __init__(self, pf,  l_max=None, le=None, re=None,
                  fields=None, no_ghost=False,
-                 tree_type='domain',log_fields=None):
+                 tree_type='domain',log_fields=None, merge_trees=False):
         r"""
         AMR kd-Tree object, a homogenized volume.
 
@@ -198,7 +198,14 @@
             prone to longer data IO times.  If all the data can fit in
             memory on each cpu, this can be the fastest option for
             multiple ray casts on the same dataset.
-
+        merge_trees: bool, optional
+            If True, the distributed kD-tree can be merged
+            together in an allgather-like fashion.  This should not be
+            used for parallel rendering, as it will cause all
+            processors to render all bricks.  This is primarily useful
+            for applications that are not domain decomposed but still
+            want to build the kD-tree in parallel. Default:False
+        
 
         Returns
         -------
@@ -306,19 +313,30 @@
         t2 = time()
         mylog.debug('It took %e seconds to build AMRKDTree.tree' % (t2-t1))
         
-        # Build Tree Dictionary
         self._build_tree_dict()
 
+        # If the full amr kD-tree is requested, merge the results from
+        # the parallel build.
+        if merge_trees and nprocs > 1:
+            self.join_parallel_trees()            
+            self.my_l_corner = self.domain_left_edge
+            self.my_r_corner = self.domain_right_edge
+        
         # Initialize the kd leafs:
         self.initialize_leafs()
         
         # Add properties to leafs/nodes
         self.total_cost = self.count_cost()
+
         # Calculate the total volume spanned by the tree
         self.volume = self.count_volume()
         mylog.debug('Cost is %d' % self.total_cost)
         mylog.debug('Volume is %e' % self.volume) 
 
+        self.current_saved_grids = []
+        self.current_vcds = []
+
+
     def _build_tree_dict(self):
         self.tree_dict = {}
         for node in self.depth_traverse():
@@ -511,6 +529,32 @@
         self.brick_dimensions = na.array(self.brick_dimensions)
         del current_saved_grids, current_vcds
         self.bricks_loaded = True
+
+    def get_brick_data(self,current_node):
+        if current_node.brick is not None:
+            return 
+
+        if current_node.grid in self.current_saved_grids:
+            dds = self.current_vcds[self.current_saved_grids.index(current_node.grid)]
+        else:
+            dds = []
+            for i,field in enumerate(self.fields):
+                vcd = current_node.grid.get_vertex_centered_data(field,smoothed=True,no_ghost=self.no_ghost).astype('float64')
+                if self.log_fields[i]: vcd = na.log10(vcd)
+                dds.append(vcd)
+            self.current_saved_grids.append(current_node.grid)
+            self.current_vcds.append(dds)
+                
+        data = [d[current_node.li[0]:current_node.ri[0]+1,
+                  current_node.li[1]:current_node.ri[1]+1,
+                  current_node.li[2]:current_node.ri[2]+1].copy() for d in dds]
+
+        current_node.brick = PartitionedGrid(current_node.grid.id, len(self.fields), data,
+                                             current_node.l_corner.copy(), 
+                                             current_node.r_corner.copy(), 
+                                             current_node.dims.astype('int64'))
+
+        return
         
     def set_leaf_props(self,thisnode):
         r"""Given a leaf, gathers grid, indices, dimensions, and cost properties.
@@ -541,7 +585,49 @@
         for node in self.depth_traverse():
             if node.grid is not None:
                 self.set_leaf_props(node)
+
+    def join_parallel_trees(self):
+        self.trim_references()
+        self.merge_trees()
+        self.rebuild_references()
                 
+    def trim_references(self):
+        par_tree_depth = long(na.log2(nprocs))
+        for i in range(2*nprocs):
+            if ((i + 1)>>par_tree_depth) == 1:
+                # There are nprocs nodes that meet this criterion
+                if (i+1-nprocs) != my_rank:
+                    self.tree_dict.pop(i)
+                    continue
+        for node in self.tree_dict.itervalues():
+            del node.parent, node.left_child, node.right_child
+            try:
+                del node.grids
+            except:
+                pass
+            if not na.isreal(node.grid):
+                node.grid = node.grid.id
+        if self.tree_dict[0].split_pos is None:
+            self.tree_dict.pop(0)
+    def merge_trees(self):
+        self.tree_dict = self._mpi_joindict(self.tree_dict)
+
+    def rebuild_references(self):
+        self.tree = self.tree_dict[0]
+        self.tree.parent = None
+        for node in self.depth_traverse():
+            try:
+                node.parent = self.tree_dict[_parent_id(node.id)]
+            except:
+                node.parent = None
+            try:
+                node.left_child = self.tree_dict[_lchild_id(node.id)]
+            except:
+                node.left_child = None
+            try:
+                node.right_child = self.tree_dict[_rchild_id(node.id)]
+            except:
+                node.right_child = None
 
     def count_cost(self):
         r"""Counts the cost of the entire tree, while filling in branch costs.
@@ -1075,3 +1161,20 @@
         f.create_dataset("/split_pos",data=kd_split_pos)
         f.create_dataset("/kd_owners",data=kd_owners)
         f.close()
+        
+    def corners_to_line(self,lc, rc):
+        x = na.array([ lc[0], lc[0], lc[0], lc[0], lc[0],
+                       rc[0], rc[0], rc[0], rc[0], rc[0],
+                       rc[0], lc[0], lc[0], rc[0],
+                       rc[0], lc[0], lc[0] ])
+        
+        y = na.array([ lc[1], lc[1], rc[1], rc[1], lc[1],
+                       lc[1], lc[1], rc[1], rc[1], lc[1],
+                       lc[1], lc[1], rc[1], rc[1],
+                       rc[1], rc[1], lc[1] ])
+        
+        z = na.array([ lc[2], rc[2], rc[2], lc[2], lc[2],
+                       lc[2], rc[2], rc[2], lc[2], lc[2],
+                       rc[2], rc[2], rc[2], rc[2],
+                       lc[2], lc[2], lc[2] ])
+        return x,y,z
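
join_parallel_trees follows a trim/merge/rebuild pattern: object references
are stripped so each node dictionary can travel through the allgather-style
join, then parent/child pointers are re-derived from node ids. A schematic
of the rebuild half (a sketch only; the id helpers exist in the module but
their bodies are assumed here):

    def rebuild_references(tree_dict, _parent_id, _lchild_id, _rchild_id):
        # dict.get stands in for the try/except blocks above: a missing id
        # means the relative was pruned on another processor, or that the
        # node is the root or a leaf.
        for nid, node in tree_dict.items():
            node.parent = tree_dict.get(_parent_id(nid))
            node.left_child = tree_dict.get(_lchild_id(nid))
            node.right_child = tree_dict.get(_rchild_id(nid))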


--- a/yt/utilities/definitions.py	Wed Mar 02 21:36:02 2011 -0500
+++ b/yt/utilities/definitions.py	Thu Mar 10 19:06:41 2011 -0800
@@ -30,7 +30,8 @@
 
 axis_labels = [('y','z'),('x','z'),('x','y')]
 axis_names = {0: 'x', 1: 'y', 2: 'z', 4:''}
-inv_axis_names = {'x':0,'y':1,'z':2}
+inv_axis_names = {'x':0,'y':1,'z':2,
+                  'X':0,'Y':1,'Z':2}
 
 vm_axis_names = {0:'x', 1:'y', 2:'z', 3:'dx', 4:'dy'}
 


--- a/yt/utilities/hdf5_light_reader.c	Wed Mar 02 21:36:02 2011 -0500
+++ b/yt/utilities/hdf5_light_reader.c	Thu Mar 10 19:06:41 2011 -0800
@@ -38,6 +38,7 @@
 #define npy_float128 npy_longdouble
 #endif
 
+#define MIN(a,b) ((a) <= (b) ? (a) : (b))
 
 static PyObject *_hdf5ReadError;
 herr_t iterate_dataset(hid_t loc_id, const char *name, void *nodelist);
@@ -78,6 +79,8 @@
     /* These cannot contain any pointers */
     npy_float64 center[3];
     npy_float64 radius;
+    npy_float64 period[3];
+    int periodic;
 } sphere_validation;
 
 typedef struct cylinder_validation_ {
@@ -992,6 +995,8 @@
     /* These are borrowed references */
     PyArrayObject *center = (PyArrayObject *) PyTuple_GetItem(InputData, 0);
     PyObject *radius = (PyObject *) PyTuple_GetItem(InputData, 1);
+    PyObject *operiodic = PyTuple_GetItem(InputData, 2);
+    npy_float64 DW;
 
     /* This will get freed in the finalization of particle validation */
     sphere_validation *sv = (sphere_validation *)
@@ -1004,6 +1009,18 @@
 
     sv->radius = (npy_float64) PyFloat_AsDouble(radius);
 
+    sv->periodic = PyInt_AsLong(operiodic);
+    if(sv->periodic == 1) {
+      PyArrayObject *domain_left_edge = (PyArrayObject *) PyTuple_GetItem(InputData, 3);
+      PyArrayObject *domain_right_edge = (PyArrayObject *) PyTuple_GetItem(InputData, 4);
+      for (i = 0; i < 3; i++){
+        DW = (*(npy_float64*) PyArray_GETPTR1(domain_right_edge, i))
+           - (*(npy_float64*) PyArray_GETPTR1(domain_left_edge, i));
+        sv->period[i] = DW;
+        //fprintf(stderr, "Setting period equal to %lf\n", sv->period[i]);
+      }
+    }
+
     data->count_func = NULL;
     data->count_func_float = count_particles_sphere_FLOAT;
     data->count_func_double = count_particles_sphere_DOUBLE;
@@ -1523,6 +1540,7 @@
 
     double pradius;
 
+    if (vdata->periodic == 0) {
       for (ind = 0; ind < data->particles_to_check; ind++) {
         pradius = 0.0;
         tempr = (particle_position_x[ind] - vdata->center[0]); pradius += tempr*tempr;
@@ -1537,6 +1555,25 @@
           data->mask[ind] = 0;
         }
       }
+    } else {
+      for (ind = 0; ind < data->particles_to_check; ind++) {
+        pradius = 0.0;
+        tempr = fabs(particle_position_x[ind] - vdata->center[0]);
+        tempr = MIN(tempr, vdata->period[0] - tempr); pradius += tempr*tempr;
+        tempr = fabs(particle_position_y[ind] - vdata->center[1]);
+        tempr = MIN(tempr, vdata->period[1] - tempr); pradius += tempr*tempr;
+        tempr = fabs(particle_position_z[ind] - vdata->center[2]);
+        tempr = MIN(tempr, vdata->period[2] - tempr); pradius += tempr*tempr;
+        pradius = pow(pradius, 0.5);
+        if (pradius <= vdata->radius) {
+          if(data->update_count == 1) data->total_valid_particles++;
+          data->mask[ind] = 1;
+          n++;
+        } else {
+          data->mask[ind] = 0;
+        }
+      }
+    }
     return n;
 }
 
@@ -1560,6 +1597,7 @@
 
     double pradius;
 
+    if (vdata->periodic == 0) {
       for (ind = 0; ind < data->particles_to_check; ind++) {
         pradius = 0.0;
         tempr = (particle_position_x[ind] - vdata->center[0]); pradius += tempr*tempr;
@@ -1574,6 +1612,25 @@
           data->mask[ind] = 0;
         }
       }
+    } else {
+      for (ind = 0; ind < data->particles_to_check; ind++) {
+        pradius = 0.0;
+        tempr = fabs(particle_position_x[ind] - vdata->center[0]);
+        tempr = MIN(tempr, vdata->period[0] - tempr); pradius += tempr*tempr;
+        tempr = fabs(particle_position_y[ind] - vdata->center[1]);
+        tempr = MIN(tempr, vdata->period[1] - tempr); pradius += tempr*tempr;
+        tempr = fabs(particle_position_z[ind] - vdata->center[2]);
+        tempr = MIN(tempr, vdata->period[2] - tempr); pradius += tempr*tempr;
+        pradius = pow(pradius, 0.5);
+        if (pradius <= vdata->radius) {
+          if(data->update_count == 1) data->total_valid_particles++;
+          data->mask[ind] = 1;
+          n++;
+        } else {
+          data->mask[ind] = 0;
+        }
+      }
+    }
     return n;
 }
 
@@ -1597,6 +1654,7 @@
 
     long double pradius;
 
+    if (vdata->periodic == 0) {
       for (ind = 0; ind < data->particles_to_check; ind++) {
         pradius = 0.0;
         tempr = (particle_position_x[ind] - vdata->center[0]); pradius += tempr*tempr;
@@ -1611,6 +1669,25 @@
           data->mask[ind] = 0;
         }
       }
+    } else {
+      for (ind = 0; ind < data->particles_to_check; ind++) {
+        pradius = 0.0;
+        tempr = fabs(particle_position_x[ind] - vdata->center[0]);
+        tempr = MIN(tempr, vdata->period[0] - tempr); pradius += tempr*tempr;
+        tempr = fabs(particle_position_y[ind] - vdata->center[1]);
+        tempr = MIN(tempr, vdata->period[1] - tempr); pradius += tempr*tempr;
+        tempr = fabs(particle_position_z[ind] - vdata->center[2]);
+        tempr = MIN(tempr, vdata->period[2] - tempr); pradius += tempr*tempr;
+        pradius = pow(pradius, 0.5);
+        if (pradius <= vdata->radius) {
+          if(data->update_count == 1) data->total_valid_particles++;
+          data->mask[ind] = 1;
+          n++;
+        } else {
+          data->mask[ind] = 0;
+        }
+      }
+    }
     return n;
 }
 

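All three periodic branches above apply the same per-axis minimum-image rule:
the effective separation along an axis is the smaller of the direct distance
and the distance wrapping through the periodic boundary.  A NumPy sketch of
the rule (illustrative only; the per-particle C loops above remain the actual
implementation):

    import numpy as na

    def periodic_radius(pos, center, period):
        # pos: (N, 3) positions; center: (3,); period: (3,) domain widths.
        dx = na.abs(pos - center)
        dx = na.minimum(dx, period - dx)  # minimum-image convention per axis
        return na.sqrt((dx * dx).sum(axis=-1))

    # A particle at 0.95 and a center at 0.05 in a unit box are 0.1 apart:
    print periodic_radius(na.array([[0.95, 0.5, 0.5]]),
                          na.array([0.05, 0.5, 0.5]),
                          na.ones(3))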

--- a/yt/utilities/logger.py	Wed Mar 02 21:36:02 2011 -0500
+++ b/yt/utilities/logger.py	Thu Mar 10 19:06:41 2011 -0800
@@ -73,11 +73,11 @@
 original_emitter = logging.StreamHandler.emit
 def colorize_logging():
     f = logging.Formatter(cfstring)
-    rootLogger.handlers[0].setFormatter(f)
+    if len(rootLogger.handlers) > 0: rootLogger.handlers[0].setFormatter(f)
     logging.StreamHandler.emit = add_coloring_to_emit_ansi(logging.StreamHandler.emit)
 def uncolorize_logging():
     f = logging.Formatter(ufstring)
-    rootLogger.handlers[0].setFormatter(f)
+    if len(rootLogger.handlers) > 0: rootLogger.handlers[0].setFormatter(f)
     logging.StreamHandler.emit = original_emitter
 
 if ytcfg.getboolean("yt","coloredlogs"):


--- a/yt/utilities/parallel_tools/parallel_analysis_interface.py	Wed Mar 02 21:36:02 2011 -0500
+++ b/yt/utilities/parallel_tools/parallel_analysis_interface.py	Thu Mar 10 19:06:41 2011 -0800
@@ -69,7 +69,8 @@
         # we reset it again so that it includes the processor.
         f = logging.Formatter("P%03i %s" % (MPI.COMM_WORLD.rank,
                                             yt.utilities.logger.ufstring))
-        yt.utilities.logger.rootLogger.handlers[0].setFormatter(f)
+        if len(yt.utilities.logger.rootLogger.handlers) > 0:
+            yt.utilities.logger.rootLogger.handlers[0].setFormatter(f)
         if ytcfg.getboolean("yt", "parallel_traceback"):
             sys.excepthook = traceback_writer_hook("_%03i" % MPI.COMM_WORLD.rank)
     if ytcfg.getint("yt","LogLevel") < 20:
@@ -1036,6 +1037,19 @@
         return data
 
     @parallel_passthrough
+    def _mpi_cat_na_array(self,data):
+        self._barrier()
+        comm = MPI.COMM_WORLD
+        if comm.rank == 0:
+            for i in range(1,comm.size):
+                buf = comm.recv(source=i, tag=0)
+                data = na.concatenate([data,buf])
+        else:
+            comm.send(data, 0, tag = 0)
+        data = comm.bcast(data, root=0)
+        return data
+
+    @parallel_passthrough
     def _mpi_catarray(self, data):
         if data is None:
             ncols = -1
@@ -1104,7 +1118,13 @@
         self._barrier()
         # We use old-school pickling here on the assumption the arrays are
         # relatively small ( < 1e7 elements )
-        return MPI.COMM_WORLD.allreduce(data, op=MPI.SUM)
+        if isinstance(data, na.ndarray) and data.dtype != na.bool:
+            tr = na.zeros_like(data)
+            if not data.flags.c_contiguous: data = data.copy()
+            MPI.COMM_WORLD.Allreduce(data, tr, op=MPI.SUM)
+            return tr
+        else:
+            return MPI.COMM_WORLD.allreduce(data, op=MPI.SUM)
 
     @parallel_passthrough
     def _mpi_Allsum_double(self, data):

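The _mpi_allsum change above routes numeric arrays through the buffer-based
Allreduce, which avoids pickling but requires a C-contiguous send buffer and
a preallocated receive buffer; everything else keeps the pickling allreduce.
A standalone sketch of the same pattern, assuming mpi4py and execution under
mpirun:

    import numpy as na
    from mpi4py import MPI

    comm = MPI.COMM_WORLD
    data = na.arange(10, dtype='float64') * (comm.rank + 1)
    if not data.flags.c_contiguous:
        data = data.copy()            # Allreduce needs a contiguous buffer
    total = na.zeros_like(data)
    comm.Allreduce(data, total, op=MPI.SUM)   # fast path, no pickling
    # comm.allreduce(data, op=MPI.SUM) is the slower, pickling equivalent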

--- a/yt/utilities/performance_counters.py	Wed Mar 02 21:36:02 2011 -0500
+++ b/yt/utilities/performance_counters.py	Thu Mar 10 19:06:41 2011 -0800
@@ -100,3 +100,36 @@
 
 yt_counters = PerformanceCounters()
 time_function = yt_counters.call_func
+
+
+class ProfilingController(object):
+    def __init__(self):
+        self.profilers = {}
+
+    def profile_function(self, function_name):
+        def wrapper(func):
+            try:
+                import cProfile
+            except ImportError:
+                return func
+            my_prof = cProfile.Profile()
+            self.profilers[function_name] = my_prof
+            @wraps(func)
+            def run_in_profiler(*args, **kwargs):
+                my_prof.enable()
+                tr = func(*args, **kwargs)
+                my_prof.disable()
+                return tr
+            return run_in_profiler
+        return wrapper
+
+    def write_out(self, filename_prefix):
+        if ytcfg.getboolean("yt","__parallel"):
+            pfn = "%s_%03i_%03i" % (filename_prefix,
+                                    ytcfg.getint("yt", "__parallel_rank"),
+                                    ytcfg.getint("yt", "__parallel_size"))
+        else:
+            pfn = "%s" % (filename_prefix)
+        for n, p in sorted(self.profilers.items()):
+            fn = "%s_%s.cprof" % (pfn, n)
+            mylog.info("Dumping %s into %s", n, fn)
+            p.dump_stats(fn)

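A usage sketch for the new ProfilingController; the label, function body and
file prefix below are made up for illustration:

    from yt.utilities.performance_counters import ProfilingController

    profiler = ProfilingController()

    @profiler.profile_function("my_analysis")
    def my_analysis():
        return sum(i * i for i in xrange(10 ** 6))

    my_analysis()
    # In serial this writes my_run_my_analysis.cprof (rank and size are
    # appended when running in parallel); inspect the result with
    #   python -m pstats my_run_my_analysis.cprof
    profiler.write_out("my_run")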

--- a/yt/visualization/api.py	Wed Mar 02 21:36:02 2011 -0500
+++ b/yt/visualization/api.py	Thu Mar 10 19:06:41 2011 -0800
@@ -55,3 +55,6 @@
 
 from easy_plots import \
     plot_type_registry
+
+from streamlines import \
+     Streamlines


--- a/yt/visualization/plot_collection.py	Wed Mar 02 21:36:02 2011 -0500
+++ b/yt/visualization/plot_collection.py	Thu Mar 10 19:06:41 2011 -0800
@@ -31,7 +31,7 @@
 from yt.data_objects.profiles import \
     BinnedProfile1D, \
     BinnedProfile2D
-from yt.utilities.definitions import axis_names
+from yt.utilities.definitions import axis_names, inv_axis_names
 from .plot_types import \
     FixedResolutionPlot, \
     SlicePlot, \
@@ -55,6 +55,9 @@
         outfile.addPage(infile.getPage(0))
     outfile.write(open(output_fn, "wb"))
 
+def _fix_axis(axis):
+    return inv_axis_names.get(axis, axis)
+
 class PlotCollection(object):
     __id_counter = 0
     def __init__(self, pf, center=None):
@@ -93,7 +96,7 @@
         --------
 
         >>> pc = PlotCollection(pf, center=[0.5, 0.5, 0.5])
-        >>> pc.add_slice("Density", 0)
+        >>> pc.add_slice("Density", 'x')
         >>> pc.save()
 
         """
@@ -325,7 +328,7 @@
         field : string
             The initial field to slice and display.
         axis : int
-            The axis along which to slice.  Can be 0, 1, or 2 for x, y, z.
+            The axis along which to slice.  Can be 0, 1, 2, or x, y, z.
         coord : float, optional
             The coordinate to place the slice at, along the slicing axis.
         center : array_like, optional
@@ -377,8 +380,9 @@
 
         >>> pf = load("RD0005-mine/RedshiftOutput0005")
         >>> pc = PlotCollection(pf, [0.5, 0.5, 0.5])
-        >>> p = pc.add_slice("Density", 0)
+        >>> p = pc.add_slice("Density", 'x')
         """
+        axis = _fix_axis(axis)
         if center == None:
             center = self.c
         if coord == None:
@@ -408,7 +412,7 @@
         ----------
         axis : int
             The axis along which to create the thick slab.  Can be 0, 1, or 2
-            for x, y, z.
+            for x, y, z, or the axis name itself.
         width : float
             The width of the thick slab, in code units, from which particles
             will be plotted.
@@ -451,6 +455,7 @@
         >>> pc = PlotCollection(pf, [0.5, 0.5, 0.5])
         >>> p = pc.add_particles(0, 1.0)
         """
+        axis = _fix_axis(axis)
         LE = self.pf.domain_left_edge.copy()
         RE = self.pf.domain_right_edge.copy()
         LE[axis] = self.c[axis] - width/2.0
@@ -669,7 +674,7 @@
         field : string
             The initial field to slice and display.
         axis : int
-            The axis along which to slice.  Can be 0, 1, or 2 for x, y, z.
+            The axis along which to slice.  Can be 0, 1, 2, or x, y, z.
         data_source : `yt.data_objects.api.AMRData`
             This is a data source respecting the `AMRData` protocol (i.e., it
             has grids and so forth) that will be used as input to the
@@ -729,8 +734,9 @@
 
         >>> pf = load("RD0005-mine/RedshiftOutput0005")
         >>> pc = PlotCollection(pf, [0.5, 0.5, 0.5])
-        >>> p = pc.add_projection("Density", 0, "Density")
+        >>> p = pc.add_projection("Density", 'x', "Density")
         """
+        axis = _fix_axis(axis)
         if field_parameters is None: field_parameters = {}
         if center == None:
             center = self.c
@@ -763,7 +769,7 @@
         field : string
             The initial field to slice and display.
         axis : int
-            The axis along which to slice.  Can be 0, 1, or 2 for x, y, z.
+            The axis along which to slice.  Can be 0, 1, 2, or x, y, z.
         thickness : float
             In 'code units' this is the thickness of the region to be
             projected through.
@@ -819,6 +825,7 @@
         >>> pc = PlotCollection(pf, [0.5, 0.5, 0.5])
         >>> p = pc.add_thin_projection("Density", 0, 0.1, "Density")
         """
+        axis = _fix_axis(axis)
         if field_parameters is None: field_parameters = {}
         if center == None:
             center = self.c
@@ -1370,7 +1377,7 @@
         Parameters
         ----------
         axis : int
-            The axis along which to cast the ray.  Can be 0, 1, or 2 for x, y,
+            The axis along which to cast the ray.  Can be 0, 1, 2, or x, y,
             z.
         coords : tuple of floats
             The coordinates to place the ray at.  Note that the axes are in the
@@ -1409,7 +1416,7 @@
         >>> pc = PlotCollection(pf, [0.5, 0.5, 0.5])
         >>> p = pc.add_ortho_ray(0, (0.5, 0.5), "Density")
         """
-
+        axis = _fix_axis(axis)
         if field_parameters is None: field_parameters = {}
         if plot_options is None: plot_options = {}
         data_source = self.pf.h.ortho_ray(axis, coords, field,
@@ -1526,9 +1533,9 @@
         as a PDF.
 
         >>> pc = PlotCollection(pf, [0.5, 0.5, 0.5])
-        >>> pc.add_projection("Density", 0)
-        >>> pc.add_projection("Density", 1)
-        >>> pc.add_projection("Density", 2)
+        >>> pc.add_projection("Density", 'x')
+        >>> pc.add_projection("Density", 'y')
+        >>> pc.add_projection("Density", 'z')
         >>> pc.set_width(0.5, 'pc')
         >>> dd = pf.h.all_data()
         >>> pc.add_phase_object(dd, ["Density", "Temperature", "CellMassMsun"],

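The net effect of _fix_axis together with the case-insensitive inv_axis_names
is that every converted plot method now accepts either an integer axis or its
name.  A quick doctest-style illustration of the mapping:

    >>> from yt.utilities.definitions import inv_axis_names
    >>> def _fix_axis(axis):
    ...     return inv_axis_names.get(axis, axis)
    >>> _fix_axis('x'), _fix_axis('Z'), _fix_axis(1)
    (0, 2, 1)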

--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/yt/visualization/streamlines.py	Thu Mar 10 19:06:41 2011 -0800
@@ -0,0 +1,146 @@
+"""
+Streamline integration through AMR volumes
+
+Author: Samuel Skillman <samskillman at gmail.com>
+Affiliation: University of Colorado
+Homepage: http://yt.enzotools.org/
+License:
+  Copyright (C) 2010 Samuel Skillman.  All Rights Reserved.
+
+  This file is part of yt.
+
+  yt is free software; you can redistribute it and/or modify
+  it under the terms of the GNU General Public License as published by
+  the Free Software Foundation; either version 3 of the License, or
+  (at your option) any later version.
+
+  This program is distributed in the hope that it will be useful,
+  but WITHOUT ANY WARRANTY; without even the implied warranty of
+  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+  GNU General Public License for more details.
+
+  You should have received a copy of the GNU General Public License
+  along with this program.  If not, see <http://www.gnu.org/licenses/>.
+"""
+
+import numpy as na
+from yt.funcs import *
+from yt.utilities.parallel_tools.parallel_analysis_interface import \
+    ParallelAnalysisInterface, parallel_passthrough
+from yt.utilities.amr_kdtree.api import AMRKDTree
+
+class Streamlines(ParallelAnalysisInterface):
+    r"""A collection of streamlines that flow through the volume
+
+    The Streamlines object contains a collection of streamlines
+    defined as paths that are parallel to a specified vector field.  
+
+    Parameters
+    ----------
+    pf : `~yt.lagos.StaticOutput`
+        This is the parameter file to streamline
+    pos : array_like
+        An array of initial starting positions of the streamlines.
+    xfield: field
+        The x component of the vector field to be streamlined.
+    yfield: field
+        The y component of the vector field to be streamlined.
+    zfield: field
+        The z component of the vector field to be streamlined.
+    volume : `yt.extensions.volume_rendering.HomogenizedVolume`, optional
+        The volume to be streamlined.  Can be specified for
+        finer-grained control, but otherwise will be automatically
+        generated.  At this point it must use the AMRKDTree. 
+        Default: None
+    dx : float, optional
+        Optionally specify the step size during the integration.
+        Default: minimum dx
+    length : float, optional
+        Optionally specify the length of integration.  
+        Default: na.max(self.pf.domain_right_edge-self.pf.domain_left_edge)
+    
+    Examples
+    --------
+    >>> from yt.mods import *
+    >>> from yt.visualization.api import Streamlines
+    >>> pf = load('DD1701') # Load pf
+
+    >>> c = na.array([0.5]*3)
+    >>> N = 100
+    >>> scale = 1.0
+    >>> pos_dx = na.random.random((N,3))*scale-scale/2.
+    >>> pos = c+pos_dx
+    
+    >>> streamlines = Streamlines(pf,pos,'x-velocity', 'y-velocity', 'z-velocity', length=1.0) 
+    >>> streamlines.integrate_through_volume()
+    
+    >>> import matplotlib.pylab as pl
+    >>> from mpl_toolkits.mplot3d import Axes3D
+    >>> fig=pl.figure() 
+    >>> ax = Axes3D(fig)
+    >>> for stream in streamlines.streamlines:
+    ...     stream = stream[na.all(stream != 0.0, axis=1)]
+    ...     ax.plot3D(stream[:,0], stream[:,1], stream[:,2], alpha=0.1)
+    >>> pl.savefig('streamlines.png')
+    """
+    def __init__(self, pf, positions, xfield, yfield, zfield, volume=None,
+                 dx=None, length=None):
+        self.pf = pf
+        self.start_positions = positions
+        self.N = self.start_positions.shape[0]
+        self.xfield = xfield
+        self.yfield = yfield
+        self.zfield = zfield
+        if volume is None:
+            volume = AMRKDTree(self.pf, fields=[self.xfield,self.yfield,self.zfield],
+                            log_fields=[False,False,False], merge_trees=True)
+        self.volume = volume
+        if dx is None:
+            dx = self.pf.h.get_smallest_dx()
+        self.dx = dx
+        if length is None:
+            length = na.max(self.pf.domain_right_edge-self.pf.domain_left_edge)
+        self.length = length
+        self.steps = int(length/dx)
+        self.streamlines = na.zeros((self.N,self.steps,3), dtype='float64')
+
+    def integrate_through_volume(self):
+        nprocs = self._mpi_get_size()
+        my_rank = self._mpi_get_rank()
+        self.streamlines[my_rank::nprocs,0,:] = self.start_positions[my_rank::nprocs]
+
+        pbar = get_pbar("Streamlining", self.N)
+        for i,stream in enumerate(self.streamlines[my_rank::nprocs]):
+            step = self.steps
+            while (step > 1):
+                this_brick = self.volume.locate_brick(stream[-step,:])
+                step = self._integrate_through_brick(this_brick, stream, step)
+            pbar.update(i)
+        pbar.finish()
+        
+        self._finalize_parallel(None)
+
+    @parallel_passthrough
+    def _finalize_parallel(self,data):
+        self.streamlines = self._mpi_allsum(self.streamlines)
+        
+    def _integrate_through_brick(self, node, stream, step, periodic=False):
+        while (step > 1):
+            self.volume.get_brick_data(node)
+            brick = node.brick
+            stream[-step+1] = stream[-step]
+            brick.integrate_streamline(stream[-step+1], self.dx)
+            if na.any(stream[-step+1,:] <= self.pf.domain_left_edge) or \
+                   na.any(stream[-step+1,:] >= self.pf.domain_right_edge):
+                return 0
+
+            if na.any(stream[-step+1,:] < node.l_corner) or \
+                   na.any(stream[-step+1,:] >= node.r_corner):
+                return step-1
+            step -= 1
+        return step

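Brick hand-off aside, the core of integrate_streamline is a fixed-step
advection of a point along the local velocity direction.  The real stepper
lives in C inside the PartitionedGrid, but a pure-Python forward-Euler
equivalent, assuming a callable velocity(pos) sampler (which is not part of
this changeset), looks roughly like this:

    import numpy as na

    def integrate_streamline_euler(pos, velocity, dx, steps):
        # pos: (3,) starting point; velocity: callable returning a (3,)
        # vector at a point.  Advances by a fixed spatial step dx.
        path = na.zeros((steps, 3), dtype='float64')
        path[0] = pos
        for i in range(1, steps):
            v = velocity(path[i - 1])
            vmag = na.sqrt((v * v).sum())
            if vmag == 0.0:
                path[i:] = path[i - 1]   # stagnation point; stop advancing
                break
            path[i] = path[i - 1] + dx * v / vmag
        return path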

--- a/yt/visualization/volume_rendering/camera.py	Wed Mar 02 21:36:02 2011 -0500
+++ b/yt/visualization/volume_rendering/camera.py	Thu Mar 10 19:06:41 2011 -0800
@@ -45,7 +45,7 @@
                  volume = None, fields = None,
                  log_fields = None,
                  sub_samples = 5, pf = None,
-                 use_kd=True, l_max=None, no_ghost=False,
+                 use_kd=True, l_max=None, no_ghost=True,
                  tree_type='domain',expand_factor=1.0,
                  le=None, re=None):
         r"""A viewpoint into a volume, for volume rendering.
@@ -203,6 +203,8 @@
         self.use_kd = use_kd
         self.l_max = l_max
         self.no_ghost = no_ghost
+        if self.no_ghost:
+            mylog.warning('no_ghost is True (the default); this may lead to artifacts at grid boundaries.')
         self.tree_type = tree_type
         if volume is None:
             if self.use_kd:

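Users who see the new no_ghost message and want the previous, smoother
behavior can pass no_ghost=False explicitly.  A hedged sketch (the leading
positional arguments are abbreviated from the Camera docstring and may not
match exactly):

    cam = Camera(center, normal_vector, width, resolution,
                 transfer_function, pf=pf,
                 no_ghost=False)  # slower, but fills ghost zones so that
                                  # interpolation crosses grid boundaries
    image = cam.snapshot()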

http://bitbucket.org/yt_analysis/yt/changeset/9bfec526bb74/
changeset:   r3841:9bfec526bb74
branch:      yt
user:        MatthewTurk
date:        2011-03-16 01:19:42
summary:     Adding initial support for a pasteboard
affected #:  3 files (4.2 KB)

--- a/yt/config.py	Thu Mar 10 19:06:41 2011 -0800
+++ b/yt/config.py	Tue Mar 15 17:19:42 2011 -0700
@@ -45,6 +45,7 @@
     loadfieldplugins = 'True',
     pluginfilename = 'my_plugins.py',
     parallel_traceback = 'False',
+    pasteboard_repo = '',
     )
 # Here is the upgrade.  We're actually going to parse the file in its entirety
 # here.  Then, if it has any of the Forbidden Sections, it will be rewritten


--- a/yt/utilities/command_line.py	Thu Mar 10 19:06:41 2011 -0800
+++ b/yt/utilities/command_line.py	Tue Mar 15 17:19:42 2011 -0700
@@ -591,6 +591,15 @@
                             weight="CellMassMsun")
         ph.modify["line"](pr.data["Density"], pr.data["Temperature"])
         pc.save()
+
+    @add_cmd_options([])
+    def do_pasteboard(self, subcmd, opts, arg):
+        """
+        Place a file into the user's pasteboard
+        """
+        from yt.utilities.pasteboard import PostInventory
+        pp = PostInventory()
+        pp.add_post(arg)
     
 def run_main():
     for co in ["--parallel", "--paste"]:


--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/yt/utilities/pasteboard.py	Tue Mar 15 17:19:42 2011 -0700
@@ -0,0 +1,102 @@
+"""
+Author: Matthew Turk <matthewturk at gmail.com>
+Affiliation: NSF / Columbia
+Homepage: http://yt.enzotools.org/
+License:
+  Copyright (C) 2011 Matthew Turk.  All Rights Reserved.
+
+  This file is part of yt.
+
+  yt is free software; you can redistribute it and/or modify
+  it under the terms of the GNU General Public License as published by
+  the Free Software Foundation; either version 3 of the License, or
+  (at your option) any later version.
+
+  This program is distributed in the hope that it will be useful,
+  but WITHOUT ANY WARRANTY; without even the implied warranty of
+  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+  GNU General Public License for more details.
+
+  You should have received a copy of the GNU General Public License
+  along with this program.  If not, see <http://www.gnu.org/licenses/>.
+"""
+
+from mercurial import ui, repo, commands, hg
+import json
+import os
+import time
+import uuid
+
+from yt.config import ytcfg
+
+class PostInventory(object):
+    def __init__(self, uu = None, repo_fn = None):
+        if uu is None: uu = ui.ui()
+        if repo_fn is None: repo_fn = ytcfg.get("yt","pasteboard_repo")
+        if repo_fn == '':
+            raise KeyError("~/.yt/config:[yt]pasteboard_repo")
+        self.repo_fn = repo_fn
+        self.bbrepo = hg.repository(uu, repo_fn)
+        config_fn = os.path.join(repo_fn, ".hg", "hgrc")
+        uu.readconfig(config_fn)
+        commands.pull(uu, self.bbrepo)
+        commands.update(uu, self.bbrepo, clean=True)
+        self.uu = uu
+
+    def regenerate_posts(self):
+        self.posts = []
+        for file in self.bbrepo["tip"]:
+            if file.startswith("posts/") and file.count("/") == 1:
+                filectx = self.bbrepo["tip"][file]
+                last_mod = filectx.filectx(filectx.filerev()).date()
+                self.posts.append((last_mod[0] + last_mod[1], file))
+        self.posts.sort()
+        self.posts = self.posts[::-1]
+
+    def add_post(self, filename, uu = None,
+                 highlight = True, push = True):
+        # We assume the post filename exists in the current space
+        self.regenerate_posts()
+        if uu is None: uu = self.uu
+        prefix = uuid.uuid4()
+        name = "%s-%s" % (prefix, os.path.basename(filename))
+        name_noext = name.replace(".","-")
+        hfn = "html/%s.html" % (name_noext)
+        pfn = "posts/%s" % (name)
+        abs_pfn = os.path.join(self.repo_fn, pfn)
+        abs_hfn = os.path.join(self.repo_fn, hfn)
+        self.posts.insert(0, (int(time.time()), "posts/%s" % name))
+        if not os.path.exists(abs_pfn):
+            open(abs_pfn,"w").write(open(filename).read())
+        inv_fname = self.update_inventory()
+        rv = 0
+        if highlight and not name.endswith(".html"):
+            from pygments.cmdline import main as pygmain
+            rv = pygmain(["pygmentize", "-o", abs_hfn,
+                          "-O", "full", abs_pfn])
+        if not highlight or rv:
+            content = open(abs_pfn).read()
+            open(abs_hfn, "w").write(
+                "<HTML><BODY><PRE>" + content + "</PRE></BODY></HTML>")
+        commands.add(uu, self.bbrepo, abs_pfn, abs_hfn)
+        commands.commit(uu, self.bbrepo, abs_hfn, abs_pfn,
+                        inv_fname, message="Adding %s" % name)
+        if push: commands.push(uu, self.bbrepo)
+
+    def update_inventory(self):
+        tip = self.bbrepo["tip"]
+        vals = []
+        for t, pfn in self.posts:
+            if pfn not in tip:
+                d = open(os.path.join(self.repo_fn, pfn)).read()[:80]
+            else:
+                d = tip[pfn].data()[:80]
+            vals.append(dict(modified = time.ctime(t),
+                             modtime = t,
+                             name = pfn[42:], # 6 for posts/ then 36 for UUID
+                             descr = d)) 
+        fn = os.path.join(self.repo_fn, "inventory.json")
+        f = open(fn, "w")
+        f.write("var inventory_data = ")
+        json.dump(vals, f)
+        f.write(";")
+        return fn

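A usage sketch for PostInventory, assuming pasteboard_repo under [yt] in
~/.yt/config points at a local clone of the pasteboard repository; the file
name is hypothetical:

    from yt.utilities.pasteboard import PostInventory

    pp = PostInventory()           # pulls and updates the repository
    pp.add_post("my_script.py")    # highlights, commits, and pushes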

http://bitbucket.org/yt_analysis/yt/changeset/be04ebbc8af6/
changeset:   r3842:be04ebbc8af6
branch:      yt
user:        MatthewTurk
date:        2011-03-16 01:40:15
summary:     Fixing this up to work a bit better
affected #:  1 file (201 bytes)

--- a/yt/utilities/pasteboard.py	Tue Mar 15 17:19:42 2011 -0700
+++ b/yt/utilities/pasteboard.py	Tue Mar 15 17:40:15 2011 -0700
@@ -87,12 +87,16 @@
         vals = []
         for t, pfn in self.posts:
             if pfn not in tip:
-                d = open(os.path.join(self.repo_fn, pfn)).read()[:80]
+                d = open(os.path.join(self.repo_fn, pfn)).read()
             else:
-                d = tip[pfn].data()[:80]
+                d = tip[pfn].data()
+            if len(d) > 80: d = d[:77] + "..."
+            name_noext = pfn[6:].replace(".","-")
             vals.append(dict(modified = time.ctime(t),
                              modtime = t,
-                             name = pfn[42:], # 6 for posts/ then 36 for UUID
+                             fullname = pfn,
+                             htmlname = "html/%s.html" % name_noext,
+                             name = pfn[43:], # 6 for posts/, 36 for the UUID, 1 for the hyphen
                              descr = d)) 
         fn = os.path.join(self.repo_fn, "inventory.json")
         f = open(fn, "w")


http://bitbucket.org/yt_analysis/yt/changeset/84a352ed5755/
changeset:   r3843:84a352ed5755
branch:      yt
user:        MatthewTurk
date:        2011-03-16 01:57:57
summary:     Adding "desc" support (currently mandatory) and also adding fallback
handling for when the posts and html directories do not yet exist.
affected #:  2 files (944 bytes)

--- a/yt/utilities/command_line.py	Tue Mar 15 17:40:15 2011 -0700
+++ b/yt/utilities/command_line.py	Tue Mar 15 17:57:57 2011 -0700
@@ -592,14 +592,17 @@
         ph.modify["line"](pr.data["Density"], pr.data["Temperature"])
         pc.save()
 
-    @add_cmd_options([])
+    @cmdln.option("-d", "--desc", action="store",
+                  default = False, dest="desc",
+                  help="Description for this pasteboard entry")
     def do_pasteboard(self, subcmd, opts, arg):
         """
         Place a file into the user's pasteboard
         """
+        if opts.desc is False: raise RuntimeError("a description (-d) is required")
         from yt.utilities.pasteboard import PostInventory
         pp = PostInventory()
-        pp.add_post(arg)
+        pp.add_post(arg, desc=opts.desc)
     
 def run_main():
     for co in ["--parallel", "--paste"]:


--- a/yt/utilities/pasteboard.py	Tue Mar 15 17:40:15 2011 -0700
+++ b/yt/utilities/pasteboard.py	Tue Mar 15 17:57:57 2011 -0700
@@ -41,20 +41,25 @@
         uu.readconfig(config_fn)
         commands.pull(uu, self.bbrepo)
         commands.update(uu, self.bbrepo, clean=True)
+        if not os.path.exists(os.path.join(repo_fn, "posts")):
+            os.makedirs(os.path.join(repo_fn, "posts"))
+        if not os.path.exists(os.path.join(repo_fn, "html")):
+            os.makedirs(os.path.join(repo_fn, "html"))
         self.uu = uu
 
     def regenerate_posts(self):
         self.posts = []
         for file in self.bbrepo["tip"]:
-            if file.startswith("posts/") and file.count("/") == 1:
+            if file.startswith("posts/") and file.count("/") == 1 \
+               and not file.endswith(".desc"):
                 filectx = self.bbrepo["tip"][file]
                 last_mod = filectx.filectx(filectx.filerev()).date()
                 self.posts.append((last_mod[0] + last_mod[1], file))
         self.posts.sort()
         self.posts = self.posts[::-1]
 
-    def add_post(self, filename, uu = None,
-                 highlight = True, push = True):
+    def add_post(self, filename, desc = None,
+                 uu = None, highlight = True, push = True):
         # We assume the post filename exists in the current space
         self.regenerate_posts()
         if uu is None: uu = self.uu
@@ -65,6 +70,8 @@
         pfn = "posts/%s" % (name)
         abs_pfn = os.path.join(self.repo_fn, pfn)
         abs_hfn = os.path.join(self.repo_fn, hfn)
+        if desc is not None:
+            open(abs_pfn + ".desc", "w").write(desc)
         self.posts.insert(0, (int(time.time()), "posts/%s" % name))
         if not os.path.exists(abs_pfn):
             open(abs_pfn,"w").write(open(filename).read())
@@ -77,17 +84,26 @@
             content = open(abs_pfn).read()
             open(abs_hfn, "w").write(
                 "<HTML><BODY><PRE>" + content + "</PRE></BODY></HTML>")
-        commands.add(uu, self.bbrepo, abs_pfn, abs_hfn)
-        commands.commit(uu, self.bbrepo, abs_hfn, abs_pfn,
-                        inv_fname, message="Adding %s" % name)
+        to_manage = [abs_pfn, abs_hfn]
+        if desc is not None: to_manage.append(abs_pfn + ".desc")
+        commands.add(uu, self.bbrepo, *to_manage)
+        commands.commit(uu, self.bbrepo, *(to_manage + [inv_fname]),
+                        message="Adding %s" % name)
         if push: commands.push(uu, self.bbrepo)
 
     def update_inventory(self):
         tip = self.bbrepo["tip"]
         vals = []
         for t, pfn in self.posts:
-            if pfn not in tip:
-                d = open(os.path.join(self.repo_fn, pfn)).read()
+            dfn = pfn + ".desc"
+            if dfn in tip:
+                d = tip[dfn].data()
+            elif pfn not in tip:
+                abs_pfn = os.path.join(self.repo_fn, pfn)
+                if os.path.exists(abs_pfn + ".desc"):
+                    d = open(abs_pfn + ".desc").read()
+                else:
+                    d = open(abs_pfn).read()
             else:
                 d = tip[pfn].data()
             if len(d) > 80: d = d[:77] + "..."


http://bitbucket.org/yt_analysis/yt/changeset/e5c8f72fd1e8/
changeset:   r3844:e5c8f72fd1e8
branch:      yt
user:        MatthewTurk
date:        2011-03-16 01:59:57
summary:     Merging
affected #:  0 files (0 bytes)

--- a/yt/analysis_modules/halo_profiler/multi_halo_profiler.py	Tue Mar 15 17:57:57 2011 -0700
+++ b/yt/analysis_modules/halo_profiler/multi_halo_profiler.py	Tue Mar 15 17:59:57 2011 -0700
@@ -46,8 +46,7 @@
     parallel_root_only
 from yt.visualization.fixed_resolution import \
     FixedResolutionBuffer
-from yt.visualization.plot_collection import \
-    PlotCollection
+from yt.visualization.image_writer import write_image
 
 PROFILE_RADIUS_THRESHOLD = 2
 
@@ -259,10 +258,11 @@
 
         self.profile_fields.append({'field':field, 'weight_field':weight_field, 'accumulation':accumulation})
 
-    def add_projection(self, field, weight_field=None):
+    def add_projection(self, field, weight_field=None, cmap='algae'):
         "Add a field for projection."
 
-        self.projection_fields.append({'field':field, 'weight_field':weight_field})
+        self.projection_fields.append({'field':field, 'weight_field':weight_field, 
+                                       'cmap': cmap})
 
     @parallel_blocking_call
     def make_profiles(self, filename=None, prefilters=None, **kwargs):
@@ -466,14 +466,13 @@
             return
 
         # Set resolution for fixed resolution output.
-        if save_cube:
-            if self.project_at_level == 'max':
-                proj_level = self.pf.h.max_level
-            else:
-                proj_level = int(self.project_at_level)
-            proj_dx = self.pf.units[self.projection_width_units] / self.pf.parameters['TopGridDimensions'][0] / \
-                (self.pf.parameters['RefineBy']**proj_level)
-            projectionResolution = int(self.projection_width / proj_dx)
+        if self.project_at_level == 'max':
+            proj_level = self.pf.h.max_level
+        else:
+            proj_level = int(self.project_at_level)
+        proj_dx = self.pf.units[self.projection_width_units] / self.pf.parameters['TopGridDimensions'][0] / \
+            (self.pf.parameters['RefineBy']**proj_level)
+        projectionResolution = int(self.projection_width / proj_dx)
 
         # Create output directory.
         if self.output_dir is not None:
@@ -515,8 +514,7 @@
             # Make projections.
             if not isinstance(axes, types.ListType): axes = list([axes])
             for w in axes:
-                # Create a plot collection.
-                pc = PlotCollection(self.pf, center=center)
+                projections = []
                 # YT projections do not follow the right-hand rule.
                 coords = range(3)
                 del coords[w]
@@ -524,12 +522,15 @@
                 y_axis = coords[1]
 
                 for hp in self.projection_fields:
-                    pc.add_projection(hp['field'], w, weight_field=hp['weight_field'], data_source=region)
+                    projections.append(self.pf.h.proj(w, hp['field'], 
+                                                      weight_field=hp['weight_field'], 
+                                                      data_source=region, center=halo['center'],
+                                                      serialize=False))
                 
                 # Set x and y limits, shift image if it overlaps domain boundary.
                 if need_per:
                     pw = self.projection_width/self.pf.units[self.projection_width_units]
-                    shift_projections(self.pf, pc, halo['center'], center, w)
+                    #shift_projections(self.pf, projections, halo['center'], center, w)
                     # Projection has now been shifted to center of box.
                     proj_left = [center[x_axis]-0.5*pw, center[y_axis]-0.5*pw]
                     proj_right = [center[x_axis]+0.5*pw, center[y_axis]+0.5*pw]
@@ -537,30 +538,33 @@
                     proj_left = [leftEdge[x_axis], leftEdge[y_axis]]
                     proj_right = [rightEdge[x_axis], rightEdge[y_axis]]
 
-                pc.set_xlim(proj_left[0], proj_right[0])
-                pc.set_ylim(proj_left[1], proj_right[1])
+                # Save projection data to hdf5 file.
+                if save_cube or save_images:
+                    axis_labels = ['x', 'y', 'z']
 
-                # Save projection data to hdf5 file.
-                if save_cube:
-                    axis_labels = ['x', 'y', 'z']
-                    dataFilename = "%s/Halo_%04d_%s_data.h5" % \
+                    if save_cube:
+                        dataFilename = "%s/Halo_%04d_%s_data.h5" % \
                             (my_output_dir, halo['id'], axis_labels[w])
-                    mylog.info("Saving projection data to %s." % dataFilename)
+                        mylog.info("Saving projection data to %s." % dataFilename)
+                        output = h5py.File(dataFilename, "a")
 
-                    output = h5py.File(dataFilename, "a")
                     # Create fixed resolution buffer for each projection and write them out.
                     for e, hp in enumerate(self.projection_fields):
-                        frb = FixedResolutionBuffer(pc.plots[e].data, (proj_left[0], proj_right[0], 
-                                                                       proj_left[1], proj_right[1]),
-                                                          (projectionResolution, projectionResolution),
-                                                          antialias=False)
+                        frb = FixedResolutionBuffer(projections[e], (proj_left[0], proj_right[0], 
+                                                                     proj_left[1], proj_right[1]),
+                                                    (projectionResolution, projectionResolution),
+                                                    antialias=False)
                         dataset_name = "%s_%s" % (hp['field'], hp['weight_field'])
-                        if dataset_name in output.listnames(): del output[dataset_name]
-                        output.create_dataset(dataset_name, data=frb[hp['field']])
-                    output.close()
+                        if save_cube:
+                            if dataset_name in output.listnames(): del output[dataset_name]
+                            output.create_dataset(dataset_name, data=frb[hp['field']])
 
-                if save_images:
-                    pc.save("%s/Halo_%04d" % (my_output_dir, halo['id']), force_save=True)
+                        if save_images:
+                            filename = "%s/Halo_%04d_%s_%s.png" % (my_output_dir, halo['id'], 
+                                                                   dataset_name, axis_labels[w])
+                            write_image(frb[hp['field']], filename, cmap_name=hp['cmap'])
+
+                    if save_cube: output.close()
 
             del region
 
@@ -789,7 +793,7 @@
         else:
             os.mkdir(my_output_dir)
 
-def shift_projections(pf, pc, oldCenter, newCenter, axis):
+def shift_projections(pf, projections, oldCenter, newCenter, axis):
     """
     Shift projection data around.
     This is necessary when projecting a periodic region.
@@ -801,10 +805,10 @@
     del offset[axis]
     del width[axis]
 
-    for plot in pc.plots:
+    for plot in projections:
         # Get name of data field.
         other_fields = {'px':True, 'py':True, 'pdx':True, 'pdy':True, 'weight_field':True}
-        for pfield in plot.data.data.keys():
+        for pfield in plot.data.keys():
             if not(other_fields.has_key(pfield)):
                 field = pfield
                 break


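The refactoring above replaces PlotCollection with bare projection objects,
FixedResolutionBuffer, and write_image.  The underlying pattern, sketched
standalone (the dataset path reuses the placeholder from earlier docstrings;
the bounds and resolution are arbitrary):

    from yt.mods import load
    from yt.visualization.fixed_resolution import FixedResolutionBuffer
    from yt.visualization.image_writer import write_image

    pf = load("RD0005-mine/RedshiftOutput0005")
    proj = pf.h.proj(0, "Density", weight_field=None)
    frb = FixedResolutionBuffer(proj, (0.25, 0.75, 0.25, 0.75), (512, 512))
    write_image(frb["Density"], "Density_x.png", cmap_name="algae")
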
--- a/yt/analysis_modules/light_cone/light_cone.py	Tue Mar 15 17:57:57 2011 -0700
+++ b/yt/analysis_modules/light_cone/light_cone.py	Tue Mar 15 17:59:57 2011 -0700
@@ -34,20 +34,19 @@
 from yt.config import ytcfg
 from yt.convenience import load
 from yt.utilities.cosmology import Cosmology
-from yt.visualization.plot_collection import \
-    PlotCollection
+from yt.visualization.image_writer import write_image
 
 from .common_n_volume import commonNVolume
 from .halo_mask import light_cone_halo_map, \
     light_cone_halo_mask
-from .light_cone_projection import LightConeProjection
+from .light_cone_projection import _light_cone_projection
 
 class LightCone(EnzoSimulation):
     def __init__(self, EnzoParameterFile, initial_redshift=1.0, 
                  final_redshift=0.0, observer_redshift=0.0,
                  field_of_view_in_arcminutes=600.0, image_resolution_in_arcseconds=60.0, 
                  use_minimum_datasets=True, deltaz_min=0.0, minimum_coherent_box_fraction=0.0,
-                 output_dir='LC', output_prefix='LightCone', **kwargs):
+                 set_parameters=None, output_dir='LC', output_prefix='LightCone', **kwargs):
         """
         Initialize a LightCone object.
         :param initial_redshift (float): the initial (highest) redshift for the light cone.  Default: 1.0.
@@ -67,6 +66,7 @@
                the projection axis and center.  This was invented to allow light cones with thin slices to 
                sample coherent large scale structure, but in practice does not work so well.  Try setting 
                this parameter to 1 and see what happens.  Default: 0.0.
+        :param set_parameters (dict): dictionary of parameters to attach to pf.parameters.  Default: None.
         :param output_dir (str): the directory in which images and data files will be written.  Default: 'LC'.
         :param output_prefix (str): the prefix of all images and data files.  Default: 'LightCone'.
         """
@@ -79,6 +79,7 @@
         self.use_minimum_datasets = use_minimum_datasets
         self.deltaz_min = deltaz_min
         self.minimum_coherent_box_fraction = minimum_coherent_box_fraction
+        self.set_parameters = set_parameters
         self.output_dir = output_dir
         self.output_prefix = output_prefix
 
@@ -233,8 +234,8 @@
             del halo_mask_cube
 
     def project_light_cone(self, field, weight_field=None, apply_halo_mask=False, node=None,
-                           save_stack=True, save_slice_images=False, flatten_stack=False, photon_field=False,
-                           add_redshift_label=False, **kwargs):
+                           save_stack=True, save_slice_images=False, cmap_name='algae', 
+                           flatten_stack=False, photon_field=False):
         """
         Create projections for light cone, then add them together.
         :param weight_field (str): the weight field of the projection.  This has the same meaning as in standard 
@@ -246,6 +247,7 @@
         :param save_stack (bool): if True, the unflatted light cone data including each individual slice is written to 
                an hdf5 file.  Default: True.
         :param save_slice_images (bool): save images for each individual projection slice.  Default: False.
+        :param cmap_name (str): color map for images.  Default: 'algae'.
         :param flatten_stack (bool): if True, the light cone stack is continually flattened each time a slice is added 
                in order to save memory.  This is generally not necessary.  Default: False.
         :param photon_field (bool): if True, the projection data for each slice is decremented by 4 Pi R^2`, where R 
@@ -269,10 +271,13 @@
                 name = "%s%s_%s_%04d_%04d" % (self.output_dir, self.output_prefix,
                                               node, q, len(self.light_cone_solution))
             output['object'] = load(output['filename'])
-            frb = LightConeProjection(output, field, self.pixels, weight_field=weight_field,
-                                      save_image=save_slice_images,
-                                      name=name, node=node, **kwargs)
+            if self.set_parameters is not None:
+                output['object'].parameters.update(self.set_parameters)
+            frb = _light_cone_projection(output, field, self.pixels, 
+                                         weight_field=weight_field, node=node)
             if ytcfg.getint("yt", "__parallel_rank") == 0:
+                if save_slice_images:
+                    write_image(frb[field], "%s_%s.png" % (name, field), cmap_name=cmap_name)
+
                 if photon_field:
                     # Decrement the flux by the luminosity distance. Assume field in frb is in erg/s/cm^2/Hz
                     co = Cosmology(HubbleConstantNow = (100.0 * self.enzoParameters['CosmologyHubbleConstantNow']),
@@ -286,7 +291,6 @@
                     mylog.info("Distance to slice = %e" % dL)
                     frb[field] *= factor #in erg/s/cm^2/Hz on observer's image plane.
 
-            if ytcfg.getint("yt", "__parallel_rank") == 0:
                 if weight_field is not None:
                     # Data come back normalized by the weight field.
                     # Undo that so it can be added up for the light cone.
@@ -305,9 +309,6 @@
                     if weight_field is not None:
                         self.projection_weight_field_stack = [sum(self.projection_weight_field_stack)]
 
-            # Delete the plot collection now that the frb is deleted.
-            del output['pc']
-
             # Unless this is the last slice, delete the dataset object.
             # The last one will be saved to make the plot collection.
             if (q < len(self.light_cone_solution) - 1):
@@ -329,6 +330,10 @@
             # but replace the data with the full light cone projection data.
             frb.data[field] = lightConeProjection
 
+            # Write image.
+            if save_slice_images:
+                write_image(frb[field], "%s_%s.png" % (filename, field), cmap_name=cmap_name)
+
             # Write stack to hdf5 file.
             if save_stack:
                 self._save_light_cone_stack(field=field, weight_field=weight_field, filename=filename)
@@ -341,17 +346,6 @@
                 else:
                     mylog.error("No halo mask loaded, call get_halo_mask.")
 
-            # Make a plot collection for the light cone projection.
-            center = [0.5 * (self.light_cone_solution[-1]['object'].parameters['DomainLeftEdge'][w] + 
-                             self.light_cone_solution[-1]['object'].parameters['DomainRightEdge'][w])
-                      for w in range(self.light_cone_solution[-1]['object'].parameters['TopGridRank'])]
-            pc = PlotCollection(self.light_cone_solution[-1]['object'], center=center)
-            pc.add_fixed_resolution_plot(frb, field, **kwargs)
-            pc.save(filename)
-
-            # Return the plot collection so the user can remake the plot if they want.
-            return pc
-
     def rerandomize_light_cone_solution(self, newSeed, recycle=True, filename=None):
         """
         When making a projection for a light cone, only randomizations along the line of sight make any 


--- a/yt/analysis_modules/light_cone/light_cone_projection.py	Tue Mar 15 17:57:57 2011 -0700
+++ b/yt/analysis_modules/light_cone/light_cone_projection.py	Tue Mar 15 17:59:57 2011 -0700
@@ -30,15 +30,12 @@
 from yt.config import ytcfg
 from yt.visualization.fixed_resolution import \
     FixedResolutionBuffer
-from yt.visualization.plot_collection import \
-    PlotCollection
 from yt.utilities.parallel_tools.parallel_analysis_interface import \
     parallel_blocking_call
 
 @parallel_blocking_call
-def LightConeProjection(lightConeSlice, field, pixels, weight_field=None, 
-                        save_image=False, name="", node=None, field_cuts=None,
-                        add_redshift_label=False, **kwargs):
+def _light_cone_projection(lightConeSlice, field, pixels, weight_field=None, 
+                           save_image=False, node=None, field_cuts=None):
     "Create a single projection to be added into the light cone stack."
 
     # Use some projection parameters to seed random number generator to make unique node name.
@@ -57,13 +54,9 @@
 
     mylog.info("Making projection at z = %f from %s." % (lightConeSlice['redshift'], lightConeSlice['filename']))
 
-    region_center = [0.5 * (lightConeSlice['object'].parameters['DomainRightEdge'][q] +
-                            lightConeSlice['object'].parameters['DomainLeftEdge'][q]) \
-                         for q in range(lightConeSlice['object'].parameters['TopGridRank'])]
-
-    # Make the plot collection and put it in the slice so we can delete it cleanly in the same scope 
-    # as where the frb will be deleted.
-    lightConeSlice['pc'] = PlotCollection(lightConeSlice['object'], center=region_center)
+    region_center = [0.5 * (lightConeSlice['object'].domain_right_edge[q] +
+                            lightConeSlice['object'].domain_left_edge[q]) \
+                         for q in range(lightConeSlice['object'].dimensionality)]
 
     # 1. The Depth Problem
     # Use coordinate field cut in line of sight to cut projection to proper depth.
@@ -90,11 +83,9 @@
         these_field_cuts.append(cut_mask)
 
     # Make projection.
-    lightConeSlice['pc'].add_projection(field, lightConeSlice['ProjectionAxis'], 
-                                        weight_field=weight_field, 
-                                        field_parameters=dict(field_cuts=these_field_cuts, 
-                                                              node_name=node_name),
-                                        **kwargs)
+    proj = lightConeSlice['object'].h.proj(lightConeSlice['ProjectionAxis'], field, 
+                                           weight_field, center=region_center, 
+                                           field_cuts=these_field_cuts, node_name=node_name)
 
     # If parallel: all the processes have the whole projection object, but we only need to do the tiling, shifting, and cutting once.
     if ytcfg.getint("yt", "__parallel_rank") == 0:
@@ -103,30 +94,24 @@
         # Tile projection to specified width.
 
         # Original projection data.
-        original_px = copy.deepcopy(lightConeSlice['pc'].plots[0].data['px'])
-        original_py = copy.deepcopy(lightConeSlice['pc'].plots[0].data['py'])
-        original_pdx = copy.deepcopy(lightConeSlice['pc'].plots[0].data['pdx'])
-        original_pdy = copy.deepcopy(lightConeSlice['pc'].plots[0].data['pdy'])
-        original_field = copy.deepcopy(lightConeSlice['pc'].plots[0].data[field])
-        original_weight_field = copy.deepcopy(lightConeSlice['pc'].plots[0].data['weight_field'])
+        original_px = copy.deepcopy(proj['px'])
+        original_py = copy.deepcopy(proj['py'])
+        original_pdx = copy.deepcopy(proj['pdx'])
+        original_pdy = copy.deepcopy(proj['pdy'])
+        original_field = copy.deepcopy(proj[field])
+        original_weight_field = copy.deepcopy(proj['weight_field'])
 
         # Copy original into offset positions to make tiles.
         for x in range(int(na.ceil(lightConeSlice['WidthBoxFraction']))):
             for y in range(int(na.ceil(lightConeSlice['WidthBoxFraction']))):
                 if ((x + y) > 0):
-                    lightConeSlice['pc'].plots[0].data['px'] = \
-                        na.concatenate([lightConeSlice['pc'].plots[0].data['px'], original_px+x])
-                    lightConeSlice['pc'].plots[0].data['py'] = \
-                        na.concatenate([lightConeSlice['pc'].plots[0].data['py'], original_py+y])
-                    lightConeSlice['pc'].plots[0].data['pdx'] = \
-                        na.concatenate([lightConeSlice['pc'].plots[0].data['pdx'], original_pdx])
-                    lightConeSlice['pc'].plots[0].data['pdy'] = \
-                        na.concatenate([lightConeSlice['pc'].plots[0].data['pdy'], original_pdy])
-                    lightConeSlice['pc'].plots[0].data[field] = \
-                        na.concatenate([lightConeSlice['pc'].plots[0].data[field], original_field])
-                    lightConeSlice['pc'].plots[0].data['weight_field'] = \
-                        na.concatenate([lightConeSlice['pc'].plots[0].data['weight_field'], 
-                                        original_weight_field])
+                    proj['px'] = na.concatenate([proj['px'], original_px+x])
+                    proj['py'] = na.concatenate([proj['py'], original_py+y])
+                    proj['pdx'] = na.concatenate([proj['pdx'], original_pdx])
+                    proj['pdy'] = na.concatenate([proj['pdy'], original_pdy])
+                    proj[field] = na.concatenate([proj[field], original_field])
+                    proj['weight_field'] = na.concatenate([proj['weight_field'], 
+                                                           original_weight_field])
 
         # Delete originals.
         del original_px
@@ -144,86 +129,74 @@
         del offset[lightConeSlice['ProjectionAxis']]
 
         # Shift x and y positions.
-        lightConeSlice['pc'].plots[0]['px'] -= offset[0]
-        lightConeSlice['pc'].plots[0]['py'] -= offset[1]
+        proj['px'] -= offset[0]
+        proj['py'] -= offset[1]
 
         # Wrap off-edge cells back around to other side (periodic boundary conditions).
-        lightConeSlice['pc'].plots[0]['px'][lightConeSlice['pc'].plots[0]['px'] < 0] += \
-            na.ceil(lightConeSlice['WidthBoxFraction'])
-        lightConeSlice['pc'].plots[0]['py'][lightConeSlice['pc'].plots[0]['py'] < 0] += \
-            na.ceil(lightConeSlice['WidthBoxFraction'])
+        proj['px'][proj['px'] < 0] += na.ceil(lightConeSlice['WidthBoxFraction'])
+        proj['py'][proj['py'] < 0] += na.ceil(lightConeSlice['WidthBoxFraction'])
 
         # After shifting, some cells have fractional coverage on both sides of the box.
         # Find those cells and make copies to be placed on the other side.
 
         # Cells hanging off the right edge.
-        add_x_right = lightConeSlice['pc'].plots[0]['px'] + \
-            0.5 * lightConeSlice['pc'].plots[0]['pdx'] > na.ceil(lightConeSlice['WidthBoxFraction'])
-        add_x_px = lightConeSlice['pc'].plots[0]['px'][add_x_right]
+        add_x_right = proj['px'] + 0.5 * proj['pdx'] > na.ceil(lightConeSlice['WidthBoxFraction'])
+        add_x_px = proj['px'][add_x_right]
         add_x_px -= na.ceil(lightConeSlice['WidthBoxFraction'])
-        add_x_py = lightConeSlice['pc'].plots[0]['py'][add_x_right]
-        add_x_pdx = lightConeSlice['pc'].plots[0]['pdx'][add_x_right]
-        add_x_pdy = lightConeSlice['pc'].plots[0]['pdy'][add_x_right]
-        add_x_field = lightConeSlice['pc'].plots[0][field][add_x_right]
-        add_x_weight_field = lightConeSlice['pc'].plots[0]['weight_field'][add_x_right]
+        add_x_py = proj['py'][add_x_right]
+        add_x_pdx = proj['pdx'][add_x_right]
+        add_x_pdy = proj['pdy'][add_x_right]
+        add_x_field = proj[field][add_x_right]
+        add_x_weight_field = proj['weight_field'][add_x_right]
         del add_x_right
 
         # Cells hanging off the left edge.
-        add_x_left = lightConeSlice['pc'].plots[0]['px'] - \
-            0.5 * lightConeSlice['pc'].plots[0]['pdx'] < 0
-        add2_x_px = lightConeSlice['pc'].plots[0]['px'][add_x_left]
+        add_x_left = proj['px'] - 0.5 * proj['pdx'] < 0
+        add2_x_px = proj['px'][add_x_left]
         add2_x_px += na.ceil(lightConeSlice['WidthBoxFraction'])
-        add2_x_py = lightConeSlice['pc'].plots[0]['py'][add_x_left]
-        add2_x_pdx = lightConeSlice['pc'].plots[0]['pdx'][add_x_left]
-        add2_x_pdy = lightConeSlice['pc'].plots[0]['pdy'][add_x_left]
-        add2_x_field = lightConeSlice['pc'].plots[0][field][add_x_left]
-        add2_x_weight_field = lightConeSlice['pc'].plots[0]['weight_field'][add_x_left]
+        add2_x_py = proj['py'][add_x_left]
+        add2_x_pdx = proj['pdx'][add_x_left]
+        add2_x_pdy = proj['pdy'][add_x_left]
+        add2_x_field = proj[field][add_x_left]
+        add2_x_weight_field = proj['weight_field'][add_x_left]
         del add_x_left
 
         # Cells hanging off the top edge.
-        add_y_right = lightConeSlice['pc'].plots[0]['py'] + \
-            0.5 * lightConeSlice['pc'].plots[0]['pdy'] > na.ceil(lightConeSlice['WidthBoxFraction'])
-        add_y_px = lightConeSlice['pc'].plots[0]['px'][add_y_right]
-        add_y_py = lightConeSlice['pc'].plots[0]['py'][add_y_right]
+        add_y_right = proj['py'] + 0.5 * proj['pdy'] > na.ceil(lightConeSlice['WidthBoxFraction'])
+        add_y_px = proj['px'][add_y_right]
+        add_y_py = proj['py'][add_y_right]
         add_y_py -= na.ceil(lightConeSlice['WidthBoxFraction'])
-        add_y_pdx = lightConeSlice['pc'].plots[0]['pdx'][add_y_right]
-        add_y_pdy = lightConeSlice['pc'].plots[0]['pdy'][add_y_right]
-        add_y_field = lightConeSlice['pc'].plots[0][field][add_y_right]
-        add_y_weight_field = lightConeSlice['pc'].plots[0]['weight_field'][add_y_right]
+        add_y_pdx = proj['pdx'][add_y_right]
+        add_y_pdy = proj['pdy'][add_y_right]
+        add_y_field = proj[field][add_y_right]
+        add_y_weight_field = proj['weight_field'][add_y_right]
         del add_y_right
 
         # Cells hanging off the bottom edge.
-        add_y_left = lightConeSlice['pc'].plots[0]['py'] - \
-            0.5 * lightConeSlice['pc'].plots[0]['pdy'] < 0
-        add2_y_px = lightConeSlice['pc'].plots[0]['px'][add_y_left]
-        add2_y_py = lightConeSlice['pc'].plots[0]['py'][add_y_left]
+        add_y_left = proj['py'] - 0.5 * proj['pdy'] < 0
+        add2_y_px = proj['px'][add_y_left]
+        add2_y_py = proj['py'][add_y_left]
         add2_y_py += na.ceil(lightConeSlice['WidthBoxFraction'])
-        add2_y_pdx = lightConeSlice['pc'].plots[0]['pdx'][add_y_left]
-        add2_y_pdy = lightConeSlice['pc'].plots[0]['pdy'][add_y_left]
-        add2_y_field = lightConeSlice['pc'].plots[0][field][add_y_left]
-        add2_y_weight_field = lightConeSlice['pc'].plots[0]['weight_field'][add_y_left]
+        add2_y_pdx = proj['pdx'][add_y_left]
+        add2_y_pdy = proj['pdy'][add_y_left]
+        add2_y_field = proj[field][add_y_left]
+        add2_y_weight_field = proj['weight_field'][add_y_left]
         del add_y_left
 
         # Add the hanging cells back to the projection data.
-        lightConeSlice['pc'].plots[0].data['px'] = \
-            na.concatenate([lightConeSlice['pc'].plots[0]['px'], add_x_px, add_y_px, 
-                            add2_x_px, add2_y_px])
-        lightConeSlice['pc'].plots[0].data['py'] = \
-            na.concatenate([lightConeSlice['pc'].plots[0]['py'], add_x_py, add_y_py, 
-                            add2_x_py, add2_y_py])
-        lightConeSlice['pc'].plots[0].data['pdx'] = \
-            na.concatenate([lightConeSlice['pc'].plots[0]['pdx'], add_x_pdx, add_y_pdx, 
-                            add2_x_pdx, add2_y_pdx])
-        lightConeSlice['pc'].plots[0].data['pdy'] = \
-            na.concatenate([lightConeSlice['pc'].plots[0]['pdy'], add_x_pdy, add_y_pdy, 
-                            add2_x_pdy, add2_y_pdy])
-        lightConeSlice['pc'].plots[0].data[field] = \
-            na.concatenate([lightConeSlice['pc'].plots[0][field], add_x_field, add_y_field, 
-                            add2_x_field, add2_y_field])
-        lightConeSlice['pc'].plots[0].data['weight_field'] = \
-            na.concatenate([lightConeSlice['pc'].plots[0]['weight_field'], 
-                            add_x_weight_field, add_y_weight_field,
-                            add2_x_weight_field, add2_y_weight_field])
+        proj['px'] = na.concatenate([proj['px'], add_x_px, add_y_px, 
+                                     add2_x_px, add2_y_px])
+        proj['py'] = na.concatenate([proj['py'], add_x_py, add_y_py, 
+                                     add2_x_py, add2_y_py])
+        proj['pdx'] = na.concatenate([proj['pdx'], add_x_pdx, add_y_pdx, 
+                                      add2_x_pdx, add2_y_pdx])
+        proj['pdy'] = na.concatenate([proj['pdy'], add_x_pdy, add_y_pdy, 
+                                      add2_x_pdy, add2_y_pdy])
+        proj[field] = na.concatenate([proj[field], add_x_field, add_y_field, 
+                                      add2_x_field, add2_y_field])
+        proj['weight_field'] = na.concatenate([proj['weight_field'], 
+                                               add_x_weight_field, add_y_weight_field,
+                                               add2_x_weight_field, add2_y_weight_field])
 
         # Delete original copies of hanging cells.
         del add_x_px, add_y_px, add2_x_px, add2_y_px
@@ -236,42 +209,29 @@
         # Tiles were made rounding up the width to the nearest integer.
         # Cut off the edges to get the specified width.
         # Cut in the x direction.
-        cut_x = lightConeSlice['pc'].plots[0].data['px'] - \
-            0.5 * lightConeSlice['pc'].plots[0].data['pdx'] < lightConeSlice['WidthBoxFraction']
-        lightConeSlice['pc'].plots[0].data['px'] = lightConeSlice['pc'].plots[0].data['px'][cut_x]
-        lightConeSlice['pc'].plots[0].data['py'] = lightConeSlice['pc'].plots[0].data['py'][cut_x]
-        lightConeSlice['pc'].plots[0].data['pdx'] = lightConeSlice['pc'].plots[0].data['pdx'][cut_x]
-        lightConeSlice['pc'].plots[0].data['pdy'] = lightConeSlice['pc'].plots[0].data['pdy'][cut_x]
-        lightConeSlice['pc'].plots[0].data[field] = lightConeSlice['pc'].plots[0].data[field][cut_x]
-        lightConeSlice['pc'].plots[0].data['weight_field'] = \
-            lightConeSlice['pc'].plots[0].data['weight_field'][cut_x]
+        cut_x = proj['px'] - 0.5 * proj['pdx'] < lightConeSlice['WidthBoxFraction']
+        proj['px'] = proj['px'][cut_x]
+        proj['py'] = proj['py'][cut_x]
+        proj['pdx'] = proj['pdx'][cut_x]
+        proj['pdy'] = proj['pdy'][cut_x]
+        proj[field] = proj[field][cut_x]
+        proj['weight_field'] = proj['weight_field'][cut_x]
         del cut_x
 
         # Cut in the y direction.
-        cut_y = lightConeSlice['pc'].plots[0].data['py'] - \
-            0.5 * lightConeSlice['pc'].plots[0].data['pdy'] < lightConeSlice['WidthBoxFraction']
-        lightConeSlice['pc'].plots[0].data['px'] = lightConeSlice['pc'].plots[0].data['px'][cut_y]
-        lightConeSlice['pc'].plots[0].data['py'] = lightConeSlice['pc'].plots[0].data['py'][cut_y]
-        lightConeSlice['pc'].plots[0].data['pdx'] = lightConeSlice['pc'].plots[0].data['pdx'][cut_y]
-        lightConeSlice['pc'].plots[0].data['pdy'] = lightConeSlice['pc'].plots[0].data['pdy'][cut_y]
-        lightConeSlice['pc'].plots[0].data[field] = lightConeSlice['pc'].plots[0].data[field][cut_y]
-        lightConeSlice['pc'].plots[0].data['weight_field'] = lightConeSlice['pc'].plots[0].data['weight_field'][cut_y]
+        cut_y = proj['py'] - 0.5 * proj['pdy'] < lightConeSlice['WidthBoxFraction']
+        proj['px'] = proj['px'][cut_y]
+        proj['py'] = proj['py'][cut_y]
+        proj['pdx'] = proj['pdx'][cut_y]
+        proj['pdy'] = proj['pdy'][cut_y]
+        proj[field] = proj[field][cut_y]
+        proj['weight_field'] = proj['weight_field'][cut_y]
         del cut_y
 
-        # Save an image if requested.
-        if save_image:
-            lightConeSlice['pc'].set_xlim(0, lightConeSlice['WidthBoxFraction'])
-            lightConeSlice['pc'].set_ylim(0, lightConeSlice['WidthBoxFraction'])
-            if add_redshift_label:
-                lightConeSlice['pc'].plots[-1].modify['text']((0.5, 0.03), "z = %.3f" % lightConeSlice['redshift'], 
-                                                              dict(color='black',size=50))
-            lightConeSlice['pc'].save(name)
-
         # Create fixed resolution buffer to return back to the light cone object.
         # These buffers will be stacked together to make the light cone.
-        frb = FixedResolutionBuffer(lightConeSlice['pc'].plots[0].data, \
-                                        (0, lightConeSlice['WidthBoxFraction'], 
-                                         0, lightConeSlice['WidthBoxFraction']),
+        frb = FixedResolutionBuffer(proj, (0, lightConeSlice['WidthBoxFraction'], 
+                                           0, lightConeSlice['WidthBoxFraction']),
                                     (pixels, pixels), antialias=False)
 
         return frb
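
For reference, the refactor above applies one pattern throughout: wrap negative pixel centers by the rounded-up box width, then duplicate any cell whose extent straddles an edge so both sides of the periodic box stay covered. A minimal NumPy sketch of that pattern, assuming 1D arrays of pixel centers and widths in box-width units (the names mirror the diff, but this is illustrative rather than yt API):

    import numpy as na  # yt's numpy alias at the time

    def wrap_and_duplicate(px, pdx, width_box_fraction):
        """Periodically wrap cell centers, then copy cells hanging off
        either edge to the opposite side of the box."""
        w = na.ceil(width_box_fraction)
        px = px.copy()
        px[px < 0] += w                 # wrap off-edge centers
        right = px + 0.5 * pdx > w      # cells spilling off the right edge
        left = px - 0.5 * pdx < 0       # cells spilling off the left edge
        px = na.concatenate([px, px[right] - w, px[left] + w])
        pdx = na.concatenate([pdx, pdx[right], pdx[left]])
        return px, pdx

The same duplication is applied to py/pdy, the projected field, and the weight field before the final cut back down to the requested width.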


--- a/yt/data_objects/universal_fields.py	Tue Mar 15 17:57:57 2011 -0700
+++ b/yt/data_objects/universal_fields.py	Tue Mar 15 17:59:57 2011 -0700
@@ -35,6 +35,7 @@
 from yt.funcs import *
 
 from yt.utilities.amr_utils import CICDeposit_3
+from yt.utilities.cosmology import Cosmology
 from field_info_container import \
     add_field, \
     ValidateDataField, \
@@ -53,7 +54,9 @@
      sigma_thompson, \
      clight, \
      kboltz, \
-     G
+     G, \
+     rho_crit_now, \
+     speed_of_light_cgs
      
 # Note that, despite my newfound efforts to comply with PEP-8,
 # I violate it here in order to keep the name/func_name relationship
@@ -388,6 +391,63 @@
     return (data['Density'] + data['Dark_Matter_Density'])
 add_field("Matter_Density",function=_Matter_Density,units=r"\rm{g}/\rm{cm^3}")
 
+def _ComovingDensity(field, data):
+    ef = (1.0 + data.pf.current_redshift)**3.0
+    return data["Density"]/ef
+add_field("ComovingDensity", function=_ComovingDensity, units=r"\rm{g}/\rm{cm}^3")
+
+# This is rho_total / rho_cr(z).
+def _Convert_Overdensity(data):
+    return 1 / (rho_crit_now * data.pf.hubble_constant**2 * 
+                (1+data.pf.current_redshift)**3)
+add_field("Overdensity",function=_Matter_Density,
+          convert_function=_Convert_Overdensity, units=r"")
+
+# This is (rho_total - <rho_total>) / <rho_total>.
+def _DensityPerturbation(field, data):
+    rho_bar = rho_crit_now * data.pf.omega_matter * \
+        data.pf.hubble_constant**2 * \
+        (1.0 + data.pf.current_redshift)**3
+    return ((data['Matter_Density'] - rho_bar) / rho_bar)
+add_field("DensityPerturbation",function=_DensityPerturbation,units=r"")
+
+# This is rho_b / <rho_b>.
+def _Baryon_Overdensity(field, data):
+    return data['Density']
+def _Convert_Baryon_Overdensity(data):
+    if data.pf.parameters.has_key('omega_baryon_now'):
+        omega_baryon_now = data.pf.parameters['omega_baryon_now']
+    else:
+        omega_baryon_now = 0.0441
+    return 1 / (omega_baryon_now * rho_crit_now * 
+                (data.pf['CosmologyHubbleConstantNow']**2) * 
+                ((1+data.pf['CosmologyCurrentRedshift'])**3))
+add_field("Baryon_Overdensity", function=_Baryon_Overdensity, 
+          convert_function=_Convert_Baryon_Overdensity, units=r"")
+
+# Weak lensing convergence.
+# Eqn 4 of Metzler, White, & Loken (2001, ApJ, 547, 560).
+def _convertConvergence(data):
+    if not data.pf.parameters.has_key('cosmology_calculator'):
+        data.pf.parameters['cosmology_calculator'] = Cosmology(
+            HubbleConstantNow=(100.*data.pf.hubble_constant),
+            OmegaMatterNow=data.pf.omega_matter, OmegaLambdaNow=data.pf.omega_lambda)
+    # observer to lens
+    DL = data.pf.parameters['cosmology_calculator'].AngularDiameterDistance(
+        data.pf.parameters['observer_redshift'], data.pf.current_redshift)
+    # observer to source
+    DS = data.pf.parameters['cosmology_calculator'].AngularDiameterDistance(
+        data.pf.parameters['observer_redshift'], data.pf.parameters['lensing_source_redshift'])
+    # lens to source
+    DLS = data.pf.parameters['cosmology_calculator'].AngularDiameterDistance(
+        data.pf.current_redshift, data.pf.parameters['lensing_source_redshift'])
+    return (((DL * DLS) / DS) * (1.5e14 * data.pf.omega_matter * 
+                                (data.pf.hubble_constant / speed_of_light_cgs)**2 *
+                                (1 + data.pf.current_redshift)))
+add_field("WeakLensingConvergence", function=_DensityPerturbation, 
+          convert_function=_convertConvergence, 
+          projection_conversion='mpccm')
+
 def _CellVolume(field, data):
     if data['dx'].size == 1:
         try:
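
The cosmology fields added above all normalize against the same quantity: the mean matter density at redshift z, rho_bar = rho_crit_now * Omega_m * h**2 * (1 + z)**3 in g/cm^3. A quick standalone check of that arithmetic (the numerical value of rho_crit_now is assumed here; the real constant comes from yt.utilities.physical_constants):

    rho_crit_now = 1.8788e-29   # g/cm^3 for H0 = 100 km/s/Mpc (assumed value)
    omega_matter = 0.27         # illustrative cosmology
    hubble_constant = 0.7       # h
    z = 2.0

    rho_bar = rho_crit_now * omega_matter * hubble_constant**2 * (1.0 + z)**3
    # DensityPerturbation for a parcel at twice the mean density:
    delta = (2.0 * rho_bar - rho_bar) / rho_bar
    print delta  # -> 1.0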


--- a/yt/frontends/enzo/fields.py	Tue Mar 15 17:57:57 2011 -0700
+++ b/yt/frontends/enzo/fields.py	Tue Mar 15 17:59:57 2011 -0700
@@ -34,7 +34,7 @@
     ValidateGridType
 import yt.data_objects.universal_fields
 from yt.utilities.physical_constants import \
-    mh, rho_crit_now
+    mh
 import yt.utilities.amr_utils as amr_utils
 
 class EnzoFieldContainer(CodeFieldInfoContainer):
@@ -206,31 +206,6 @@
           function=_NumberDensity,
           convert_function=_ConvertNumberDensity)
 
-def _ComovingDensity(field,data):
-    ef = (1.0 + data.pf.current_redshift)**3.0
-    return data["Density"]/ef
-add_field("ComovingDensity", function=_ComovingDensity, units=r"\rm{g}/\rm{cm}^3")
-
-# This is rho_total / rho_cr(z).
-def Overdensity(field,data):
-    return (data['Density'] + data['Dark_Matter_Density']) / \
-        (rho_crit_now * (data.pf.hubble_constant**2) * ((1+data.pf.current_redshift)**3))
-add_field("Overdensity",function=Overdensity,units=r"")
-
-# This is rho_b / <rho_b>.
-def _Baryon_Overdensity(field, data):
-    return data['Density']
-def _Convert_Baryon_Overdensity(data):
-    if data.pf.parameters.has_key('omega_baryon_now'):
-        omega_baryon_now = data.pf.parameters['omega_baryon_now']
-    else:
-        omega_baryon_now = 0.0441
-    return 1 / (omega_baryon_now * rho_crit_now * 
-                (data.pf['CosmologyHubbleConstantNow']**2) * 
-                ((1+data.pf['CosmologyCurrentRedshift'])**3))
-add_field("Baryon_Overdensity", function=_Baryon_Overdensity, 
-          convert_function=_Convert_Baryon_Overdensity, units=r"")
-
 # Now we add all the fields that we want to control, but we give a null function
 # This is every Enzo field we can think of.  This will be installation-dependent,
 
@@ -240,7 +215,7 @@
 _default_fields = ["Density","Temperature",
                    "x-velocity","y-velocity","z-velocity",
                    "x-momentum","y-momentum","z-momentum",
-                   "Bx", "By", "Bz"]
+                   "Bx", "By", "Bz", "Dust_Temperature_Density"]
 # else:
 #     _default_fields = ["Density","Temperature","Gas_Energy","Total_Energy",
 #                        "x-velocity","y-velocity","z-velocity"]
@@ -280,6 +255,17 @@
     f._convert_function = _convertVelocity
     f.take_log = False
 
+# Dust temperature - raw field is T_dust * Density
+def _dust_temperature(field, data):
+    return data['Dust_Temperature_Density'] / data['Density']
+def _convert_dust_temperature(data):
+    ef = (1.0 + data.pf.current_redshift)**3.0
+    return data.convert("Density") / ef
+add_field("Dust_Temperature", function=_dust_temperature, 
+          convert_function=_convert_dust_temperature, take_log=True,
+          validators=[ValidateDataField('Dust_Temperature_Density')],
+          units = r"K")
+
 def _spdensity(field, data):
     blank = na.zeros(data.ActiveDimensions, dtype='float32')
     if data.NumberOfParticles == 0: return blank
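
On the Dust_Temperature field above: the raw Enzo field stores the density-weighted product T_dust * rho, so the derived field recovers the temperature by dividing out Density, with the convert function handling the unit and comoving factors. The arithmetic itself, with purely illustrative numbers:

    # Hypothetical cell with T_dust = 18.5 K and rho = 3e-25 g/cm^3
    # (cgs used here for simplicity; Enzo's code units differ).
    density = 3.0e-25
    dust_temperature_density = 18.5 * density   # the raw, density-weighted field
    t_dust = dust_temperature_density / density
    assert abs(t_dust - 18.5) < 1e-12           # back to 18.5 K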


--- a/yt/frontends/orion/data_structures.py	Tue Mar 15 17:57:57 2011 -0700
+++ b/yt/frontends/orion/data_structures.py	Tue Mar 15 17:59:57 2011 -0700
@@ -470,6 +470,7 @@
         self.parameters["Time"] = 1. # default unit is 1...
         self.parameters["DualEnergyFormalism"] = 0 # always off.
         self.parameters["EOSType"] = -1 # default
+
         if self.fparameters.has_key("mu"):
             self.parameters["mu"] = self.fparameters["mu"]
 
@@ -504,6 +505,9 @@
                 self.fparameter_filename, 'probin')
         if os.path.isfile(self.fparameter_filename):
             self._parse_fparameter_file()
+            for param in self.fparameters:
+                if orion2enzoDict.has_key(param):
+                    self.parameters[orion2enzoDict[param]]=self.fparameters[param]
         # Let's read the file
         self.unique_identifier = \
             int(os.stat(self.parameter_filename)[ST_CTIME])
@@ -541,6 +545,20 @@
         self.domain_dimensions = self.parameters["TopGridDimensions"]
         self.refine_by = self.parameters["RefineBy"]
 
+        if self.parameters.has_key("ComovingCoordinates") and bool(self.parameters["ComovingCoordinates"]):
+            self.cosmological_simulation = 1
+            self.omega_lambda = self.parameters["CosmologyOmegaLambdaNow"]
+            self.omega_matter = self.parameters["CosmologyOmegaMatterNow"]
+            self.hubble_constant = self.parameters["CosmologyHubbleConstantNow"]
+            a_file = open(os.path.join(self.fullplotdir,'comoving_a'))
+            line = a_file.readline().strip()
+            a_file.close()
+            self.parameters["CosmologyCurrentRedshift"] = 1/float(line) - 1
+            self.current_redshift = self.parameters["CosmologyCurrentRedshift"]
+        else:
+            self.current_redshift = self.omega_lambda = self.omega_matter = \
+                self.hubble_constant = self.cosmological_simulation = 0.0
+
     def _parse_fparameter_file(self):
         """
         Parses the fortran parameter file for Orion. Most of this will
@@ -574,6 +592,7 @@
         n_fields = int(lines[1])
         self.current_time = float(lines[3+n_fields])
 
+
                 
     def _set_units(self):
         """


--- a/yt/frontends/orion/definitions.py	Tue Mar 15 17:57:57 2011 -0700
+++ b/yt/frontends/orion/definitions.py	Tue Mar 15 17:59:57 2011 -0700
@@ -63,7 +63,12 @@
 # throughout the code. key is Orion name, value is Enzo/yt equivalent
 orion2enzoDict = {"amr.n_cell": "TopGridDimensions",
                   "materials.gamma": "Gamma",
-                  "amr.ref_ratio": "RefineBy"
+                  "amr.ref_ratio": "RefineBy",
+                  "castro.use_comoving": "ComovingCoordinates",
+                  "castro.redshift_in": "CosmologyInitialRedshift",
+                  "comoving_OmL": "CosmologyOmegaLambdaNow",
+                  "comoving_OmM": "CosmologyOmegaMatterNow",
+                  "comoving_h": "CosmologyHubbleConstantNow"
                   }
 
 yt2orionFieldsDict = {}
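
Combined with the translation loop added in data_structures.py above, this mapping lets Castro/Orion parameter names be stored under their Enzo-style keys, so the cosmology code can read them uniformly. A standalone sketch of that translation (dict abridged from the diff; the fparameters values are hypothetical):

    orion2enzoDict = {"amr.n_cell": "TopGridDimensions",
                      "comoving_h": "CosmologyHubbleConstantNow"}  # abridged

    fparameters = {"comoving_h": 0.7, "materials.other": 1.4}      # hypothetical
    parameters = {}
    for param in fparameters:
        if param in orion2enzoDict:
            parameters[orion2enzoDict[param]] = fparameters[param]
    # parameters -> {'CosmologyHubbleConstantNow': 0.7}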


--- a/yt/utilities/_amr_utils/Octree.pyx	Tue Mar 15 17:57:57 2011 -0700
+++ b/yt/utilities/_amr_utils/Octree.pyx	Tue Mar 15 17:59:57 2011 -0700
@@ -51,8 +51,8 @@
         self.val[i] += val[i]
     self.weight_val += weight_val
 
-cdef void OTN_refine(OctreeNode *self):
-    cdef int i, j, i1, j1
+cdef void OTN_refine(OctreeNode *self, int incremental = 0):
+    cdef int i, j, k, i1, j1
     cdef np.int64_t npos[3]
     cdef OctreeNode *node
     for i in range(2):
@@ -66,6 +66,7 @@
                             npos,
                             self.nvals, self.val, self.weight_val,
                             self.level + 1)
+    if incremental: return
     for i in range(self.nvals): self.val[i] = 0.0
     self.weight_val = 0.0
 
@@ -73,7 +74,7 @@
                         np.float64_t *val, np.float64_t weight_val,
                         int level):
     cdef OctreeNode *node
-    cdef int i, j
+    cdef int i, j, k
     node = <OctreeNode *> malloc(sizeof(OctreeNode))
     node.pos[0] = pos[0]
     node.pos[1] = pos[1]
@@ -92,7 +93,7 @@
     return node
 
 cdef void OTN_free(OctreeNode *node):
-    cdef int i, j
+    cdef int i, j, k
     for i in range(2):
         for j in range(2):
             for k in range(2):
@@ -106,10 +107,12 @@
     cdef np.int64_t po2[80]
     cdef OctreeNode ****root_nodes
     cdef np.int64_t top_grid_dims[3]
+    cdef int incremental
 
     def __cinit__(self, np.ndarray[np.int64_t, ndim=1] top_grid_dims,
-                  int nvals):
-        cdef int i, j
+                  int nvals, int incremental = False):
+        cdef int i, j, k
+        self.incremental = incremental
         cdef OctreeNode *node
         cdef np.int64_t pos[3]
         cdef np.float64_t *vals = <np.float64_t *> alloca(
@@ -147,13 +150,15 @@
                  int level, np.int64_t pos[3],
                  np.float64_t *val,
                  np.float64_t weight_val):
-        cdef int i, j
+        cdef int i, j, k, L
         cdef OctreeNode *node
         node = self.find_on_root_level(pos, level)
         cdef np.int64_t fac
         for L in range(level):
+            if self.incremental:
+                OTN_add_value(node, val, weight_val)
             if node.children[0][0][0] == NULL:
-                OTN_refine(node)
+                OTN_refine(node, self.incremental)
             # Maybe we should use bitwise operators?
             fac = self.po2[level - L - 1]
             i = (pos[0] >= fac*(2*node.pos[0]+1))
@@ -165,7 +170,7 @@
     cdef OctreeNode *find_on_root_level(self, np.int64_t pos[3], int level):
         # We need this because the root level won't just have four children
         # So we find on the root level, then we traverse the tree.
-        cdef np.int64_t i, j
+        cdef np.int64_t i, j, k
         i = <np.int64_t> (pos[0] / self.po2[level])
         j = <np.int64_t> (pos[1] / self.po2[level])
         k = <np.int64_t> (pos[2] / self.po2[level])
@@ -202,7 +207,7 @@
     @cython.boundscheck(False)
     @cython.wraparound(False)
     def get_all_from_level(self, int level, int count_only = 0):
-        cdef int i, j
+        cdef int i, j, k
         cdef int total = 0
         vals = []
         for i in range(self.top_grid_dims[0]):
@@ -214,7 +219,7 @@
         cdef np.ndarray[np.int64_t, ndim=2] npos
         cdef np.ndarray[np.float64_t, ndim=2] nvals
         cdef np.ndarray[np.float64_t, ndim=1] nwvals
-        npos = np.zeros( (total, 2), dtype='int64')
+        npos = np.zeros( (total, 3), dtype='int64')
         nvals = np.zeros( (total, self.nvals), dtype='float64')
         nwvals = np.zeros( total, dtype='float64')
         cdef np.int64_t curpos = 0
@@ -229,10 +234,11 @@
         return npos, nvals, nwvals
 
     cdef int count_at_level(self, OctreeNode *node, int level):
-        cdef int i, j
+        cdef int i, j, k
         # We only really return a non-zero, calculated value if we are at the
         # level in question.
         if node.level == level:
+            if self.incremental: return 1
             # We return 1 if there are no finer points at this level and zero
             # if there are
             return (node.children[0][0][0] == NULL)
@@ -249,9 +255,10 @@
                               np.int64_t *pdata,
                               np.float64_t *vdata,
                               np.float64_t *wdata):
-        cdef int i, j
+        cdef int i, j, k
         if node.level == level:
-            if node.children[0][0][0] != NULL: return 0
+            if node.children[0][0][0] != NULL and not self.incremental:
+                return 0
             for i in range(self.nvals):
                 vdata[self.nvals * curpos + i] = node.val[i]
             wdata[curpos] = node.weight_val
@@ -259,7 +266,7 @@
             pdata[curpos * 3 + 1] = node.pos[1]
             pdata[curpos * 3 + 2] = node.pos[2]
             return 1
-        if node.children[0][0] == NULL: return 0
+        if node.children[0][0][0] == NULL: return 0
         cdef np.int64_t added = 0
         for i in range(2):
             for j in range(2):
@@ -269,7 +276,7 @@
         return added
 
     def __dealloc__(self):
-        cdef int i, j
+        cdef int i, j, k
         for i in range(self.top_grid_dims[0]):
             for j in range(self.top_grid_dims[1]):
                 for k in range(self.top_grid_dims[2]):
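
The incremental flag threaded through the Octree above changes the deposit semantics: add_to_position accumulates the value at every node along the descent path, and refining a node no longer zeroes its values, so totals at any level can be queried later. A rough pure-Python sketch of that accumulation (a binary tree instead of an octree, for brevity; illustrative only):

    class Node(object):
        def __init__(self, level):
            self.level = level
            self.val = 0.0
            self.children = None

    def add_value_incremental(root, leaf_level, key, val):
        """Deposit val at a leaf, accumulating it at every node on the
        way down; refinement on demand keeps parent values intact."""
        node = root
        for L in range(leaf_level):
            node.val += val              # incremental: parents accumulate too
            if node.children is None:    # refine on demand, keep node.val
                node.children = [Node(node.level + 1) for _ in (0, 1)]
            node = node.children[(key >> (leaf_level - L - 1)) & 1]
        node.val += val                  # finally deposit at the leaf

    root = Node(0)
    add_value_incremental(root, leaf_level=3, key=0b101, val=2.0)
    # root.val == 2.0, and so does every node on the path to leaf 0b101.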


--- a/yt/utilities/amr_utils.pyx	Tue Mar 15 17:57:57 2011 -0700
+++ b/yt/utilities/amr_utils.pyx	Tue Mar 15 17:59:57 2011 -0700
@@ -45,4 +45,5 @@
 include "_amr_utils/png_writer.pyx"
 include "_amr_utils/fortran_reader.pyx"
 include "_amr_utils/QuadTree.pyx"
+include "_amr_utils/Octree.pyx"
 include "_amr_utils/freetype_writer.pyx"


--- a/yt/utilities/parallel_tools/parallel_analysis_interface.py	Tue Mar 15 17:57:57 2011 -0700
+++ b/yt/utilities/parallel_tools/parallel_analysis_interface.py	Tue Mar 15 17:59:57 2011 -0700
@@ -152,7 +152,7 @@
             retval = func(self, *args, **kwargs)
             self._processing = False
         retval = MPI.COMM_WORLD.bcast(retval, root=self._owner)
-        MPI.COMM_WORLD.Barrier()
+        #MPI.COMM_WORLD.Barrier()
         return retval
     return single_proc_results
 
@@ -235,7 +235,7 @@
                 all_clear = 0
         else:
             all_clear = None
-        MPI.COMM_WORLD.Barrier()
+        #MPI.COMM_WORLD.Barrier()
         all_clear = MPI.COMM_WORLD.bcast(all_clear, root=0)
         if not all_clear: raise RuntimeError
     if parallel_capable: return root_only
@@ -632,14 +632,14 @@
 
     @parallel_passthrough
     def _mpi_joindict(self, data):
-        self._barrier()
+        #self._barrier()
         if MPI.COMM_WORLD.rank == 0:
             for i in range(1,MPI.COMM_WORLD.size):
                 data.update(MPI.COMM_WORLD.recv(source=i, tag=0))
         else:
             MPI.COMM_WORLD.send(data, dest=0, tag=0)
         data = MPI.COMM_WORLD.bcast(data, root=0)
-        self._barrier()
+        #self._barrier()
         return data
 
     @parallel_passthrough
@@ -1081,7 +1081,7 @@
 
     @parallel_passthrough
     def _mpi_bcast_pickled(self, data):
-        self._barrier()
+        #self._barrier()
         data = MPI.COMM_WORLD.bcast(data, root=0)
         return data
 
@@ -1115,7 +1115,7 @@
 
     @parallel_passthrough
     def _mpi_allsum(self, data):
-        self._barrier()
+        #self._barrier()
         # We use old-school pickling here on the assumption the arrays are
         # relatively small ( < 1e7 elements )
         if isinstance(data, na.ndarray) and data.dtype != na.bool:
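
On the barriers commented out above: a collective such as bcast already imposes the needed ordering, since no rank can return from the broadcast with the data before the root has entered it, which makes an explicit Barrier immediately before it redundant. Whether that holds at every call site depends on surrounding side effects, which may be why they are commented out rather than deleted. A minimal mpi4py illustration (run under mpirun with two or more ranks):

    from mpi4py import MPI

    comm = MPI.COMM_WORLD
    data = {"answer": 42} if comm.rank == 0 else None
    # No Barrier needed: bcast blocks each rank until the value arrives.
    data = comm.bcast(data, root=0)
    print comm.rank, data  # every rank sees {'answer': 42}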

Repository URL: https://bitbucket.org/yt_analysis/yt/
