[yt-svn] commit/yt: 21 new changesets

commits-noreply at bitbucket.org
Tue Sep 16 14:09:18 PDT 2014


21 new commits in yt:

https://bitbucket.org/yt_analysis/yt/commits/7f55e8735671/
Changeset:   7f55e8735671
Branch:      yt-3.0
User:        MatthewTurk
Date:        2014-08-03 01:47:35
Summary:     Adding an image_axis_name attribute
Affected #:  1 file

diff -r 15f91fe5e0dd01ac4b3de01211e37e423bc0776e -r 7f55e87356715062208614583ec8399018e52be1 yt/geometry/coordinate_handler.py
--- a/yt/geometry/coordinate_handler.py
+++ b/yt/geometry/coordinate_handler.py
@@ -83,6 +83,17 @@
         raise NotImplementedError
 
     @property
+    def image_axis_name(self):
+        # Default
+        rv = {}
+        for i in range(3):
+            rv[i] = (self.axis_name[self.x_axis[i]],
+                     self.axis_name[self.y_axis[i]])
+            rv[self.axis_name[i]] = rv[i]
+            rv[self.axis_name[i].upper()] = rv[i]
+        return rv
+
+    @property
     def axis_id(self):
         raise NotImplementedError
 

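For reference, a minimal sketch of what this default property evaluates to for the Cartesian handler (axis_name, x_axis and y_axis taken from cartesian_coordinates.py later in this digest); illustrative only, not part of the commit:

    # Default image_axis_name, expanded by hand for the Cartesian tables.
    axis_name = {0: 'x', 1: 'y', 2: 'z'}
    x_axis = {0: 1, 1: 2, 2: 0}
    y_axis = {0: 2, 1: 0, 2: 1}

    rv = {}
    for i in range(3):
        rv[i] = (axis_name[x_axis[i]], axis_name[y_axis[i]])
        rv[axis_name[i]] = rv[i]
        rv[axis_name[i].upper()] = rv[i]

    # rv[0] == ('y', 'z'), rv[1] == ('z', 'x'), rv[2] == ('x', 'y'),
    # with the same tuples reachable under 'x'/'X', 'y'/'Y', 'z'/'Z'.
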

https://bitbucket.org/yt_analysis/yt/commits/cf03d8258be8/
Changeset:   cf03d8258be8
Branch:      yt-3.0
User:        MatthewTurk
Date:        2014-08-03 02:01:41
Summary:     First pass at image axis names for non-cartesian coords.
Affected #:  4 files

diff -r 7f55e87356715062208614583ec8399018e52be1 -r cf03d8258be8ae8ecc5ddda00267e0ee9b156748 yt/geometry/cylindrical_coordinates.py
--- a/yt/geometry/cylindrical_coordinates.py
+++ b/yt/geometry/cylindrical_coordinates.py
@@ -121,6 +121,24 @@
     y_axis = { 'r' : 2, 'z' : 2, 'theta' : 1,
                 0  : 2,  1  : 2,  2  : 1}
 
+    _image_axis_name = None
+
+    @property
+    def image_axis_name(self):    
+        if self._image_axis_name is not None:
+            return self._image_axis_name
+        # This is the x and y axes labels that get displayed.  For
+        # non-Cartesian coordinates, we usually want to override these for
+        # Cartesian coordinates, since we transform them.
+        rv = {0: ('z', 'theta'),
+              1: ('x', 'y'),
+              2: ('r', 'z')}
+        for i in rv.keys():
+            rv[self.axis_name[i]] = rv[i]
+            rv[self.axis_name[i].upper()] = rv[i]
+        self._image_axis_name = rv
+        return rv
+
     def convert_from_cartesian(self, coord):
         return cartesian_to_cylindrical(coord)
 

diff -r 7f55e87356715062208614583ec8399018e52be1 -r cf03d8258be8ae8ecc5ddda00267e0ee9b156748 yt/geometry/polar_coordinates.py
--- a/yt/geometry/polar_coordinates.py
+++ b/yt/geometry/polar_coordinates.py
@@ -108,6 +108,23 @@
     y_axis = { 'r' : 2, 'theta' : 2, 'z' : 1,
                 0  : 2,  1  : 2,  2  : 1}
 
+    _image_axis_name = None
+    @property
+    def image_axis_name(self):    
+        if self._image_axis_name is not None:
+            return self._image_axis_name
+        # This is the x and y axes labels that get displayed.  For
+        # non-Cartesian coordinates, we usually want to override these for
+        # Cartesian coordinates, since we transform them.
+        rv = {0: ('theta', 'z'),
+              1: ('x', 'y'),
+              2: ('r', 'z')}
+        for i in rv.keys():
+            rv[self.axis_name[i]] = rv[i]
+            rv[self.axis_name[i].upper()] = rv[i]
+        self._image_axis_name = rv
+        return rv
+
     def convert_from_cartesian(self, coord):
         return cartesian_to_cylindrical(coord)
 

diff -r 7f55e87356715062208614583ec8399018e52be1 -r cf03d8258be8ae8ecc5ddda00267e0ee9b156748 yt/geometry/spherical_coordinates.py
--- a/yt/geometry/spherical_coordinates.py
+++ b/yt/geometry/spherical_coordinates.py
@@ -142,6 +142,24 @@
                  'r' : 'r', 'theta' : 'theta', 'phi' : 'phi',
                  'R' : 'r', 'Theta' : 'theta', 'Phi' : 'phi'}
 
+    _image_axis_name = None
+    @property
+    def image_axis_name(self):    
+        if self._image_axis_name is not None:
+            return self._image_axis_name
+        # This is the x and y axes labels that get displayed.  For
+        # non-Cartesian coordinates, we usually want to override these for
+        # Cartesian coordinates, since we transform them.
+        rv = {0: ('theta', 'phi'),
+              1: ('x', 'y'),
+              2: ('x', 'z')}
+        for i in rv.keys():
+            rv[self.axis_name[i]] = rv[i]
+            rv[self.axis_name[i].upper()] = rv[i]
+        self._image_axis_name = rv
+        return rv
+
+
     axis_id = { 'r' : 0, 'theta' : 1, 'phi' : 2,
                  0  : 0,  1  : 1,  2  : 2}
 

diff -r 7f55e87356715062208614583ec8399018e52be1 -r cf03d8258be8ae8ecc5ddda00267e0ee9b156748 yt/visualization/plot_window.py
--- a/yt/visualization/plot_window.py
+++ b/yt/visualization/plot_window.py
@@ -880,25 +880,26 @@
                 labels = [r'$\rm{Image\/x'+axes_unit_labels[0]+'}$',
                           r'$\rm{Image\/y'+axes_unit_labels[1]+'}$']
             else:
-                axis_names = self.ds.coordinates.axis_name
-                xax = self.ds.coordinates.x_axis[axis_index]
-                yax = self.ds.coordinates.y_axis[axis_index]
+                coordinates = self.ds.coordinates
+                axis_names = coordinates.image_axis_name[axis_index]
+                xax = coordinates.x_axis[axis_index]
+                yax = coordinates.y_axis[axis_index]
 
-                if hasattr(self.ds.coordinates, "axis_default_unit_label"):
+                if hasattr(coordinates, "axis_default_unit_label"):
                     axes_unit_labels = \
-                    [self.ds.coordinates.axis_default_unit_name[xax],
-                     self.ds.coordinates.axis_default_unit_name[yax]]
+                    [coordinates.axis_default_unit_name[xax],
+                     coordinates.axis_default_unit_name[yax]]
                 labels = [r'$\rm{'+axis_names[xax]+axes_unit_labels[0] + r'}$',
                           r'$\rm{'+axis_names[yax]+axes_unit_labels[1] + r'}$']
 
-                if hasattr(self.ds.coordinates, "axis_field"):
-                    if xax in self.ds.coordinates.axis_field:
-                        xmin, xmax = self.ds.coordinates.axis_field[xax](
+                if hasattr(coordinates, "axis_field"):
+                    if xax in coordinates.axis_field:
+                        xmin, xmax = coordinates.axis_field[xax](
                             0, self.xlim, self.ylim)
                     else:
                         xmin, xmax = [float(x) for x in extentx]
-                    if yax in self.ds.coordinates.axis_field:
-                        ymin, ymax = self.ds.coordinates.axis_field[yax](
+                    if yax in coordinates.axis_field:
+                        ymin, ymax = coordinates.axis_field[yax](
                             1, self.xlim, self.ylim)
                     else:
                         ymin, ymax = [float(y) for y in extenty]

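Concretely, for the cylindrical handler (axis ordering r, z, theta, per its axis_name table later in this digest), the property amounts to the mapping sketched below; plot_window then looks up the pair for the sliced axis. Illustrative only:

    # Sketch of the cylindrical image_axis_name mapping.
    image_axis_name = {
        0: ('z', 'theta'),   # slice along r: a z-theta surface
        1: ('x', 'y'),       # slice along z: an r-theta plane drawn in Cartesian x-y
        2: ('r', 'z'),       # slice along theta: an r-z plane
    }
    # The loop in the property also aliases each tuple under the string keys
    # 'r'/'R', 'z'/'Z' and 'theta'/'THETA'.
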

https://bitbucket.org/yt_analysis/yt/commits/6f0edefe73db/
Changeset:   6f0edefe73db
Branch:      yt-3.0
User:        MatthewTurk
Date:        2014-08-03 02:43:35
Summary:     Move the cos/sin outside the inner loop.
Affected #:  1 file

diff -r cf03d8258be8ae8ecc5ddda00267e0ee9b156748 -r 6f0edefe73dbf196560cc94c952ec1c48cad7f3a yt/utilities/lib/misc_utilities.pyx
--- a/yt/utilities/lib/misc_utilities.pyx
+++ b/yt/utilities/lib/misc_utilities.pyx
@@ -486,6 +486,7 @@
     cdef np.float64_t x, y, dx, dy, r0, theta0
     cdef np.float64_t rmax, x0, y0, x1, y1
     cdef np.float64_t r_i, theta_i, dr_i, dtheta_i, dthetamin
+    cdef np.float64_t costheta, sintheta
     cdef int i, pi, pj
     
     imax = radius.argmax()
@@ -501,7 +502,6 @@
     dy = (y1 - y0) / img.shape[1]
       
     dthetamin = dx / rmax
-      
     for i in range(radius.shape[0]):
 
         r0 = radius[i]
@@ -512,12 +512,14 @@
         theta_i = theta0 - dtheta_i
         while theta_i < theta0 + dtheta_i:
             r_i = r0 - dr_i
+            costheta = math.cos(theta_i)
+            sintheta = math.sin(theta_i)
             while r_i < r0 + dr_i:
                 if rmax <= r_i:
                     r_i += 0.5*dx 
                     continue
-                x = r_i * math.cos(theta_i)
-                y = r_i * math.sin(theta_i)
+                x = r_i * costheta
+                y = r_i * sintheta
                 pi = <int>((x - x0)/dx)
                 pj = <int>((y - y0)/dy)
                 if pi >= 0 and pi < img.shape[0] and \

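The change is a standard loop-invariant hoist: theta_i does not vary over the inner radial loop, so the trig calls move up one level and are evaluated once per theta step instead of once per (theta, r) pair. A pure-Python sketch of the same pattern (function names made up for illustration):

    import math

    def pixels_naive(r0, dr, theta_i, dx):
        # cos/sin evaluated in the innermost loop, once per r step
        out = []
        r_i = r0 - dr
        while r_i < r0 + dr:
            out.append((r_i * math.cos(theta_i), r_i * math.sin(theta_i)))
            r_i += 0.5 * dx
        return out

    def pixels_hoisted(r0, dr, theta_i, dx):
        # cos/sin computed once and reused for every r step
        costheta, sintheta = math.cos(theta_i), math.sin(theta_i)
        out = []
        r_i = r0 - dr
        while r_i < r0 + dr:
            out.append((r_i * costheta, r_i * sintheta))
            r_i += 0.5 * dx
        return out

    assert pixels_naive(1.0, 0.1, 0.3, 0.05) == pixels_hoisted(1.0, 0.1, 0.3, 0.05)
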

https://bitbucket.org/yt_analysis/yt/commits/e7fc6781105c/
Changeset:   e7fc6781105c
Branch:      yt-3.0
User:        MatthewTurk
Date:        2014-08-03 03:16:42
Summary:     An enormous speedup for cylindrical pixelization.
Affected #:  1 file

diff -r 6f0edefe73dbf196560cc94c952ec1c48cad7f3a -r e7fc6781105cf44bc49b7c7c824ec3c9eef69088 yt/utilities/lib/misc_utilities.pyx
--- a/yt/utilities/lib/misc_utilities.pyx
+++ b/yt/utilities/lib/misc_utilities.pyx
@@ -500,7 +500,23 @@
     x0, x1, y0, y1 = extents
     dx = (x1 - x0) / img.shape[0]
     dy = (y1 - y0) / img.shape[1]
-      
+    cdef np.float64_t rbounds[2]
+    cdef np.float64_t corners[8]
+    # Find our min and max r
+    corners[0] = x0*x0+y0*y0
+    corners[1] = x1*x1+y0*y0
+    corners[2] = x0*x0+y1*y1
+    corners[3] = x1*x1+y1*y1
+    corners[4] = x0*x0
+    corners[5] = x1*x1
+    corners[6] = y0*y0
+    corners[7] = y1*y1
+    rbounds[0] = rbounds[1] = corners[0]
+    for i in range(8):
+        rbounds[0] = fmin(rbounds[0], corners[i])
+        rbounds[1] = fmax(rbounds[1], corners[i])
+    rbounds[0] = rbounds[0]**0.5
+    rbounds[1] = rbounds[1]**0.5
     dthetamin = dx / rmax
     for i in range(radius.shape[0]):
 
@@ -508,8 +524,12 @@
         theta0 = theta[i]
         dr_i = dradius[i]
         dtheta_i = dtheta[i]
-
+        # Skip out early if we're offsides, for zoomed in plots
+        if r0 + dr_i < rbounds[0] or r0 - dr_i > rbounds[1]:
+            continue
         theta_i = theta0 - dtheta_i
+        # Buffer of 0.5 here
+        dthetamin = 0.5*dx/(r0 + dr_i)
         while theta_i < theta0 + dtheta_i:
             r_i = r0 - dr_i
             costheta = math.cos(theta_i)

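The speedup comes from two cheap bounds: a conservative [rmin, rmax] for the image extent, so annuli lying entirely outside the window are skipped, and an angular step sized to each cell's outer radius rather than to the global rmax. A minimal Python sketch of the radial bound (the next changeset additionally zeroes rmin when the extent crosses x = 0 or y = 0):

    import math

    def radial_bounds(x0, x1, y0, y1):
        # Squared distances of the extent corners plus the per-axis terms;
        # their min/max give a conservative [rmin, rmax] for the window.
        corners = [x0*x0 + y0*y0, x1*x1 + y0*y0, x0*x0 + y1*y1, x1*x1 + y1*y1,
                   x0*x0, x1*x1, y0*y0, y1*y1]
        return math.sqrt(min(corners)), math.sqrt(max(corners))

    rmin, rmax = radial_bounds(0.5, 2.0, 0.25, 1.5)   # (0.25, 2.5)
    # A cell at radius r0 with half-width dr_i is then skipped when
    # r0 + dr_i < rmin or r0 - dr_i > rmax.
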

https://bitbucket.org/yt_analysis/yt/commits/6e987387c1f0/
Changeset:   6e987387c1f0
Branch:      yt-3.0
User:        MatthewTurk
Date:        2014-08-04 03:28:15
Summary:     Fixing error with axis_names
Affected #:  1 file

diff -r e7fc6781105cf44bc49b7c7c824ec3c9eef69088 -r 6e987387c1f0cea115753351c7c5b201cda52b13 yt/visualization/plot_window.py
--- a/yt/visualization/plot_window.py
+++ b/yt/visualization/plot_window.py
@@ -889,8 +889,8 @@
                     axes_unit_labels = \
                     [coordinates.axis_default_unit_name[xax],
                      coordinates.axis_default_unit_name[yax]]
-                labels = [r'$\rm{'+axis_names[xax]+axes_unit_labels[0] + r'}$',
-                          r'$\rm{'+axis_names[yax]+axes_unit_labels[1] + r'}$']
+                labels = [r'$\rm{'+axis_names[0]+axes_unit_labels[0] + r'}$',
+                          r'$\rm{'+axis_names[1]+axes_unit_labels[1] + r'}$']
 
                 if hasattr(coordinates, "axis_field"):
                     if xax in coordinates.axis_field:

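The fix reflects that image_axis_name[axis_index] is already an (x label, y label) pair, so it has to be indexed by position. Indexing it by the dataset axis ids only works by accident for some axes; for a cylindrical 'r' slice, for instance, x_axis[0] == 1 and y_axis[0] == 2, so axis_names[2] would run off the end of the two-tuple. A rough sketch of the corrected label construction (ds, axis_index and the unit labels are assumed):

    coordinates = ds.coordinates                        # ds: an already-loaded dataset
    axis_names = coordinates.image_axis_name[axis_index]
    axes_unit_labels = ['', '']                         # placeholder units, illustration only
    labels = [r'$\rm{' + axis_names[0] + axes_unit_labels[0] + r'}$',
              r'$\rm{' + axis_names[1] + axes_unit_labels[1] + r'}$']
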

https://bitbucket.org/yt_analysis/yt/commits/a355b9bc82ac/
Changeset:   a355b9bc82ac
Branch:      yt-3.0
User:        MatthewTurk
Date:        2014-08-04 03:39:28
Summary:     Allow including the origin
Affected #:  1 file

diff -r 6e987387c1f0cea115753351c7c5b201cda52b13 -r a355b9bc82acbebb398319233d8f57ab90bbef26 yt/utilities/lib/misc_utilities.pyx
--- a/yt/utilities/lib/misc_utilities.pyx
+++ b/yt/utilities/lib/misc_utilities.pyx
@@ -517,6 +517,12 @@
         rbounds[1] = fmax(rbounds[1], corners[i])
     rbounds[0] = rbounds[0]**0.5
     rbounds[1] = rbounds[1]**0.5
+    # If we include the origin in either direction, we need to have radius of
+    # zero as our lower bound.
+    if x0 < 0 and x1 > 0:
+        rbounds[0] = 0.0
+    if y0 < 0 and y1 > 0:
+        rbounds[0] = 0.0
     dthetamin = dx / rmax
     for i in range(radius.shape[0]):
 

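A quick worked example of why the clamp is needed: an extent of (-1, 2) in x and (-1, 3) in y contains r = 0, yet every corner term is positive, so the bound from the previous changeset alone would skip cells near the origin. Illustrative only:

    x0, x1, y0, y1 = -1.0, 2.0, -1.0, 3.0
    corners = [x0*x0 + y0*y0, x1*x1 + y0*y0, x0*x0 + y1*y1, x1*x1 + y1*y1,
               x0*x0, x1*x1, y0*y0, y1*y1]
    rmin = min(corners) ** 0.5      # 1.0, even though the window contains the origin
    if x0 < 0 < x1:
        rmin = 0.0
    if y0 < 0 < y1:
        rmin = 0.0
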

https://bitbucket.org/yt_analysis/yt/commits/a69cdcb0a6e3/
Changeset:   a69cdcb0a6e3
Branch:      yt-3.0
User:        MatthewTurk
Date:        2014-08-04 04:08:06
Summary:     Need to transpose the x,y for theta slices in spherical coords.
Affected #:  1 file

diff -r a355b9bc82acbebb398319233d8f57ab90bbef26 -r a69cdcb0a6e33dfbb7b612c7af43884a817e67cb yt/geometry/spherical_coordinates.py
--- a/yt/geometry/spherical_coordinates.py
+++ b/yt/geometry/spherical_coordinates.py
@@ -107,6 +107,9 @@
                                      data_source['phi'],
                                      data_source['dphi'] / 2.0, # half-widths
                                      size, data_source[field], bounds)
+            # Trying to preserve a nice-looking system, with x on the x and y
+            # on the y.
+            buff = buff.transpose()
         elif dimension == 2:
             buff = pixelize_cylinder(data_source['r'],
                                      data_source['dr'] / 2.0,
@@ -152,7 +155,7 @@
         # Cartesian coordinates, since we transform them.
         rv = {0: ('theta', 'phi'),
               1: ('x', 'y'),
-              2: ('x', 'z')}
+              2: ('r', 'z')}
         for i in rv.keys():
             rv[self.axis_name[i]] = rv[i]
             rv[self.axis_name[i].upper()] = rv[i]


https://bitbucket.org/yt_analysis/yt/commits/43bcb95d3cf6/
Changeset:   43bcb95d3cf6
Branch:      yt-3.0
User:        MatthewTurk
Date:        2014-08-05 19:48:12
Summary:     Moving coordinates into subdirectory
Affected #:  18 files

diff -r a69cdcb0a6e33dfbb7b612c7af43884a817e67cb -r 43bcb95d3cf697ea2ceb5c0ac2a873856cb7a5ed yt/data_objects/static_output.py
--- a/yt/data_objects/static_output.py
+++ b/yt/data_objects/static_output.py
@@ -45,17 +45,12 @@
     YTArray, \
     YTQuantity
 
-from yt.geometry.cartesian_coordinates import \
-    CartesianCoordinateHandler
-from yt.geometry.polar_coordinates import \
-    PolarCoordinateHandler
-from yt.geometry.cylindrical_coordinates import \
-    CylindricalCoordinateHandler
-from yt.geometry.spherical_coordinates import \
-    SphericalCoordinateHandler
-from yt.geometry.geographic_coordinates import \
-    GeographicCoordinateHandler
-from yt.geometry.spec_cube_coordinates import \
+from yt.geometry.coordinates.api import \
+    CartesianCoordinateHandler, \
+    PolarCoordinateHandler, \
+    CylindricalCoordinateHandler, \
+    SphericalCoordinateHandler, \
+    GeographicCoordinateHandler, \
     SpectralCubeCoordinateHandler
 
 # We want to support the movie format in the future.

diff -r a69cdcb0a6e33dfbb7b612c7af43884a817e67cb -r 43bcb95d3cf697ea2ceb5c0ac2a873856cb7a5ed yt/geometry/cartesian_coordinates.py
--- a/yt/geometry/cartesian_coordinates.py
+++ /dev/null
@@ -1,122 +0,0 @@
-"""
-Cartesian fields
-
-
-
-
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (c) 2013, yt Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-import numpy as np
-from .coordinate_handler import \
-    CoordinateHandler, \
-    _unknown_coord, \
-    _get_coord_fields
-import yt.visualization._MPL as _MPL
-
-class CartesianCoordinateHandler(CoordinateHandler):
-
-    def __init__(self, ds):
-        super(CartesianCoordinateHandler, self).__init__(ds)
-
-    def setup_fields(self, registry):
-        for axi, ax in enumerate('xyz'):
-            f1, f2 = _get_coord_fields(axi)
-            registry.add_field(("index", "d%s" % ax), function = f1,
-                               display_field = False,
-                               units = "code_length")
-            registry.add_field(("index", "%s" % ax), function = f2,
-                               display_field = False,
-                               units = "code_length")
-        def _cell_volume(field, data):
-            rv  = data["index", "dx"].copy(order='K')
-            rv *= data["index", "dy"]
-            rv *= data["index", "dz"]
-            return rv
-        registry.add_field(("index", "cell_volume"), function=_cell_volume,
-                           display_field=False, units = "code_length**3")
-        registry.check_derived_fields(
-            [("index", "dx"), ("index", "dy"), ("index", "dz"),
-             ("index", "x"), ("index", "y"), ("index", "z"),
-             ("index", "cell_volume")])
-
-    def pixelize(self, dimension, data_source, field, bounds, size,
-                 antialias = True, periodic = True):
-        if dimension < 3:
-            return self._ortho_pixelize(data_source, field, bounds, size,
-                                        antialias, dimension, periodic)
-        else:
-            return self._oblique_pixelize(data_source, field, bounds, size,
-                                          antialias)
-
-    def _ortho_pixelize(self, data_source, field, bounds, size, antialias,
-                        dim, periodic):
-        # We should be using fcoords
-        period = self.period[:2].copy() # dummy here
-        period[0] = self.period[self.x_axis[dim]]
-        period[1] = self.period[self.y_axis[dim]]
-        if hasattr(period, 'in_units'):
-            period = period.in_units("code_length").d
-        buff = _MPL.Pixelize(data_source['px'], data_source['py'],
-                             data_source['pdx'], data_source['pdy'],
-                             data_source[field], size[0], size[1],
-                             bounds, int(antialias),
-                             period, int(periodic)).transpose()
-        return buff
-
-    def _oblique_pixelize(self, data_source, field, bounds, size, antialias):
-        indices = np.argsort(data_source['dx'])[::-1]
-        buff = _MPL.CPixelize(data_source['x'], data_source['y'],
-                              data_source['z'], data_source['px'],
-                              data_source['py'], data_source['pdx'],
-                              data_source['pdy'], data_source['pdz'],
-                              data_source.center, data_source._inv_mat, indices,
-                              data_source[field], size[0], size[1], bounds).transpose()
-        return buff
-
-    def convert_from_cartesian(self, coord):
-        return coord
-
-    def convert_to_cartesian(self, coord):
-        return coord
-
-    def convert_to_cylindrical(self, coord):
-        center = self.ds.domain_center
-        return cartesian_to_cylindrical(coord, center)
-
-    def convert_from_cylindrical(self, coord):
-        center = self.ds.domain_center
-        return cylindrical_to_cartesian(coord, center)
-
-    def convert_to_spherical(self, coord):
-        raise NotImplementedError
-
-    def convert_from_spherical(self, coord):
-        raise NotImplementedError
-
-    # Despite being mutables, we uses these here to be clear about how these
-    # are generated and to ensure that they are not re-generated unnecessarily
-    axis_name = { 0  : 'x',  1  : 'y',  2  : 'z',
-                 'x' : 'x', 'y' : 'y', 'z' : 'z',
-                 'X' : 'x', 'Y' : 'y', 'Z' : 'z'}
-
-    axis_id = { 'x' : 0, 'y' : 1, 'z' : 2,
-                 0  : 0,  1  : 1,  2  : 2}
-
-    x_axis = { 'x' : 1, 'y' : 2, 'z' : 0,
-                0  : 1,  1  : 2,  2  : 0}
-
-    y_axis = { 'x' : 2, 'y' : 0, 'z' : 1,
-                0  : 2,  1  : 0,  2  : 1}
-
-    @property
-    def period(self):
-        return self.ds.domain_width
-

diff -r a69cdcb0a6e33dfbb7b612c7af43884a817e67cb -r 43bcb95d3cf697ea2ceb5c0ac2a873856cb7a5ed yt/geometry/coordinate_handler.py
--- a/yt/geometry/coordinate_handler.py
+++ /dev/null
@@ -1,127 +0,0 @@
-"""
-Coordinate handler base class.
-
-
-
-
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (c) 2013, yt Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-import numpy as np
-import abc
-import weakref
-
-from yt.funcs import *
-from yt.fields.field_info_container import \
-    NullFunc, FieldInfoContainer
-from yt.utilities.io_handler import io_registry
-from yt.utilities.logger import ytLogger as mylog
-from yt.utilities.parallel_tools.parallel_analysis_interface import \
-    ParallelAnalysisInterface
-from yt.utilities.lib.misc_utilities import \
-    pixelize_cylinder
-import yt.visualization._MPL as _MPL
-
-def _unknown_coord(field, data):
-    raise YTCoordinateNotImplemented
-
-def _get_coord_fields(axi, units = "code_length"):
-    def _dds(field, data):
-        rv = data.ds.arr(data.fwidth[...,axi].copy(), units)
-        return data._reshape_vals(rv)
-    def _coords(field, data):
-        rv = data.ds.arr(data.fcoords[...,axi].copy(), units)
-        return data._reshape_vals(rv)
-    return _dds, _coords
-
-class CoordinateHandler(object):
-    
-    def __init__(self, ds):
-        self.ds = weakref.proxy(ds)
-
-    def setup_fields(self):
-        # This should return field definitions for x, y, z, r, theta, phi
-        raise NotImplementedError
-
-    def pixelize(self, dimension, data_source, field, bounds, size, antialias = True):
-        # This should *actually* be a pixelize call, not just returning the
-        # pixelizer
-        raise NotImplementedError
-
-    def distance(self, start, end):
-        p1 = self.convert_to_cartesian(start)
-        p2 = self.convert_to_cartesian(end)
-        return np.sqrt(((p1-p2)**2.0).sum())
-
-    def convert_from_cartesian(self, coord):
-        raise NotImplementedError
-
-    def convert_to_cartesian(self, coord):
-        raise NotImplementedError
-
-    def convert_to_cylindrical(self, coord):
-        raise NotImplementedError
-
-    def convert_from_cylindrical(self, coord):
-        raise NotImplementedError
-
-    def convert_to_spherical(self, coord):
-        raise NotImplementedError
-
-    def convert_from_spherical(self, coord):
-        raise NotImplementedError
-
-    @property
-    def axis_name(self):
-        raise NotImplementedError
-
-    @property
-    def image_axis_name(self):
-        # Default
-        rv = {}
-        for i in range(3):
-            rv[i] = (self.axis_name[self.x_axis[i]],
-                     self.axis_name[self.y_axis[i]])
-            rv[self.axis_name[i]] = rv[i]
-            rv[self.axis_name[i].upper()] = rv[i]
-        return rv
-
-    @property
-    def axis_id(self):
-        raise NotImplementedError
-
-    @property
-    def x_axis(self):
-        raise NotImplementedError
-
-    @property
-    def y_axis(self):
-        raise NotImplementedError
-
-    @property
-    def period(self):
-        raise NotImplementedError
-
-def cartesian_to_cylindrical(coord, center = (0,0,0)):
-    c2 = np.zeros_like(coord)
-    c2[...,0] = ((coord[...,0] - center[0])**2.0
-              +  (coord[...,1] - center[1])**2.0)**0.5
-    c2[...,1] = coord[...,2] # rzt
-    c2[...,2] = np.arctan2(coord[...,1] - center[1],
-                           coord[...,0] - center[0])
-    return c2
-
-def cylindrical_to_cartesian(coord, center = (0,0,0)):
-    c2 = np.zeros_like(coord)
-    c2[...,0] = np.cos(coord[...,0]) * coord[...,1] + center[0]
-    c2[...,1] = np.sin(coord[...,0]) * coord[...,1] + center[1]
-    c2[...,2] = coord[...,2]
-    return c2
-

diff -r a69cdcb0a6e33dfbb7b612c7af43884a817e67cb -r 43bcb95d3cf697ea2ceb5c0ac2a873856cb7a5ed yt/geometry/coordinates/api.py
--- /dev/null
+++ b/yt/geometry/coordinates/api.py
@@ -0,0 +1,29 @@
+"""
+API for coordinate handlers
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2014, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+from .coordinate_handler import \
+    CoordinateHandler
+
+from .cartesian_coordinates import \
+    CartesianCoordinateHandler
+from .polar_coordinates import \
+    PolarCoordinateHandler
+from .cylindrical_coordinates import \
+    CylindricalCoordinateHandler
+from .spherical_coordinates import \
+    SphericalCoordinateHandler
+from .geographic_coordinates import \
+    GeographicCoordinateHandler
+from .spec_cube_coordinates import \
+    SpectralCubeCoordinateHandler
+

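After the move, the coordinate handlers are importable from a single api module instead of one module per handler; the static_output.py hunk above shows the consumer side of that change. A minimal usage sketch (the dataset object ds is assumed to exist):

    from yt.geometry.coordinates.api import \
        CartesianCoordinateHandler, \
        CylindricalCoordinateHandler, \
        SphericalCoordinateHandler

    # handler = CylindricalCoordinateHandler(ds)   # ds: an already-loaded dataset
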
diff -r a69cdcb0a6e33dfbb7b612c7af43884a817e67cb -r 43bcb95d3cf697ea2ceb5c0ac2a873856cb7a5ed yt/geometry/coordinates/cartesian_coordinates.py
--- /dev/null
+++ b/yt/geometry/coordinates/cartesian_coordinates.py
@@ -0,0 +1,122 @@
+"""
+Cartesian fields
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2013, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+import numpy as np
+from .coordinate_handler import \
+    CoordinateHandler, \
+    _unknown_coord, \
+    _get_coord_fields
+import yt.visualization._MPL as _MPL
+
+class CartesianCoordinateHandler(CoordinateHandler):
+
+    def __init__(self, ds):
+        super(CartesianCoordinateHandler, self).__init__(ds)
+
+    def setup_fields(self, registry):
+        for axi, ax in enumerate('xyz'):
+            f1, f2 = _get_coord_fields(axi)
+            registry.add_field(("index", "d%s" % ax), function = f1,
+                               display_field = False,
+                               units = "code_length")
+            registry.add_field(("index", "%s" % ax), function = f2,
+                               display_field = False,
+                               units = "code_length")
+        def _cell_volume(field, data):
+            rv  = data["index", "dx"].copy(order='K')
+            rv *= data["index", "dy"]
+            rv *= data["index", "dz"]
+            return rv
+        registry.add_field(("index", "cell_volume"), function=_cell_volume,
+                           display_field=False, units = "code_length**3")
+        registry.check_derived_fields(
+            [("index", "dx"), ("index", "dy"), ("index", "dz"),
+             ("index", "x"), ("index", "y"), ("index", "z"),
+             ("index", "cell_volume")])
+
+    def pixelize(self, dimension, data_source, field, bounds, size,
+                 antialias = True, periodic = True):
+        if dimension < 3:
+            return self._ortho_pixelize(data_source, field, bounds, size,
+                                        antialias, dimension, periodic)
+        else:
+            return self._oblique_pixelize(data_source, field, bounds, size,
+                                          antialias)
+
+    def _ortho_pixelize(self, data_source, field, bounds, size, antialias,
+                        dim, periodic):
+        # We should be using fcoords
+        period = self.period[:2].copy() # dummy here
+        period[0] = self.period[self.x_axis[dim]]
+        period[1] = self.period[self.y_axis[dim]]
+        if hasattr(period, 'in_units'):
+            period = period.in_units("code_length").d
+        buff = _MPL.Pixelize(data_source['px'], data_source['py'],
+                             data_source['pdx'], data_source['pdy'],
+                             data_source[field], size[0], size[1],
+                             bounds, int(antialias),
+                             period, int(periodic)).transpose()
+        return buff
+
+    def _oblique_pixelize(self, data_source, field, bounds, size, antialias):
+        indices = np.argsort(data_source['dx'])[::-1]
+        buff = _MPL.CPixelize(data_source['x'], data_source['y'],
+                              data_source['z'], data_source['px'],
+                              data_source['py'], data_source['pdx'],
+                              data_source['pdy'], data_source['pdz'],
+                              data_source.center, data_source._inv_mat, indices,
+                              data_source[field], size[0], size[1], bounds).transpose()
+        return buff
+
+    def convert_from_cartesian(self, coord):
+        return coord
+
+    def convert_to_cartesian(self, coord):
+        return coord
+
+    def convert_to_cylindrical(self, coord):
+        center = self.ds.domain_center
+        return cartesian_to_cylindrical(coord, center)
+
+    def convert_from_cylindrical(self, coord):
+        center = self.ds.domain_center
+        return cylindrical_to_cartesian(coord, center)
+
+    def convert_to_spherical(self, coord):
+        raise NotImplementedError
+
+    def convert_from_spherical(self, coord):
+        raise NotImplementedError
+
+    # Despite being mutables, we uses these here to be clear about how these
+    # are generated and to ensure that they are not re-generated unnecessarily
+    axis_name = { 0  : 'x',  1  : 'y',  2  : 'z',
+                 'x' : 'x', 'y' : 'y', 'z' : 'z',
+                 'X' : 'x', 'Y' : 'y', 'Z' : 'z'}
+
+    axis_id = { 'x' : 0, 'y' : 1, 'z' : 2,
+                 0  : 0,  1  : 1,  2  : 2}
+
+    x_axis = { 'x' : 1, 'y' : 2, 'z' : 0,
+                0  : 1,  1  : 2,  2  : 0}
+
+    y_axis = { 'x' : 2, 'y' : 0, 'z' : 1,
+                0  : 2,  1  : 0,  2  : 1}
+
+    @property
+    def period(self):
+        return self.ds.domain_width
+

diff -r a69cdcb0a6e33dfbb7b612c7af43884a817e67cb -r 43bcb95d3cf697ea2ceb5c0ac2a873856cb7a5ed yt/geometry/coordinates/coordinate_handler.py
--- /dev/null
+++ b/yt/geometry/coordinates/coordinate_handler.py
@@ -0,0 +1,127 @@
+"""
+Coordinate handler base class.
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2013, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+import numpy as np
+import abc
+import weakref
+
+from yt.funcs import *
+from yt.fields.field_info_container import \
+    NullFunc, FieldInfoContainer
+from yt.utilities.io_handler import io_registry
+from yt.utilities.logger import ytLogger as mylog
+from yt.utilities.parallel_tools.parallel_analysis_interface import \
+    ParallelAnalysisInterface
+from yt.utilities.lib.misc_utilities import \
+    pixelize_cylinder
+import yt.visualization._MPL as _MPL
+
+def _unknown_coord(field, data):
+    raise YTCoordinateNotImplemented
+
+def _get_coord_fields(axi, units = "code_length"):
+    def _dds(field, data):
+        rv = data.ds.arr(data.fwidth[...,axi].copy(), units)
+        return data._reshape_vals(rv)
+    def _coords(field, data):
+        rv = data.ds.arr(data.fcoords[...,axi].copy(), units)
+        return data._reshape_vals(rv)
+    return _dds, _coords
+
+class CoordinateHandler(object):
+    
+    def __init__(self, ds):
+        self.ds = weakref.proxy(ds)
+
+    def setup_fields(self):
+        # This should return field definitions for x, y, z, r, theta, phi
+        raise NotImplementedError
+
+    def pixelize(self, dimension, data_source, field, bounds, size, antialias = True):
+        # This should *actually* be a pixelize call, not just returning the
+        # pixelizer
+        raise NotImplementedError
+
+    def distance(self, start, end):
+        p1 = self.convert_to_cartesian(start)
+        p2 = self.convert_to_cartesian(end)
+        return np.sqrt(((p1-p2)**2.0).sum())
+
+    def convert_from_cartesian(self, coord):
+        raise NotImplementedError
+
+    def convert_to_cartesian(self, coord):
+        raise NotImplementedError
+
+    def convert_to_cylindrical(self, coord):
+        raise NotImplementedError
+
+    def convert_from_cylindrical(self, coord):
+        raise NotImplementedError
+
+    def convert_to_spherical(self, coord):
+        raise NotImplementedError
+
+    def convert_from_spherical(self, coord):
+        raise NotImplementedError
+
+    @property
+    def axis_name(self):
+        raise NotImplementedError
+
+    @property
+    def image_axis_name(self):
+        # Default
+        rv = {}
+        for i in range(3):
+            rv[i] = (self.axis_name[self.x_axis[i]],
+                     self.axis_name[self.y_axis[i]])
+            rv[self.axis_name[i]] = rv[i]
+            rv[self.axis_name[i].upper()] = rv[i]
+        return rv
+
+    @property
+    def axis_id(self):
+        raise NotImplementedError
+
+    @property
+    def x_axis(self):
+        raise NotImplementedError
+
+    @property
+    def y_axis(self):
+        raise NotImplementedError
+
+    @property
+    def period(self):
+        raise NotImplementedError
+
+def cartesian_to_cylindrical(coord, center = (0,0,0)):
+    c2 = np.zeros_like(coord)
+    c2[...,0] = ((coord[...,0] - center[0])**2.0
+              +  (coord[...,1] - center[1])**2.0)**0.5
+    c2[...,1] = coord[...,2] # rzt
+    c2[...,2] = np.arctan2(coord[...,1] - center[1],
+                           coord[...,0] - center[0])
+    return c2
+
+def cylindrical_to_cartesian(coord, center = (0,0,0)):
+    c2 = np.zeros_like(coord)
+    c2[...,0] = np.cos(coord[...,0]) * coord[...,1] + center[0]
+    c2[...,1] = np.sin(coord[...,0]) * coord[...,1] + center[1]
+    c2[...,2] = coord[...,2]
+    return c2
+

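A short example of the module-level conversion helper defined above; note the 'rzt' ordering of the result, r first, then z, then theta. The function body is restated here so the snippet runs on its own:

    import numpy as np

    def cartesian_to_cylindrical(coord, center=(0, 0, 0)):
        c2 = np.zeros_like(coord)
        c2[..., 0] = ((coord[..., 0] - center[0])**2.0
                   +  (coord[..., 1] - center[1])**2.0)**0.5
        c2[..., 1] = coord[..., 2]                    # rzt ordering
        c2[..., 2] = np.arctan2(coord[..., 1] - center[1],
                                coord[..., 0] - center[0])
        return c2

    xyz = np.array([[1.0, 1.0, 2.0]])
    r, z, theta = cartesian_to_cylindrical(xyz)[0]
    # r ~= 1.414 (sqrt(2)), z == 2.0, theta ~= 0.785 (pi/4)
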
diff -r a69cdcb0a6e33dfbb7b612c7af43884a817e67cb -r 43bcb95d3cf697ea2ceb5c0ac2a873856cb7a5ed yt/geometry/coordinates/cylindrical_coordinates.py
--- /dev/null
+++ b/yt/geometry/coordinates/cylindrical_coordinates.py
@@ -0,0 +1,163 @@
+"""
+Cylindrical fields
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2013, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+import numpy as np
+from yt.units.yt_array import YTArray
+from .coordinate_handler import \
+    CoordinateHandler, \
+    _unknown_coord, \
+    _get_coord_fields
+import yt.visualization._MPL as _MPL
+from yt.utilities.lib.misc_utilities import \
+    pixelize_cylinder
+#
+# Cylindrical fields
+#
+
+class CylindricalCoordinateHandler(CoordinateHandler):
+
+    def __init__(self, ds, ordering = 'rzt'):
+        if ordering != 'rzt': raise NotImplementedError
+        super(CylindricalCoordinateHandler, self).__init__(ds)
+
+    def setup_fields(self, registry):
+        # return the fields for r, z, theta
+        registry.add_field(("index", "dx"), function=_unknown_coord)
+        registry.add_field(("index", "dy"), function=_unknown_coord)
+        registry.add_field(("index", "x"), function=_unknown_coord)
+        registry.add_field(("index", "y"), function=_unknown_coord)
+        f1, f2 = _get_coord_fields(0)
+        registry.add_field(("index", "dr"), function = f1,
+                           display_field = False,
+                           units = "code_length")
+        registry.add_field(("index", "r"), function = f2,
+                           display_field = False,
+                           units = "code_length")
+
+        f1, f2 = _get_coord_fields(1)
+        registry.add_field(("index", "dz"), function = f1,
+                           display_field = False,
+                           units = "code_length")
+        registry.add_field(("index", "z"), function = f2,
+                           display_field = False,
+                           units = "code_length")
+
+        f1, f2 = _get_coord_fields(2, "")
+        registry.add_field(("index", "dtheta"), function = f1,
+                           display_field = False,
+                           units = "")
+        registry.add_field(("index", "theta"), function = f2,
+                           display_field = False,
+                           units = "")
+
+        def _CylindricalVolume(field, data):
+            return data["index", "dtheta"] \
+                 * data["index", "r"] \
+                 * data["index", "dr"] \
+                 * data["index", "dz"]
+        registry.add_field(("index", "cell_volume"),
+                 function=_CylindricalVolume,
+                 units = "code_length**3")
+
+
+    def pixelize(self, dimension, data_source, field, bounds, size,
+                 antialias = True, periodic = True):
+        ax_name = self.axis_name[dimension]
+        if ax_name in ('r', 'theta'):
+            return self._ortho_pixelize(data_source, field, bounds, size,
+                                        antialias, dimension, periodic)
+        elif ax_name == "z":
+            return self._cyl_pixelize(data_source, field, bounds, size,
+                                        antialias)
+        else:
+            # Pixelizing along a cylindrical surface is a bit tricky
+            raise NotImplementedError
+
+    def _ortho_pixelize(self, data_source, field, bounds, size, antialias,
+                        dim, periodic):
+        period = self.period[:2].copy() # dummy here
+        period[0] = self.period[self.x_axis[dim]]
+        period[1] = self.period[self.y_axis[dim]]
+        if hasattr(period, 'in_units'):
+            period = period.in_units("code_length").d
+        buff = _MPL.Pixelize(data_source['px'], data_source['py'],
+                             data_source['pdx'], data_source['pdy'],
+                             data_source[field], size[0], size[1],
+                             bounds, int(antialias),
+                             period, int(periodic)).transpose()
+        return buff
+
+    def _cyl_pixelize(self, data_source, field, bounds, size, antialias):
+        buff = pixelize_cylinder(data_source['r'],
+                                 data_source['dr'],
+                                 data_source['theta'],
+                                 data_source['dtheta']/2.0, # half-widths
+                                 size, data_source[field], bounds)
+        return buff
+
+    axis_name = { 0  : 'r',  1  : 'z',  2  : 'theta',
+                 'r' : 'r', 'z' : 'z', 'theta' : 'theta',
+                 'R' : 'r', 'Z' : 'z', 'Theta' : 'theta'}
+
+    axis_id = { 'r' : 0, 'z' : 1, 'theta' : 2,
+                 0  : 0,  1  : 1,  2  : 2}
+
+    x_axis = { 'r' : 1, 'z' : 0, 'theta' : 0,
+                0  : 1,  1  : 0,  2  : 0}
+
+    y_axis = { 'r' : 2, 'z' : 2, 'theta' : 1,
+                0  : 2,  1  : 2,  2  : 1}
+
+    _image_axis_name = None
+
+    @property
+    def image_axis_name(self):    
+        if self._image_axis_name is not None:
+            return self._image_axis_name
+        # This is the x and y axes labels that get displayed.  For
+        # non-Cartesian coordinates, we usually want to override these for
+        # Cartesian coordinates, since we transform them.
+        rv = {0: ('z', 'theta'),
+              1: ('x', 'y'),
+              2: ('r', 'z')}
+        for i in rv.keys():
+            rv[self.axis_name[i]] = rv[i]
+            rv[self.axis_name[i].upper()] = rv[i]
+        self._image_axis_name = rv
+        return rv
+
+    def convert_from_cartesian(self, coord):
+        return cartesian_to_cylindrical(coord)
+
+    def convert_to_cartesian(self, coord):
+        return cylindrical_to_cartesian(coord)
+
+    def convert_to_cylindrical(self, coord):
+        return coord
+
+    def convert_from_cylindrical(self, coord):
+        return coord
+
+    def convert_to_spherical(self, coord):
+        raise NotImplementedError
+
+    def convert_from_spherical(self, coord):
+        raise NotImplementedError
+
+    @property
+    def period(self):
+        return np.array([0.0, 0.0, 2.0*np.pi])
+

diff -r a69cdcb0a6e33dfbb7b612c7af43884a817e67cb -r 43bcb95d3cf697ea2ceb5c0ac2a873856cb7a5ed yt/geometry/coordinates/geographic_coordinates.py
--- /dev/null
+++ b/yt/geometry/coordinates/geographic_coordinates.py
@@ -0,0 +1,194 @@
+"""
+Geographic fields
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2013, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+import numpy as np
+from .coordinate_handler import \
+    CoordinateHandler, \
+    _unknown_coord, \
+    _get_coord_fields
+import yt.visualization._MPL as _MPL
+from yt.utilities.lib.misc_utilities import \
+    pixelize_cylinder, pixelize_aitoff
+
+class GeographicCoordinateHandler(CoordinateHandler):
+
+    def __init__(self, ds, ordering = 'latlonalt'):
+        if ordering != 'latlonalt': raise NotImplementedError
+        super(GeographicCoordinateHandler, self).__init__(ds)
+
+    def setup_fields(self, registry):
+        # return the fields for r, z, theta
+        registry.add_field(("index", "dx"), function=_unknown_coord)
+        registry.add_field(("index", "dy"), function=_unknown_coord)
+        registry.add_field(("index", "dz"), function=_unknown_coord)
+        registry.add_field(("index", "x"), function=_unknown_coord)
+        registry.add_field(("index", "y"), function=_unknown_coord)
+        registry.add_field(("index", "z"), function=_unknown_coord)
+        f1, f2 = _get_coord_fields(0, "")
+        registry.add_field(("index", "dlatitude"), function = f1,
+                           display_field = False,
+                           units = "")
+        registry.add_field(("index", "latitude"), function = f2,
+                           display_field = False,
+                           units = "")
+
+        f1, f2 = _get_coord_fields(1, "")
+        registry.add_field(("index", "dlongitude"), function = f1,
+                           display_field = False,
+                           units = "")
+        registry.add_field(("index", "longitude"), function = f2,
+                           display_field = False,
+                           units = "")
+
+        f1, f2 = _get_coord_fields(2)
+        registry.add_field(("index", "daltitude"), function = f1,
+                           display_field = False,
+                           units = "code_length")
+        registry.add_field(("index", "altitude"), function = f2,
+                           display_field = False,
+                           units = "code_length")
+
+        def _SphericalVolume(field, data):
+            # r**2 sin theta dr dtheta dphi
+            # We can use the transformed coordinates here.
+            vol = data["index", "r"]**2.0
+            vol *= data["index", "dr"]
+            vol *= np.sin(data["index", "theta"])
+            vol *= data["index", "dtheta"]
+            vol *= data["index", "dphi"]
+            return vol
+        registry.add_field(("index", "cell_volume"),
+                 function=_SphericalVolume,
+                 units = "code_length**3")
+
+        # Altitude is the radius from the central zone minus the radius of the
+        # surface.
+        def _altitude_to_radius(field, data):
+            surface_height = data.get_field_parameter("surface_height")
+            if surface_height is None:
+                surface_height = getattr(data.ds, "surface_height", 0.0)
+            return data["altitude"] + surface_height
+        registry.add_field(("index", "r"),
+                 function=_altitude_to_radius,
+                 units = "code_length")
+        registry.alias(("index", "dr"), ("index", "daltitude"))
+
+        def _longitude_to_theta(field, data):
+            # longitude runs from -180 to 180.
+            return (data["longitude"] + 180) * np.pi/180.0
+        registry.add_field(("index", "theta"),
+                 function = _longitude_to_theta,
+                 units = "")
+        def _dlongitude_to_dtheta(field, data):
+            return data["dlongitude"] * np.pi/180.0
+        registry.add_field(("index", "dtheta"),
+                 function = _dlongitude_to_dtheta,
+                 units = "")
+
+        def _latitude_to_phi(field, data):
+            # latitude runs from -90 to 90
+            return (data["latitude"] + 90) * np.pi/180.0
+        registry.add_field(("index", "phi"),
+                 function = _latitude_to_phi,
+                 units = "")
+        def _dlatitude_to_dphi(field, data):
+            return data["dlatitude"] * np.pi/180.0
+        registry.add_field(("index", "dphi"),
+                 function = _dlatitude_to_dphi,
+                 units = "")
+
+    def pixelize(self, dimension, data_source, field, bounds, size,
+                 antialias = True, periodic = True):
+        if dimension in (0, 1):
+            return self._cyl_pixelize(data_source, field, bounds, size,
+                                          antialias, dimension)
+        elif dimension == 2:
+            return self._ortho_pixelize(data_source, field, bounds, size,
+                                        antialias, dimension, periodic)
+        else:
+            raise NotImplementedError
+
+    def _ortho_pixelize(self, data_source, field, bounds, size, antialias,
+                        dim, periodic):
+        buff = pixelize_aitoff(data_source["theta"], data_source["dtheta"]/2.0,
+                               data_source["phi"], data_source["dphi"]/2.0,
+                               size, data_source[field], None,
+                               None).transpose()
+        return buff
+
+    def _cyl_pixelize(self, data_source, field, bounds, size, antialias,
+                      dimension):
+        if dimension == 0:
+            buff = pixelize_cylinder(data_source['r'],
+                                     data_source['dr'] / 2.0,
+                                     data_source['theta'],
+                                     data_source['dtheta'] / 2.0, # half-widths
+                                     size, data_source[field], bounds)
+        elif dimension == 1:
+            buff = pixelize_cylinder(data_source['r'],
+                                     data_source['dr'] / 2.0,
+                                     data_source['phi'],
+                                     data_source['dphi'] / 2.0, # half-widths
+                                     size, data_source[field], bounds)
+        else:
+            raise RuntimeError
+        return buff
+
+
+    def convert_from_cartesian(self, coord):
+        raise NotImplementedError
+
+    def convert_to_cartesian(self, coord):
+        raise NotImplementedError
+
+    def convert_to_cylindrical(self, coord):
+        raise NotImplementedError
+
+    def convert_from_cylindrical(self, coord):
+        raise NotImplementedError
+
+    def convert_to_spherical(self, coord):
+        raise NotImplementedError
+
+    def convert_from_spherical(self, coord):
+        raise NotImplementedError
+
+    # Despite being mutables, we uses these here to be clear about how these
+    # are generated and to ensure that they are not re-generated unnecessarily
+    axis_name = { 0  : 'latitude',  1  : 'longitude',  2  : 'altitude',
+                 'latitude' : 'latitude',
+                 'longitude' : 'longitude', 
+                 'altitude' : 'altitude',
+                 'Latitude' : 'latitude',
+                 'Longitude' : 'longitude', 
+                 'Altitude' : 'altitude',
+                 'lat' : 'latitude',
+                 'lon' : 'longitude', 
+                 'alt' : 'altitude' }
+
+    axis_id = { 'latitude' : 0, 'longitude' : 1, 'altitude' : 2,
+                 0  : 0,  1  : 1,  2  : 2}
+
+    x_axis = { 'latitude' : 1, 'longitude' : 0, 'altitude' : 0,
+                0  : 1,  1  : 0,  2  : 0}
+
+    y_axis = { 'latitude' : 2, 'longitude' : 2, 'altitude' : 1,
+                0  : 2,  1  : 2,  2  : 1}
+
+    @property
+    def period(self):
+        return self.ds.domain_width
+

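The geographic handler maps (latitude, longitude, altitude) onto the spherical (r, theta, phi) machinery through simple linear transforms: theta from longitude, phi from latitude, and r as altitude plus a surface height. A minimal sketch of the angular transforms with one worked value each (degrees in, radians out, as in the field definitions above):

    import numpy as np

    def longitude_to_theta(lon_deg):
        # longitude runs from -180 to 180  ->  theta in [0, 2*pi]
        return (lon_deg + 180.0) * np.pi / 180.0

    def latitude_to_phi(lat_deg):
        # latitude runs from -90 to 90  ->  phi in [0, pi]
        return (lat_deg + 90.0) * np.pi / 180.0

    theta = longitude_to_theta(0.0)    # pi, the prime meridian
    phi = latitude_to_phi(45.0)        # 3*pi/4
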
diff -r a69cdcb0a6e33dfbb7b612c7af43884a817e67cb -r 43bcb95d3cf697ea2ceb5c0ac2a873856cb7a5ed yt/geometry/coordinates/polar_coordinates.py
--- /dev/null
+++ b/yt/geometry/coordinates/polar_coordinates.py
@@ -0,0 +1,149 @@
+"""
+Polar fields
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2013, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+import numpy as np
+from .coordinate_handler import \
+    CoordinateHandler, \
+    _unknown_coord, \
+    _get_coord_fields
+from yt.utilities.lib.misc_utilities import \
+    pixelize_cylinder
+
+class PolarCoordinateHandler(CoordinateHandler):
+
+    def __init__(self, ds, ordering = 'rtz'):
+        if ordering != 'rtz': raise NotImplementedError
+        super(PolarCoordinateHandler, self).__init__(ds)
+
+    def setup_fields(self, registry):
+        # return the fields for r, z, theta
+        registry.add_field("dx", function=_unknown_coord)
+        registry.add_field("dy", function=_unknown_coord)
+        registry.add_field("x", function=_unknown_coord)
+        registry.add_field("y", function=_unknown_coord)
+
+        f1, f2 = _get_coord_fields(0)
+        registry.add_field(("index", "dr"), function = f1,
+                           display_field = False,
+                           units = "code_length")
+        registry.add_field(("index", "r"), function = f2,
+                           display_field = False,
+                           units = "code_length")
+
+        f1, f2 = _get_coord_fields(1, "")
+        registry.add_field(("index", "dtheta"), function = f1,
+                           display_field = False,
+                           units = "")
+        registry.add_field(("index", "theta"), function = f2,
+                           display_field = False,
+                           units = "")
+
+        f1, f2 = _get_coord_fields(2) 
+        registry.add_field(("index", "dz"), function = f1,
+                           display_field = False,
+                           units = "code_length")
+        registry.add_field(("index", "z"), function = f2,
+                           display_field = False,
+                           units = "code_length")
+
+
+        def _CylindricalVolume(field, data):
+            return data["dtheta"] * data["r"] * data["dr"] * data["dz"]
+        registry.add_field("CellVolume", function=_CylindricalVolume)
+
+    def pixelize(self, dimension, data_source, field, bounds, size, antialias = True):
+        ax_name = self.axis_name[dimension]
+        if ax_name in ('r', 'theta'):
+            return self._ortho_pixelize(data_source, field, bounds, size,
+                                        antialias)
+        elif ax_name == "z":
+            return self._polar_pixelize(data_source, field, bounds, size,
+                                        antialias)
+        else:
+            # Pixelizing along a cylindrical surface is a bit tricky
+            raise NotImplementedError
+
+
+    def _ortho_pixelize(self, data_source, field, bounds, size, antialias):
+        buff = _MPL.Pixelize(data_source['px'], data_source['py'],
+                             data_source['pdx'], data_source['pdy'],
+                             data_source[field], size[0], size[1],
+                             bounds, int(antialias),
+                             True, self.period).transpose()
+        return buff
+
+    def _polar_pixelize(self, data_source, field, bounds, size, antialias):
+        # Out bounds here will *always* be what plot window thinks are x0, x1,
+        # y0, y1, but which will actually be rmin, rmax, thetamin, thetamax.
+        buff = pixelize_cylinder(data_source['r'],
+                                 data_source['dr'],
+                                 data_source['theta'],
+                                 data_source['dtheta'] / 2.0, # half-widths
+                                 size, data_source[field], bounds)
+        return buff
+
+    axis_name = { 0  : 'r',  1  : 'theta',  2  : 'z',
+                 'r' : 'r', 'theta' : 'theta', 'z' : 'z',
+                 'R' : 'r', 'Theta' : 'theta', 'Z' : 'z'}
+
+    axis_id = { 'r' : 0, 'theta' : 1, 'z' : 2,
+                 0  : 0,  1  : 1,  2  : 2}
+
+    x_axis = { 'r' : 1, 'theta' : 0, 'z' : 0,
+                0  : 1,  1  : 0,  2  : 0}
+
+    y_axis = { 'r' : 2, 'theta' : 2, 'z' : 1,
+                0  : 2,  1  : 2,  2  : 1}
+
+    _image_axis_name = None
+    @property
+    def image_axis_name(self):    
+        if self._image_axis_name is not None:
+            return self._image_axis_name
+        # This is the x and y axes labels that get displayed.  For
+        # non-Cartesian coordinates, we usually want to override these for
+        # Cartesian coordinates, since we transform them.
+        rv = {0: ('theta', 'z'),
+              1: ('x', 'y'),
+              2: ('r', 'z')}
+        for i in rv.keys():
+            rv[self.axis_name[i]] = rv[i]
+            rv[self.axis_name[i].upper()] = rv[i]
+        self._image_axis_name = rv
+        return rv
+
+    def convert_from_cartesian(self, coord):
+        return cartesian_to_cylindrical(coord)
+
+    def convert_to_cartesian(self, coord):
+        return cylindrical_to_cartesian(coord)
+
+    def convert_to_cylindrical(self, coord):
+        return coord
+
+    def convert_from_cylindrical(self, coord):
+        return coord
+
+    def convert_to_spherical(self, coord):
+        raise NotImplementedError
+
+    def convert_from_spherical(self, coord):
+        raise NotImplementedError
+
+    @property
+    def period(self):
+        return np.array([0.0, 0.0, 2.0*np.pi])
+

diff -r a69cdcb0a6e33dfbb7b612c7af43884a817e67cb -r 43bcb95d3cf697ea2ceb5c0ac2a873856cb7a5ed yt/geometry/coordinates/spec_cube_coordinates.py
--- /dev/null
+++ b/yt/geometry/coordinates/spec_cube_coordinates.py
@@ -0,0 +1,65 @@
+"""
+Cartesian fields
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2013, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+import numpy as np
+from .cartesian_coordinates import \
+    CartesianCoordinateHandler
+
+class SpectralCubeCoordinateHandler(CartesianCoordinateHandler):
+
+    def __init__(self, ds):
+        super(SpectralCubeCoordinateHandler, self).__init__(ds)
+
+        self.axis_name = {}
+        self.axis_id = {}
+
+        for axis, axis_name in zip([ds.lon_axis, ds.lat_axis, ds.spec_axis],
+                                   ["Image\ x", "Image\ y", ds.spec_name]):
+            lower_ax = "xyz"[axis]
+            upper_ax = lower_ax.upper()
+
+            self.axis_name[axis] = axis_name
+            self.axis_name[lower_ax] = axis_name
+            self.axis_name[upper_ax] = axis_name
+            self.axis_name[axis_name] = axis_name
+
+            self.axis_id[lower_ax] = axis
+            self.axis_id[axis] = axis
+            self.axis_id[axis_name] = axis
+
+        self.default_unit_label = {}
+        self.default_unit_label[ds.lon_axis] = "pixel"
+        self.default_unit_label[ds.lat_axis] = "pixel"
+        self.default_unit_label[ds.spec_axis] = ds.spec_unit
+
+        def _spec_axis(ax, x, y):
+            p = (x,y)[ax]
+            return [self.ds.pixel2spec(pp).v for pp in p]
+
+        self.axis_field = {}
+        self.axis_field[self.ds.spec_axis] = _spec_axis
+
+    def convert_to_cylindrical(self, coord):
+        raise NotImplementedError
+
+    def convert_from_cylindrical(self, coord):
+        raise NotImplementedError
+
+    x_axis = { 'x' : 1, 'y' : 0, 'z' : 0,
+                0  : 1,  1  : 0,  2  : 0}
+
+    y_axis = { 'x' : 2, 'y' : 2, 'z' : 1,
+                0  : 2,  1  : 2,  2  : 1}

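The axis_field hook defined here feeds the plot_window changes earlier in this series: when the plotted axis has an entry in coordinates.axis_field, the window limits along that axis are remapped (here from pixel to spectral units) before extents and labels are drawn. A standalone sketch of what the _spec_axis closure does, with a made-up linear dispersion standing in for ds.pixel2spec:

    def pixel2spec(pixel):
        # stand-in for ds.pixel2spec; fake linear dispersion, illustration only
        return 1.0 + 0.1 * pixel

    def spec_axis(ax, x, y):
        # pick the limits along the requested image axis and convert them
        p = (x, y)[ax]
        return [pixel2spec(pp) for pp in p]

    xmin, xmax = spec_axis(0, (0.0, 255.0), (0.0, 63.0))   # [1.0, 26.5]
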
diff -r a69cdcb0a6e33dfbb7b612c7af43884a817e67cb -r 43bcb95d3cf697ea2ceb5c0ac2a873856cb7a5ed yt/geometry/coordinates/spherical_coordinates.py
--- /dev/null
+++ b/yt/geometry/coordinates/spherical_coordinates.py
@@ -0,0 +1,178 @@
+"""
+Spherical fields
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2013, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+import numpy as np
+from .coordinate_handler import \
+    CoordinateHandler, \
+    _unknown_coord, \
+    _get_coord_fields
+import yt.visualization._MPL as _MPL
+from yt.utilities.lib.misc_utilities import \
+    pixelize_cylinder, pixelize_aitoff
+
+class SphericalCoordinateHandler(CoordinateHandler):
+
+    def __init__(self, ds, ordering = 'rtp'):
+        if ordering != 'rtp': raise NotImplementedError
+        super(SphericalCoordinateHandler, self).__init__(ds)
+
+    def setup_fields(self, registry):
+        # return the fields for r, theta, phi
+        registry.add_field(("index", "dx"), function=_unknown_coord)
+        registry.add_field(("index", "dy"), function=_unknown_coord)
+        registry.add_field(("index", "dz"), function=_unknown_coord)
+        registry.add_field(("index", "x"), function=_unknown_coord)
+        registry.add_field(("index", "y"), function=_unknown_coord)
+        registry.add_field(("index", "z"), function=_unknown_coord)
+        f1, f2 = _get_coord_fields(0)
+        registry.add_field(("index", "dr"), function = f1,
+                           display_field = False,
+                           units = "code_length")
+        registry.add_field(("index", "r"), function = f2,
+                           display_field = False,
+                           units = "code_length")
+
+        f1, f2 = _get_coord_fields(1, "")
+        registry.add_field(("index", "dtheta"), function = f1,
+                           display_field = False,
+                           units = "")
+        registry.add_field(("index", "theta"), function = f2,
+                           display_field = False,
+                           units = "")
+
+        f1, f2 = _get_coord_fields(2, "")
+        registry.add_field(("index", "dphi"), function = f1,
+                           display_field = False,
+                           units = "")
+        registry.add_field(("index", "phi"), function = f2,
+                           display_field = False,
+                           units = "")
+
+        def _SphericalVolume(field, data):
+            # r**2 sin theta dr dtheta dphi
+            vol = data["index", "r"]**2.0
+            vol *= data["index", "dr"]
+            vol *= np.sin(data["index", "theta"])
+            vol *= data["index", "dtheta"]
+            vol *= data["index", "dphi"]
+            return vol
+        registry.add_field(("index", "cell_volume"),
+                 function=_SphericalVolume,
+                 units = "code_length**3")
+
+    def pixelize(self, dimension, data_source, field, bounds, size,
+                 antialias = True, periodic = True):
+        self.period
+        if dimension == 0:
+            return self._ortho_pixelize(data_source, field, bounds, size,
+                                        antialias, dimension, periodic)
+        elif dimension in (1, 2):
+            return self._cyl_pixelize(data_source, field, bounds, size,
+                                          antialias, dimension)
+        else:
+            raise NotImplementedError
+
+    def _ortho_pixelize(self, data_source, field, bounds, size, antialias,
+                        dim, periodic):
+        # We should be using fcoords
+        period = self.period[:2].copy() # dummy here
+        period[0] = self.period[self.x_axis[dim]]
+        period[1] = self.period[self.y_axis[dim]]
+        period = period.in_units("code_length").d
+        buff = _MPL.Pixelize(data_source['px'], data_source['py'],
+                             data_source['pdx'], data_source['pdy'],
+                             data_source[field], size[0], size[1],
+                             bounds, int(antialias),
+                             period, int(periodic)).transpose()
+        return buff
+
+    def _cyl_pixelize(self, data_source, field, bounds, size, antialias,
+                      dimension):
+        if dimension == 1:
+            buff = pixelize_cylinder(data_source['r'],
+                                     data_source['dr'] / 2.0,
+                                     data_source['phi'],
+                                     data_source['dphi'] / 2.0, # half-widths
+                                     size, data_source[field], bounds)
+            # Trying to preserve a nice-looking system, with x on the x and y
+            # on the y.
+            buff = buff.transpose()
+        elif dimension == 2:
+            buff = pixelize_cylinder(data_source['r'],
+                                     data_source['dr'] / 2.0,
+                                     data_source['theta'],
+                                     data_source['dtheta'] / 2.0, # half-widths
+                                     size, data_source[field], bounds)
+        else:
+            raise RuntimeError
+        return buff
+
+
+    def convert_from_cartesian(self, coord):
+        raise NotImplementedError
+
+    def convert_to_cartesian(self, coord):
+        raise NotImplementedError
+
+    def convert_to_cylindrical(self, coord):
+        raise NotImplementedError
+
+    def convert_from_cylindrical(self, coord):
+        raise NotImplementedError
+
+    def convert_to_spherical(self, coord):
+        raise NotImplementedError
+
+    def convert_from_spherical(self, coord):
+        raise NotImplementedError
+
+    # Despite being mutable, we use these here to be clear about how these
+    # are generated and to ensure that they are not re-generated unnecessarily
+    axis_name = { 0  : 'r',  1  : 'theta',  2  : 'phi',
+                 'r' : 'r', 'theta' : 'theta', 'phi' : 'phi',
+                 'R' : 'r', 'Theta' : 'theta', 'Phi' : 'phi'}
+
+    _image_axis_name = None
+    @property
+    def image_axis_name(self):    
+        if self._image_axis_name is not None:
+            return self._image_axis_name
+        # This is the x and y axes labels that get displayed.  For
+        # non-Cartesian coordinates, we usually want to override these for
+        # Cartesian coordinates, since we transform them.
+        rv = {0: ('theta', 'phi'),
+              1: ('x', 'y'),
+              2: ('r', 'z')}
+        for i in rv.keys():
+            rv[self.axis_name[i]] = rv[i]
+            rv[self.axis_name[i].upper()] = rv[i]
+        self._image_axis_name = rv
+        return rv
+
+
+    axis_id = { 'r' : 0, 'theta' : 1, 'phi' : 2,
+                 0  : 0,  1  : 1,  2  : 2}
+
+    x_axis = { 'r' : 1, 'theta' : 0, 'phi' : 0,
+                0  : 1,  1  : 0,  2  : 0}
+
+    y_axis = { 'r' : 2, 'theta' : 2, 'phi' : 1,
+                0  : 2,  1  : 2,  2  : 1}
+
+    @property
+    def period(self):
+        return self.ds.domain_width
+

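The _SphericalVolume field above accumulates the spherical volume element dV = r**2 sin(theta) dr dtheta dphi.  A standalone sanity check of that formula with made-up cell values (independent of yt):

    import numpy as np

    r, dr = 1.0, 0.1
    theta, dtheta = np.pi / 2.0, 0.05
    dphi = 0.05
    dV = r**2 * np.sin(theta) * dr * dtheta * dphi
    print(dV)  # 0.00025 for these values, since sin(pi/2) == 1
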
diff -r a69cdcb0a6e33dfbb7b612c7af43884a817e67cb -r 43bcb95d3cf697ea2ceb5c0ac2a873856cb7a5ed yt/geometry/cylindrical_coordinates.py
--- a/yt/geometry/cylindrical_coordinates.py
+++ /dev/null
@@ -1,163 +0,0 @@
-"""
-Cylindrical fields
-
-
-
-
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (c) 2013, yt Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-import numpy as np
-from yt.units.yt_array import YTArray
-from .coordinate_handler import \
-    CoordinateHandler, \
-    _unknown_coord, \
-    _get_coord_fields
-import yt.visualization._MPL as _MPL
-from yt.utilities.lib.misc_utilities import \
-    pixelize_cylinder
-#
-# Cylindrical fields
-#
-
-class CylindricalCoordinateHandler(CoordinateHandler):
-
-    def __init__(self, ds, ordering = 'rzt'):
-        if ordering != 'rzt': raise NotImplementedError
-        super(CylindricalCoordinateHandler, self).__init__(ds)
-
-    def setup_fields(self, registry):
-        # return the fields for r, z, theta
-        registry.add_field(("index", "dx"), function=_unknown_coord)
-        registry.add_field(("index", "dy"), function=_unknown_coord)
-        registry.add_field(("index", "x"), function=_unknown_coord)
-        registry.add_field(("index", "y"), function=_unknown_coord)
-        f1, f2 = _get_coord_fields(0)
-        registry.add_field(("index", "dr"), function = f1,
-                           display_field = False,
-                           units = "code_length")
-        registry.add_field(("index", "r"), function = f2,
-                           display_field = False,
-                           units = "code_length")
-
-        f1, f2 = _get_coord_fields(1)
-        registry.add_field(("index", "dz"), function = f1,
-                           display_field = False,
-                           units = "code_length")
-        registry.add_field(("index", "z"), function = f2,
-                           display_field = False,
-                           units = "code_length")
-
-        f1, f2 = _get_coord_fields(2, "")
-        registry.add_field(("index", "dtheta"), function = f1,
-                           display_field = False,
-                           units = "")
-        registry.add_field(("index", "theta"), function = f2,
-                           display_field = False,
-                           units = "")
-
-        def _CylindricalVolume(field, data):
-            return data["index", "dtheta"] \
-                 * data["index", "r"] \
-                 * data["index", "dr"] \
-                 * data["index", "dz"]
-        registry.add_field(("index", "cell_volume"),
-                 function=_CylindricalVolume,
-                 units = "code_length**3")
-
-
-    def pixelize(self, dimension, data_source, field, bounds, size,
-                 antialias = True, periodic = True):
-        ax_name = self.axis_name[dimension]
-        if ax_name in ('r', 'theta'):
-            return self._ortho_pixelize(data_source, field, bounds, size,
-                                        antialias, dimension, periodic)
-        elif ax_name == "z":
-            return self._cyl_pixelize(data_source, field, bounds, size,
-                                        antialias)
-        else:
-            # Pixelizing along a cylindrical surface is a bit tricky
-            raise NotImplementedError
-
-    def _ortho_pixelize(self, data_source, field, bounds, size, antialias,
-                        dim, periodic):
-        period = self.period[:2].copy() # dummy here
-        period[0] = self.period[self.x_axis[dim]]
-        period[1] = self.period[self.y_axis[dim]]
-        if hasattr(period, 'in_units'):
-            period = period.in_units("code_length").d
-        buff = _MPL.Pixelize(data_source['px'], data_source['py'],
-                             data_source['pdx'], data_source['pdy'],
-                             data_source[field], size[0], size[1],
-                             bounds, int(antialias),
-                             period, int(periodic)).transpose()
-        return buff
-
-    def _cyl_pixelize(self, data_source, field, bounds, size, antialias):
-        buff = pixelize_cylinder(data_source['r'],
-                                 data_source['dr'],
-                                 data_source['theta'],
-                                 data_source['dtheta']/2.0, # half-widths
-                                 size, data_source[field], bounds)
-        return buff
-
-    axis_name = { 0  : 'r',  1  : 'z',  2  : 'theta',
-                 'r' : 'r', 'z' : 'z', 'theta' : 'theta',
-                 'R' : 'r', 'Z' : 'z', 'Theta' : 'theta'}
-
-    axis_id = { 'r' : 0, 'z' : 1, 'theta' : 2,
-                 0  : 0,  1  : 1,  2  : 2}
-
-    x_axis = { 'r' : 1, 'z' : 0, 'theta' : 0,
-                0  : 1,  1  : 0,  2  : 0}
-
-    y_axis = { 'r' : 2, 'z' : 2, 'theta' : 1,
-                0  : 2,  1  : 2,  2  : 1}
-
-    _image_axis_name = None
-
-    @property
-    def image_axis_name(self):    
-        if self._image_axis_name is not None:
-            return self._image_axis_name
-        # This is the x and y axes labels that get displayed.  For
-        # non-Cartesian coordinates, we usually want to override these for
-        # Cartesian coordinates, since we transform them.
-        rv = {0: ('z', 'theta'),
-              1: ('x', 'y'),
-              2: ('r', 'z')}
-        for i in rv.keys():
-            rv[self.axis_name[i]] = rv[i]
-            rv[self.axis_name[i].upper()] = rv[i]
-        self._image_axis_name = rv
-        return rv
-
-    def convert_from_cartesian(self, coord):
-        return cartesian_to_cylindrical(coord)
-
-    def convert_to_cartesian(self, coord):
-        return cylindrical_to_cartesian(coord)
-
-    def convert_to_cylindrical(self, coord):
-        return coord
-
-    def convert_from_cylindrical(self, coord):
-        return coord
-
-    def convert_to_spherical(self, coord):
-        raise NotImplementedError
-
-    def convert_from_spherical(self, coord):
-        raise NotImplementedError
-
-    @property
-    def period(self):
-        return np.array([0.0, 0.0, 2.0*np.pi])
-

diff -r a69cdcb0a6e33dfbb7b612c7af43884a817e67cb -r 43bcb95d3cf697ea2ceb5c0ac2a873856cb7a5ed yt/geometry/geographic_coordinates.py
--- a/yt/geometry/geographic_coordinates.py
+++ /dev/null
@@ -1,194 +0,0 @@
-"""
-Geographic fields
-
-
-
-
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (c) 2013, yt Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-import numpy as np
-from .coordinate_handler import \
-    CoordinateHandler, \
-    _unknown_coord, \
-    _get_coord_fields
-import yt.visualization._MPL as _MPL
-from yt.utilities.lib.misc_utilities import \
-    pixelize_cylinder, pixelize_aitoff
-
-class GeographicCoordinateHandler(CoordinateHandler):
-
-    def __init__(self, ds, ordering = 'latlonalt'):
-        if ordering != 'latlonalt': raise NotImplementedError
-        super(GeographicCoordinateHandler, self).__init__(ds)
-
-    def setup_fields(self, registry):
-        # return the fields for r, z, theta
-        registry.add_field(("index", "dx"), function=_unknown_coord)
-        registry.add_field(("index", "dy"), function=_unknown_coord)
-        registry.add_field(("index", "dz"), function=_unknown_coord)
-        registry.add_field(("index", "x"), function=_unknown_coord)
-        registry.add_field(("index", "y"), function=_unknown_coord)
-        registry.add_field(("index", "z"), function=_unknown_coord)
-        f1, f2 = _get_coord_fields(0, "")
-        registry.add_field(("index", "dlatitude"), function = f1,
-                           display_field = False,
-                           units = "")
-        registry.add_field(("index", "latitude"), function = f2,
-                           display_field = False,
-                           units = "")
-
-        f1, f2 = _get_coord_fields(1, "")
-        registry.add_field(("index", "dlongitude"), function = f1,
-                           display_field = False,
-                           units = "")
-        registry.add_field(("index", "longitude"), function = f2,
-                           display_field = False,
-                           units = "")
-
-        f1, f2 = _get_coord_fields(2)
-        registry.add_field(("index", "daltitude"), function = f1,
-                           display_field = False,
-                           units = "code_length")
-        registry.add_field(("index", "altitude"), function = f2,
-                           display_field = False,
-                           units = "code_length")
-
-        def _SphericalVolume(field, data):
-            # r**2 sin theta dr dtheta dphi
-            # We can use the transformed coordinates here.
-            vol = data["index", "r"]**2.0
-            vol *= data["index", "dr"]
-            vol *= np.sin(data["index", "theta"])
-            vol *= data["index", "dtheta"]
-            vol *= data["index", "dphi"]
-            return vol
-        registry.add_field(("index", "cell_volume"),
-                 function=_SphericalVolume,
-                 units = "code_length**3")
-
-        # Altitude is the radius from the central zone minus the radius of the
-        # surface.
-        def _altitude_to_radius(field, data):
-            surface_height = data.get_field_parameter("surface_height")
-            if surface_height is None:
-                surface_height = getattr(data.ds, "surface_height", 0.0)
-            return data["altitude"] + surface_height
-        registry.add_field(("index", "r"),
-                 function=_altitude_to_radius,
-                 units = "code_length")
-        registry.alias(("index", "dr"), ("index", "daltitude"))
-
-        def _longitude_to_theta(field, data):
-            # longitude runs from -180 to 180.
-            return (data["longitude"] + 180) * np.pi/180.0
-        registry.add_field(("index", "theta"),
-                 function = _longitude_to_theta,
-                 units = "")
-        def _dlongitude_to_dtheta(field, data):
-            return data["dlongitude"] * np.pi/180.0
-        registry.add_field(("index", "dtheta"),
-                 function = _dlongitude_to_dtheta,
-                 units = "")
-
-        def _latitude_to_phi(field, data):
-            # latitude runs from -90 to 90
-            return (data["latitude"] + 90) * np.pi/180.0
-        registry.add_field(("index", "phi"),
-                 function = _latitude_to_phi,
-                 units = "")
-        def _dlatitude_to_dphi(field, data):
-            return data["dlatitude"] * np.pi/180.0
-        registry.add_field(("index", "dphi"),
-                 function = _dlatitude_to_dphi,
-                 units = "")
-
-    def pixelize(self, dimension, data_source, field, bounds, size,
-                 antialias = True, periodic = True):
-        if dimension in (0, 1):
-            return self._cyl_pixelize(data_source, field, bounds, size,
-                                          antialias, dimension)
-        elif dimension == 2:
-            return self._ortho_pixelize(data_source, field, bounds, size,
-                                        antialias, dimension, periodic)
-        else:
-            raise NotImplementedError
-
-    def _ortho_pixelize(self, data_source, field, bounds, size, antialias,
-                        dim, periodic):
-        buff = pixelize_aitoff(data_source["theta"], data_source["dtheta"]/2.0,
-                               data_source["phi"], data_source["dphi"]/2.0,
-                               size, data_source[field], None,
-                               None).transpose()
-        return buff
-
-    def _cyl_pixelize(self, data_source, field, bounds, size, antialias,
-                      dimension):
-        if dimension == 0:
-            buff = pixelize_cylinder(data_source['r'],
-                                     data_source['dr'] / 2.0,
-                                     data_source['theta'],
-                                     data_source['dtheta'] / 2.0, # half-widths
-                                     size, data_source[field], bounds)
-        elif dimension == 1:
-            buff = pixelize_cylinder(data_source['r'],
-                                     data_source['dr'] / 2.0,
-                                     data_source['phi'],
-                                     data_source['dphi'] / 2.0, # half-widths
-                                     size, data_source[field], bounds)
-        else:
-            raise RuntimeError
-        return buff
-
-
-    def convert_from_cartesian(self, coord):
-        raise NotImplementedError
-
-    def convert_to_cartesian(self, coord):
-        raise NotImplementedError
-
-    def convert_to_cylindrical(self, coord):
-        raise NotImplementedError
-
-    def convert_from_cylindrical(self, coord):
-        raise NotImplementedError
-
-    def convert_to_spherical(self, coord):
-        raise NotImplementedError
-
-    def convert_from_spherical(self, coord):
-        raise NotImplementedError
-
-    # Despite being mutables, we uses these here to be clear about how these
-    # are generated and to ensure that they are not re-generated unnecessarily
-    axis_name = { 0  : 'latitude',  1  : 'longitude',  2  : 'altitude',
-                 'latitude' : 'latitude',
-                 'longitude' : 'longitude', 
-                 'altitude' : 'altitude',
-                 'Latitude' : 'latitude',
-                 'Longitude' : 'longitude', 
-                 'Altitude' : 'altitude',
-                 'lat' : 'latitude',
-                 'lon' : 'longitude', 
-                 'alt' : 'altitude' }
-
-    axis_id = { 'latitude' : 0, 'longitude' : 1, 'altitude' : 2,
-                 0  : 0,  1  : 1,  2  : 2}
-
-    x_axis = { 'latitude' : 1, 'longitude' : 0, 'altitude' : 0,
-                0  : 1,  1  : 0,  2  : 0}
-
-    y_axis = { 'latitude' : 2, 'longitude' : 2, 'altitude' : 1,
-                0  : 2,  1  : 2,  2  : 1}
-
-    @property
-    def period(self):
-        return self.ds.domain_width
-

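The _longitude_to_theta and _latitude_to_phi fields in the file above (removed here, presumably relocated under yt/geometry/coordinates/ along with the others) shift the geographic ranges of [-180, 180] and [-90, 90] degrees onto [0, 2*pi] and [0, pi] radians.  A quick standalone check of those formulas at the boundary values:

    import numpy as np

    for longitude, latitude in [(-180.0, -90.0), (0.0, 0.0), (180.0, 90.0)]:
        theta = (longitude + 180.0) * np.pi / 180.0  # 0, pi, 2*pi
        phi = (latitude + 90.0) * np.pi / 180.0      # 0, pi/2, pi
        print(theta, phi)
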
diff -r a69cdcb0a6e33dfbb7b612c7af43884a817e67cb -r 43bcb95d3cf697ea2ceb5c0ac2a873856cb7a5ed yt/geometry/polar_coordinates.py
--- a/yt/geometry/polar_coordinates.py
+++ /dev/null
@@ -1,149 +0,0 @@
-"""
-Polar fields
-
-
-
-
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (c) 2013, yt Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-import numpy as np
-from .coordinate_handler import \
-    CoordinateHandler, \
-    _unknown_coord, \
-    _get_coord_fields
-from yt.utilities.lib.misc_utilities import \
-    pixelize_cylinder
-
-class PolarCoordinateHandler(CoordinateHandler):
-
-    def __init__(self, ds, ordering = 'rtz'):
-        if ordering != 'rtz': raise NotImplementedError
-        super(PolarCoordinateHandler, self).__init__(ds)
-
-    def setup_fields(self, registry):
-        # return the fields for r, z, theta
-        registry.add_field("dx", function=_unknown_coord)
-        registry.add_field("dy", function=_unknown_coord)
-        registry.add_field("x", function=_unknown_coord)
-        registry.add_field("y", function=_unknown_coord)
-
-        f1, f2 = _get_coord_fields(0)
-        registry.add_field(("index", "dr"), function = f1,
-                           display_field = False,
-                           units = "code_length")
-        registry.add_field(("index", "r"), function = f2,
-                           display_field = False,
-                           units = "code_length")
-
-        f1, f2 = _get_coord_fields(1, "")
-        registry.add_field(("index", "dtheta"), function = f1,
-                           display_field = False,
-                           units = "")
-        registry.add_field(("index", "theta"), function = f2,
-                           display_field = False,
-                           units = "")
-
-        f1, f2 = _get_coord_fields(2) 
-        registry.add_field(("index", "dz"), function = f1,
-                           display_field = False,
-                           units = "code_length")
-        registry.add_field(("index", "z"), function = f2,
-                           display_field = False,
-                           units = "code_length")
-
-
-        def _CylindricalVolume(field, data):
-            return data["dtheta"] * data["r"] * data["dr"] * data["dz"]
-        registry.add_field("CellVolume", function=_CylindricalVolume)
-
-    def pixelize(self, dimension, data_source, field, bounds, size, antialias = True):
-        ax_name = self.axis_name[dimension]
-        if ax_name in ('r', 'theta'):
-            return self._ortho_pixelize(data_source, field, bounds, size,
-                                        antialias)
-        elif ax_name == "z":
-            return self._polar_pixelize(data_source, field, bounds, size,
-                                        antialias)
-        else:
-            # Pixelizing along a cylindrical surface is a bit tricky
-            raise NotImplementedError
-
-
-    def _ortho_pixelize(self, data_source, field, bounds, size, antialias):
-        buff = _MPL.Pixelize(data_source['px'], data_source['py'],
-                             data_source['pdx'], data_source['pdy'],
-                             data_source[field], size[0], size[1],
-                             bounds, int(antialias),
-                             True, self.period).transpose()
-        return buff
-
-    def _polar_pixelize(self, data_source, field, bounds, size, antialias):
-        # Out bounds here will *always* be what plot window thinks are x0, x1,
-        # y0, y1, but which will actually be rmin, rmax, thetamin, thetamax.
-        buff = pixelize_cylinder(data_source['r'],
-                                 data_source['dr'],
-                                 data_source['theta'],
-                                 data_source['dtheta'] / 2.0, # half-widths
-                                 size, data_source[field], bounds)
-        return buff
-
-    axis_name = { 0  : 'r',  1  : 'theta',  2  : 'z',
-                 'r' : 'r', 'theta' : 'theta', 'z' : 'z',
-                 'R' : 'r', 'Theta' : 'theta', 'Z' : 'z'}
-
-    axis_id = { 'r' : 0, 'theta' : 1, 'z' : 2,
-                 0  : 0,  1  : 1,  2  : 2}
-
-    x_axis = { 'r' : 1, 'theta' : 0, 'z' : 0,
-                0  : 1,  1  : 0,  2  : 0}
-
-    y_axis = { 'r' : 2, 'theta' : 2, 'z' : 1,
-                0  : 2,  1  : 2,  2  : 1}
-
-    _image_axis_name = None
-    @property
-    def image_axis_name(self):    
-        if self._image_axis_name is not None:
-            return self._image_axis_name
-        # This is the x and y axes labels that get displayed.  For
-        # non-Cartesian coordinates, we usually want to override these for
-        # Cartesian coordinates, since we transform them.
-        rv = {0: ('theta', 'z'),
-              1: ('x', 'y'),
-              2: ('r', 'z')}
-        for i in rv.keys():
-            rv[self.axis_name[i]] = rv[i]
-            rv[self.axis_name[i].upper()] = rv[i]
-        self._image_axis_name = rv
-        return rv
-
-    def convert_from_cartesian(self, coord):
-        return cartesian_to_cylindrical(coord)
-
-    def convert_to_cartesian(self, coord):
-        return cylindrical_to_cartesian(coord)
-
-    def convert_to_cylindrical(self, coord):
-        return coord
-
-    def convert_from_cylindrical(self, coord):
-        return coord
-
-    def convert_to_spherical(self, coord):
-        raise NotImplementedError
-
-    def convert_from_spherical(self, coord):
-        raise NotImplementedError
-
-    @property
-    def period(self):
-        return np.array([0.0, 0.0, 2.0*np.pi])
-

diff -r a69cdcb0a6e33dfbb7b612c7af43884a817e67cb -r 43bcb95d3cf697ea2ceb5c0ac2a873856cb7a5ed yt/geometry/setup.py
--- a/yt/geometry/setup.py
+++ b/yt/geometry/setup.py
@@ -7,6 +7,7 @@
 def configuration(parent_package='',top_path=None):
     from numpy.distutils.misc_util import Configuration
     config = Configuration('geometry',parent_package,top_path)
+    config.add_subpackage('coordinates')
     config.add_extension("oct_container", 
                 ["yt/geometry/oct_container.pyx"],
                 include_dirs=["yt/utilities/lib/"],

diff -r a69cdcb0a6e33dfbb7b612c7af43884a817e67cb -r 43bcb95d3cf697ea2ceb5c0ac2a873856cb7a5ed yt/geometry/spec_cube_coordinates.py
--- a/yt/geometry/spec_cube_coordinates.py
+++ /dev/null
@@ -1,65 +0,0 @@
-"""
-Cartesian fields
-
-
-
-
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (c) 2013, yt Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-import numpy as np
-from .cartesian_coordinates import \
-    CartesianCoordinateHandler
-
-class SpectralCubeCoordinateHandler(CartesianCoordinateHandler):
-
-    def __init__(self, ds):
-        super(SpectralCubeCoordinateHandler, self).__init__(ds)
-
-        self.axis_name = {}
-        self.axis_id = {}
-
-        for axis, axis_name in zip([ds.lon_axis, ds.lat_axis, ds.spec_axis],
-                                   ["Image\ x", "Image\ y", ds.spec_name]):
-            lower_ax = "xyz"[axis]
-            upper_ax = lower_ax.upper()
-
-            self.axis_name[axis] = axis_name
-            self.axis_name[lower_ax] = axis_name
-            self.axis_name[upper_ax] = axis_name
-            self.axis_name[axis_name] = axis_name
-
-            self.axis_id[lower_ax] = axis
-            self.axis_id[axis] = axis
-            self.axis_id[axis_name] = axis
-
-        self.default_unit_label = {}
-        self.default_unit_label[ds.lon_axis] = "pixel"
-        self.default_unit_label[ds.lat_axis] = "pixel"
-        self.default_unit_label[ds.spec_axis] = ds.spec_unit
-
-        def _spec_axis(ax, x, y):
-            p = (x,y)[ax]
-            return [self.ds.pixel2spec(pp).v for pp in p]
-
-        self.axis_field = {}
-        self.axis_field[self.ds.spec_axis] = _spec_axis
-
-    def convert_to_cylindrical(self, coord):
-        raise NotImplementedError
-
-    def convert_from_cylindrical(self, coord):
-        raise NotImplementedError
-
-    x_axis = { 'x' : 1, 'y' : 0, 'z' : 0,
-                0  : 1,  1  : 0,  2  : 0}
-
-    y_axis = { 'x' : 2, 'y' : 2, 'z' : 1,
-                0  : 2,  1  : 2,  2  : 1}

This diff is so big that we needed to truncate the remainder.

https://bitbucket.org/yt_analysis/yt/commits/f5970aef40af/
Changeset:   f5970aef40af
Branch:      yt-3.0
User:        MatthewTurk
Date:        2014-08-05 20:04:10
Summary:     Refactor sanitize_width and sanitize_center
Affected #:  4 files

diff -r 43bcb95d3cf697ea2ceb5c0ac2a873856cb7a5ed -r f5970aef40af093358f2367fd341a9ae02ffa299 yt/funcs.py
--- a/yt/funcs.py
+++ b/yt/funcs.py
@@ -759,3 +759,9 @@
     if os.path.isfile(_fn):
         mylog.info("Loading plugins from %s", _fn)
         execfile(_fn)
+
+def fix_unitary(u):
+    if u == '1':
+        return 'unitary'
+    else:
+        return u

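A short illustration of the helper added above:

    from yt.funcs import fix_unitary

    assert fix_unitary('1') == 'unitary'   # '1' is shorthand for the unitary unit
    assert fix_unitary('kpc') == 'kpc'     # any other unit string passes through
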
diff -r 43bcb95d3cf697ea2ceb5c0ac2a873856cb7a5ed -r f5970aef40af093358f2367fd341a9ae02ffa299 yt/geometry/coordinates/coordinate_handler.py
--- a/yt/geometry/coordinates/coordinate_handler.py
+++ b/yt/geometry/coordinates/coordinate_handler.py
@@ -17,6 +17,7 @@
 import numpy as np
 import abc
 import weakref
+from numbers import Number
 
 from yt.funcs import *
 from yt.fields.field_info_container import \
@@ -28,6 +29,8 @@
 from yt.utilities.lib.misc_utilities import \
     pixelize_cylinder
 import yt.visualization._MPL as _MPL
+from yt.units.yt_array import \
+    YTArray, YTQuantity
 
 def _unknown_coord(field, data):
     raise YTCoordinateNotImplemented
@@ -41,6 +44,29 @@
         return data._reshape_vals(rv)
     return _dds, _coords
 
+def validate_iterable_width(width, ds, unit=None):
+    if isinstance(width[0], tuple) and isinstance(width[1], tuple):
+        validate_width_tuple(width[0])
+        validate_width_tuple(width[1])
+        return (ds.quan(width[0][0], fix_unitary(width[0][1])),
+                ds.quan(width[1][0], fix_unitary(width[1][1])))
+    elif isinstance(width[0], Number) and isinstance(width[1], Number):
+        return (ds.quan(width[0], 'code_length'),
+                ds.quan(width[1], 'code_length'))
+    elif isinstance(width[0], YTQuantity) and isinstance(width[1], YTQuantity):
+        return (ds.quan(width[0]), ds.quan(width[1]))
+    else:
+        validate_width_tuple(width)
+        # If width and unit are both valid width tuples, we
+        # assume width controls x and unit controls y
+        try:
+            validate_width_tuple(unit)
+            return (ds.quan(width[0], fix_unitary(width[1])),
+                    ds.quan(unit[0], fix_unitary(unit[1])))
+        except YTInvalidWidthError:
+            return (ds.quan(width[0], fix_unitary(width[1])),
+                    ds.quan(width[0], fix_unitary(width[1])))
+
 class CoordinateHandler(object):
     
     def __init__(self, ds):
@@ -109,6 +135,63 @@
     def period(self):
         raise NotImplementedError
 
+    def sanitize_width(self, axis, width, depth):
+        if width is None:
+            # Default to code units
+            if not iterable(axis):
+                xax = self.x_axis[axis]
+                yax = self.y_axis[axis]
+                w = self.ds.domain_width[[xax, yax]]
+            else:
+                # axis is actually the normal vector
+                # for an off-axis data object.
+                mi = np.argmin(self.ds.domain_width)
+                w = self.ds.domain_width[[mi,mi]]
+            width = (w[0], w[1])
+        elif iterable(width):
+            width = validate_iterable_width(width, self.ds)
+        elif isinstance(width, YTQuantity):
+            width = (width, width)
+        elif isinstance(width, Number):
+            width = (self.ds.quan(width, 'code_length'),
+                     self.ds.quan(width, 'code_length'))
+        else:
+            raise YTInvalidWidthError(width)
+        if depth is not None:
+            if iterable(depth):
+                validate_width_tuple(depth)
+                depth = (self.ds.quan(depth[0], fix_unitary(depth[1])), )
+            elif isinstance(depth, Number):
+                depth = (self.ds.quan(depth, 'code_length',
+                         registry = self.ds.unit_registry), )
+            elif isinstance(depth, YTQuantity):
+                depth = (depth, )
+            else:
+                raise YTInvalidWidthError(depth)
+            return width + depth
+        return width
+
+    def sanitize_center(self, center):
+        if isinstance(center, basestring):
+            if center.lower() == "m" or center.lower() == "max":
+                v, center = self.ds.find_max(("gas", "density"))
+                center = self.ds.arr(center, 'code_length')
+            elif center.lower() == "c" or center.lower() == "center":
+                center = (self.ds.domain_left_edge + self.ds.domain_right_edge) / 2
+            else:
+                raise RuntimeError('center keyword \"%s\" not recognized' % center)
+        elif isinstance(center, YTArray):
+            return self.ds.arr(center)
+        elif iterable(center):
+            if iterable(center[0]) and isinstance(center[1], basestring):
+                center = self.ds.arr(center[0], center[1])
+            else:
+                center = self.ds.arr(center, 'code_length')
+        else:
+            raise RuntimeError("center keyword \"%s\" not recognized" % center)
+        return center
+
+
 def cartesian_to_cylindrical(coord, center = (0,0,0)):
     c2 = np.zeros_like(coord)
     c2[...,0] = ((coord[...,0] - center[0])**2.0

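Sketching the width and center forms that the relocated sanitize_width and sanitize_center accept, following the branches above; `ds` stands in for a loaded dataset and the numeric values are placeholders:

    coords = ds.coordinates
    # Each call returns a pair of YTQuantity objects (plus a depth, if given).
    w_default = coords.sanitize_width(0, None, None)            # full domain along the image axes
    w_tuple   = coords.sanitize_width(0, (10.0, 'kpc'), None)   # (value, unit), applied to both axes
    w_pair    = coords.sanitize_width(0, ((10.0, 'kpc'), (5.0, 'kpc')), None)
    w_number  = coords.sanitize_width(0, 0.25, None)            # bare number, taken as code_length
    w_depth   = coords.sanitize_width(0, (10.0, 'kpc'), (2.0, 'kpc'))  # depth appended as a third component

    c_max    = coords.sanitize_center("max")   # density maximum (requires a gas density field)
    c_center = coords.sanitize_center("c")     # domain center
    c_tuple  = coords.sanitize_center(([0.5, 0.5, 0.5], "unitary"))  # (values, unit) pair
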
diff -r 43bcb95d3cf697ea2ceb5c0ac2a873856cb7a5ed -r f5970aef40af093358f2367fd341a9ae02ffa299 yt/utilities/fits_image.py
--- a/yt/utilities/fits_image.py
+++ b/yt/utilities/fits_image.py
@@ -13,7 +13,6 @@
 import numpy as np
 from yt.funcs import mylog, iterable, fix_axis, ensure_list
 from yt.visualization.fixed_resolution import FixedResolutionBuffer
-from yt.visualization.plot_window import get_sanitized_center
 from yt.data_objects.construction_data_containers import YTCoveringGridBase
 from yt.utilities.on_demand_imports import _astropy
 from yt.units.yt_array import YTQuantity
@@ -306,7 +305,7 @@
     def __init__(self, ds, axis, fields, center="c", **kwargs):
         fields = ensure_list(fields)
         axis = fix_axis(axis, ds)
-        center = get_sanitized_center(center, ds)
+        center = ds.coordinates.sanitize_center(center)
         slc = ds.slice(axis, center[axis], **kwargs)
         w, frb = construct_image(slc)
         super(FITSSlice, self).__init__(frb, fields=fields, wcs=w)

diff -r 43bcb95d3cf697ea2ceb5c0ac2a873856cb7a5ed -r f5970aef40af093358f2367fd341a9ae02ffa299 yt/visualization/plot_window.py
--- a/yt/visualization/plot_window.py
+++ b/yt/visualization/plot_window.py
@@ -42,7 +42,8 @@
     StringIO
 from yt.funcs import \
     mylog, iterable, ensure_list, \
-    fix_axis, validate_width_tuple
+    fix_axis, validate_width_tuple, \
+    fix_unitary
 from yt.units.unit_object import \
     Unit
 from yt.units.unit_registry import \
@@ -75,95 +76,10 @@
 except ImportError:
     from pyparsing import ParseFatalException
 
-def fix_unitary(u):
-    if u == '1':
-        return 'unitary'
-    else:
-        return u
-
-def validate_iterable_width(width, ds, unit=None):
-    if isinstance(width[0], tuple) and isinstance(width[1], tuple):
-        validate_width_tuple(width[0])
-        validate_width_tuple(width[1])
-        return (ds.quan(width[0][0], fix_unitary(width[0][1])),
-                ds.quan(width[1][0], fix_unitary(width[1][1])))
-    elif isinstance(width[0], Number) and isinstance(width[1], Number):
-        return (ds.quan(width[0], 'code_length'),
-                ds.quan(width[1], 'code_length'))
-    elif isinstance(width[0], YTQuantity) and isinstance(width[1], YTQuantity):
-        return (ds.quan(width[0]), ds.quan(width[1]))
-    else:
-        validate_width_tuple(width)
-        # If width and unit are both valid width tuples, we
-        # assume width controls x and unit controls y
-        try:
-            validate_width_tuple(unit)
-            return (ds.quan(width[0], fix_unitary(width[1])),
-                    ds.quan(unit[0], fix_unitary(unit[1])))
-        except YTInvalidWidthError:
-            return (ds.quan(width[0], fix_unitary(width[1])),
-                    ds.quan(width[0], fix_unitary(width[1])))
-
-def get_sanitized_width(axis, width, depth, ds):
-    if width is None:
-        # Default to code units
-        if not iterable(axis):
-            xax = ds.coordinates.x_axis[axis]
-            yax = ds.coordinates.y_axis[axis]
-            w = ds.domain_width[[xax, yax]]
-        else:
-            # axis is actually the normal vector
-            # for an off-axis data object.
-            mi = np.argmin(ds.domain_width)
-            w = ds.domain_width[[mi,mi]]
-        width = (w[0], w[1])
-    elif iterable(width):
-        width = validate_iterable_width(width, ds)
-    elif isinstance(width, YTQuantity):
-        width = (width, width)
-    elif isinstance(width, Number):
-        width = (ds.quan(width, 'code_length'),
-                 ds.quan(width, 'code_length'))
-    else:
-        raise YTInvalidWidthError(width)
-    if depth is not None:
-        if iterable(depth):
-            validate_width_tuple(depth)
-            depth = (ds.quan(depth[0], fix_unitary(depth[1])), )
-        elif isinstance(depth, Number):
-            depth = (ds.quan(depth, 'code_length',
-                     registry = ds.unit_registry), )
-        elif isinstance(depth, YTQuantity):
-            depth = (depth, )
-        else:
-            raise YTInvalidWidthError(depth)
-        return width + depth
-    return width
-
-def get_sanitized_center(center, ds):
-    if isinstance(center, basestring):
-        if center.lower() == "m" or center.lower() == "max":
-            v, center = ds.find_max(("gas", "density"))
-            center = ds.arr(center, 'code_length')
-        elif center.lower() == "c" or center.lower() == "center":
-            center = (ds.domain_left_edge + ds.domain_right_edge) / 2
-        else:
-            raise RuntimeError('center keyword \"%s\" not recognized' % center)
-    elif isinstance(center, YTArray):
-        return ds.arr(center)
-    elif iterable(center):
-        if iterable(center[0]) and isinstance(center[1], basestring):
-            center = ds.arr(center[0], center[1])
-        else:
-            center = ds.arr(center, 'code_length')
-    else:
-        raise RuntimeError("center keyword \"%s\" not recognized" % center)
-    return center
-
 def get_window_parameters(axis, center, width, ds):
     if ds.geometry == "cartesian" or ds.geometry == "spectral_cube":
-        width = get_sanitized_width(axis, width, None, ds)
-        center = get_sanitized_center(center, ds)
+        width = ds.coordinates.sanitize_width(axis, width, None)
+        center = ds.coordinates.sanitize_center(center)
     elif ds.geometry in ("polar", "cylindrical"):
         # Set our default width to be the full domain
         width = [ds.domain_right_edge[0]*2.0, ds.domain_right_edge[0]*2.0]
@@ -198,8 +114,8 @@
     return (bounds, center)
 
 def get_oblique_window_parameters(normal, center, width, ds, depth=None):
-    width = get_sanitized_width(normal, width, depth, ds)
-    center = get_sanitized_center(center, ds)
+    width = ds.coordinates.sanitize_width(normal, width, depth)
+    center = ds.coordinates.sanitize_center(center)
 
     if len(width) == 2:
         # Transforming to the cutting plane coordinate system
@@ -557,7 +473,8 @@
 
         axes_unit = get_axes_unit(width, self.ds)
 
-        width = get_sanitized_width(self.frb.axis, width, None, self.ds)
+        width = self.ds.coordinates.sanitize_width(
+            self.frb.axis, width, None)
 
         centerx = (self.xlim[1] + self.xlim[0])/2.
         centery = (self.ylim[1] + self.ylim[0])/2.


https://bitbucket.org/yt_analysis/yt/commits/31de32f1870b/
Changeset:   31de32f1870b
Branch:      yt-3.0
User:        MatthewTurk
Date:        2014-08-05 20:58:59
Summary:     Beginning refactoring of display_center and center.
Affected #:  5 files

diff -r f5970aef40af093358f2367fd341a9ae02ffa299 -r 31de32f1870b31a848a5b9aa280174dc41ab7f98 yt/data_objects/data_containers.py
--- a/yt/data_objects/data_containers.py
+++ b/yt/data_objects/data_containers.py
@@ -778,7 +778,8 @@
         skip += list(set(frb._exclude_fields).difference(set(self._key_fields)))
         self.fields = ensure_list(fields) + \
             [k for k in self.field_data if k not in skip]
-        (bounds, center) = get_window_parameters(axis, center, width, self.ds)
+        (bounds, center, display_center) = \
+            get_window_parameters(axis, center, width, self.ds)
         pw = PWViewerMPL(self, bounds, fields=self.fields, origin=origin,
                          frb_generator=frb, plot_type=plot_type)
         pw._setup_plots()

diff -r f5970aef40af093358f2367fd341a9ae02ffa299 -r 31de32f1870b31a848a5b9aa280174dc41ab7f98 yt/geometry/coordinates/coordinate_handler.py
--- a/yt/geometry/coordinates/coordinate_handler.py
+++ b/yt/geometry/coordinates/coordinate_handler.py
@@ -189,7 +189,9 @@
                 center = self.ds.arr(center, 'code_length')
         else:
             raise RuntimeError("center keyword \"%s\" not recognized" % center)
-        return center
+        # This has to return both a center and a display_center
+        display_center = self.convert_to_cartesian(center)
+        return center, display_center
 
 
 def cartesian_to_cylindrical(coord, center = (0,0,0)):

diff -r f5970aef40af093358f2367fd341a9ae02ffa299 -r 31de32f1870b31a848a5b9aa280174dc41ab7f98 yt/geometry/coordinates/cylindrical_coordinates.py
--- a/yt/geometry/coordinates/cylindrical_coordinates.py
+++ b/yt/geometry/coordinates/cylindrical_coordinates.py
@@ -160,4 +160,3 @@
     @property
     def period(self):
         return np.array([0.0, 0.0, 2.0*np.pi])
-

diff -r f5970aef40af093358f2367fd341a9ae02ffa299 -r 31de32f1870b31a848a5b9aa280174dc41ab7f98 yt/geometry/coordinates/spherical_coordinates.py
--- a/yt/geometry/coordinates/spherical_coordinates.py
+++ b/yt/geometry/coordinates/spherical_coordinates.py
@@ -125,7 +125,21 @@
         raise NotImplementedError
 
     def convert_to_cartesian(self, coord):
-        raise NotImplementedError
+        if isinstance(coord, np.ndarray) and len(coord.shape) > 1:
+            r = coord[:,0]
+            theta = coord[:,1]
+            phi = coord[:,2]
+            nc = np.zeros_like(coord)
+            # r, theta, phi
+            nc[:,0] = np.cos(phi) * np.sin(theta)*r
+            nc[:,1] = np.sin(phi) * np.sin(theta)*r
+            nc[:,2] = np.cos(theta) * r
+        else:
+            r, theta, phi = coord
+            nc = (np.cos(phi) * np.sin(theta)*r,
+                  np.sin(phi) * np.sin(theta)*r,
+                  np.cos(theta) * r)
+        return nc
 
     def convert_to_cylindrical(self, coord):
         raise NotImplementedError
@@ -176,3 +190,11 @@
     def period(self):
         return self.ds.domain_width
 
+    def sanitize_width(self, axis, width, depth):
+        if axis == 0:
+            # This is a slice along r
+            width = self.ds.domain_width[1], self.ds.domain_width[2]
+        else:
+            width = super(SphericalCoordinateHandler, self).sanitize_width(
+              axis, width, depth)
+        return width

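The new convert_to_cartesian branch above is the standard spherical-to-Cartesian map; a quick standalone check of the scalar branch with made-up values:

    import numpy as np

    r, theta, phi = 2.0, np.pi / 2.0, 0.0
    x = np.cos(phi) * np.sin(theta) * r   # -> 2.0
    y = np.sin(phi) * np.sin(theta) * r   # -> 0.0
    z = np.cos(theta) * r                 # -> ~0.0, the point lies in the equatorial plane
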
diff -r f5970aef40af093358f2367fd341a9ae02ffa299 -r 31de32f1870b31a848a5b9aa280174dc41ab7f98 yt/visualization/plot_window.py
--- a/yt/visualization/plot_window.py
+++ b/yt/visualization/plot_window.py
@@ -77,6 +77,8 @@
     from pyparsing import ParseFatalException
 
 def get_window_parameters(axis, center, width, ds):
+    display_center, center = ds.coordinates.sanitize_center(center)
+    width = ds.coordinates.sanitize_width(axis, width, None)
     if ds.geometry == "cartesian" or ds.geometry == "spectral_cube":
         width = ds.coordinates.sanitize_width(axis, width, None)
         center = ds.coordinates.sanitize_center(center)
@@ -107,15 +109,15 @@
         raise NotImplementedError
     xax = ds.coordinates.x_axis[axis]
     yax = ds.coordinates.y_axis[axis]
-    bounds = (center[xax]-width[0] / 2,
-              center[xax]+width[0] / 2,
-              center[yax]-width[1] / 2,
-              center[yax]+width[1] / 2)
-    return (bounds, center)
+    bounds = (display_center[xax]-width[0] / 2,
+              display_center[xax]+width[0] / 2,
+              display_center[yax]-width[1] / 2,
+              display_center[yax]+width[1] / 2)
+    return (bounds, center, display_center)
 
 def get_oblique_window_parameters(normal, center, width, ds, depth=None):
+    display_center, center = ds.coordinates.sanitize_center(center)
     width = ds.coordinates.sanitize_width(normal, width, depth)
-    center = ds.coordinates.sanitize_center(center)
 
     if len(width) == 2:
         # Transforming to the cutting plane coordinate system
@@ -1013,7 +1015,8 @@
         self.ts = ts
         ds = self.ds = ts[0]
         axis = fix_axis(axis, ds)
-        (bounds, center) = get_window_parameters(axis, center, width, ds)
+        (bounds, center, display_center) = \
+                get_window_parameters(axis, center, width, ds)
         if field_parameters is None: field_parameters = {}
         slc = ds.slice(axis, center[axis],
             field_parameters = field_parameters, center=center)
@@ -1144,7 +1147,8 @@
         # If a non-weighted integral projection, assure field-label reflects that
         if weight_field is None and proj_style == "integrate":
             self.projected = True
-        (bounds, center) = get_window_parameters(axis, center, width, ds)
+        (bounds, center, display_center) = \
+                get_window_parameters(axis, center, width, ds)
         if field_parameters is None: field_parameters = {}
         proj = ds.proj(fields, axis, weight_field=weight_field,
                        center=center, data_source=data_source,


https://bitbucket.org/yt_analysis/yt/commits/0cb8cf3daa53/
Changeset:   0cb8cf3daa53
Branch:      yt-3.0
User:        MatthewTurk
Date:        2014-08-06 00:25:53
Summary:     We had these backwards for theta in cyl pixelizer.
Affected #:  1 file

diff -r 31de32f1870b31a848a5b9aa280174dc41ab7f98 -r 0cb8cf3daa533fc2fec1c1b75acedf833d777b24 yt/utilities/lib/misc_utilities.pyx
--- a/yt/utilities/lib/misc_utilities.pyx
+++ b/yt/utilities/lib/misc_utilities.pyx
@@ -544,8 +544,8 @@
                 if rmax <= r_i:
                     r_i += 0.5*dx 
                     continue
-                x = r_i * costheta
-                y = r_i * sintheta
+                y = r_i * costheta
+                x = r_i * sintheta
                 pi = <int>((x - x0)/dx)
                 pj = <int>((y - y0)/dy)
                 if pi >= 0 and pi < img.shape[0] and \

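A small numeric illustration of what the swap above changes, assuming costheta and sintheta are the cosine and sine of the zone's theta value: a zone at theta = 0 now lands on the image's y axis rather than its x axis.

    import numpy as np

    r_i, theta = 1.0, 0.0
    costheta, sintheta = np.cos(theta), np.sin(theta)
    y = r_i * costheta   # 1.0 after the fix (was assigned to x before)
    x = r_i * sintheta   # 0.0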

https://bitbucket.org/yt_analysis/yt/commits/1270cdfff108/
Changeset:   1270cdfff108
Branch:      yt-3.0
User:        MatthewTurk
Date:        2014-08-06 00:26:42
Summary:     Refactoring sanitize center.
Affected #:  4 files

diff -r 0cb8cf3daa533fc2fec1c1b75acedf833d777b24 -r 1270cdfff1084a35e03f9b97a742c088282406ae yt/geometry/coordinates/coordinate_handler.py
--- a/yt/geometry/coordinates/coordinate_handler.py
+++ b/yt/geometry/coordinates/coordinate_handler.py
@@ -171,7 +171,7 @@
             return width + depth
         return width
 
-    def sanitize_center(self, center):
+    def sanitize_center(self, center, axis):
         if isinstance(center, basestring):
             if center.lower() == "m" or center.lower() == "max":
                 v, center = self.ds.find_max(("gas", "density"))
@@ -181,7 +181,7 @@
             else:
                 raise RuntimeError('center keyword \"%s\" not recognized' % center)
         elif isinstance(center, YTArray):
-            return self.ds.arr(center)
+            return self.ds.arr(center), self.convert_to_cartesian(center)
         elif iterable(center):
             if iterable(center[0]) and isinstance(center[1], basestring):
                 center = self.ds.arr(center[0], center[1])

diff -r 0cb8cf3daa533fc2fec1c1b75acedf833d777b24 -r 1270cdfff1084a35e03f9b97a742c088282406ae yt/geometry/coordinates/spherical_coordinates.py
--- a/yt/geometry/coordinates/spherical_coordinates.py
+++ b/yt/geometry/coordinates/spherical_coordinates.py
@@ -107,15 +107,13 @@
                                      data_source['phi'],
                                      data_source['dphi'] / 2.0, # half-widths
                                      size, data_source[field], bounds)
-            # Trying to preserve a nice-looking system, with x on the x and y
-            # on the y.
-            buff = buff.transpose()
         elif dimension == 2:
             buff = pixelize_cylinder(data_source['r'],
                                      data_source['dr'] / 2.0,
                                      data_source['theta'],
                                      data_source['dtheta'] / 2.0, # half-widths
                                      size, data_source[field], bounds)
+            buff = buff.transpose()
         else:
             raise RuntimeError
         return buff
@@ -190,11 +188,34 @@
     def period(self):
         return self.ds.domain_width
 
+    def sanitize_center(self, center, axis):
+        center, display_center = super(
+            SphericalCoordinateHandler, self).sanitize_center(center, axis)
+        if axis == 0:
+            display_center = center
+        elif axis == 1:
+            display_center = (0.0 * display_center[0],
+                              0.0 * display_center[1],
+                              0.0 * display_center[2])
+        elif axis == 2:
+            display_center = (self.ds.domain_width[0]/2.0,
+                              0.0 * display_center[1],
+                              0.0 * display_center[2])
+        return center, display_center
+
     def sanitize_width(self, axis, width, depth):
-        if axis == 0:
-            # This is a slice along r
-            width = self.ds.domain_width[1], self.ds.domain_width[2]
-        else:
+        if width is not None:
             width = super(SphericalCoordinateHandler, self).sanitize_width(
               axis, width, depth)
+        elif axis == 0:
+            width = [self.ds.domain_width[self.x_axis[0]],
+                     self.ds.domain_width[self.y_axis[0]]]
+        elif axis == 1:
+            # Remember, in spherical coordinates when we cut in theta,
+            # we create a conic section
+            width = [2.0*self.ds.domain_width[0],
+                     2.0*self.ds.domain_width[0]]
+        elif axis == 2:
+            width = [self.ds.domain_width[0],
+                     2.0*self.ds.domain_width[0]]
         return width

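Restating the axis-dependent defaults introduced above for when no width is passed; `ds` stands in for a loaded spherical dataset:

    # axis 0 (r):     the (theta, phi) extent of the domain
    # axis 1 (theta): a slice at fixed theta is a conic section, so twice the
    #                 radial extent in both image directions
    # axis 2 (phi):   radial extent by twice the radial extent
    default_widths = {
        0: (ds.domain_width[1], ds.domain_width[2]),
        1: (2.0 * ds.domain_width[0], 2.0 * ds.domain_width[0]),
        2: (ds.domain_width[0], 2.0 * ds.domain_width[0]),
    }
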
diff -r 0cb8cf3daa533fc2fec1c1b75acedf833d777b24 -r 1270cdfff1084a35e03f9b97a742c088282406ae yt/utilities/fits_image.py
--- a/yt/utilities/fits_image.py
+++ b/yt/utilities/fits_image.py
@@ -305,7 +305,7 @@
     def __init__(self, ds, axis, fields, center="c", **kwargs):
         fields = ensure_list(fields)
         axis = fix_axis(axis, ds)
-        center = ds.coordinates.sanitize_center(center)
+        center = ds.coordinates.sanitize_center(center, axis)
         slc = ds.slice(axis, center[axis], **kwargs)
         w, frb = construct_image(slc)
         super(FITSSlice, self).__init__(frb, fields=fields, wcs=w)

diff -r 0cb8cf3daa533fc2fec1c1b75acedf833d777b24 -r 1270cdfff1084a35e03f9b97a742c088282406ae yt/visualization/plot_window.py
--- a/yt/visualization/plot_window.py
+++ b/yt/visualization/plot_window.py
@@ -77,24 +77,13 @@
     from pyparsing import ParseFatalException
 
 def get_window_parameters(axis, center, width, ds):
-    display_center, center = ds.coordinates.sanitize_center(center)
-    width = ds.coordinates.sanitize_width(axis, width, None)
-    if ds.geometry == "cartesian" or ds.geometry == "spectral_cube":
+    if ds.geometry in ("cartesian", "spectral_cube", "spherical"):
         width = ds.coordinates.sanitize_width(axis, width, None)
-        center = ds.coordinates.sanitize_center(center)
+        center, display_center = ds.coordinates.sanitize_center(center, axis)
     elif ds.geometry in ("polar", "cylindrical"):
         # Set our default width to be the full domain
         width = [ds.domain_right_edge[0]*2.0, ds.domain_right_edge[0]*2.0]
         center = ds.arr([0.0, 0.0, 0.0], "code_length")
-    elif ds.geometry == "spherical":
-        if axis == 0:
-            width = ds.domain_width[1], ds.domain_width[2]
-            center = 0.5*(ds.domain_left_edge + ds.domain_right_edge)
-            center.convert_to_units("code_length")
-        else:
-            # Our default width here is the full domain
-            width = [ds.domain_right_edge[0]*2.0, ds.domain_right_edge[0]*2.0]
-            center = ds.arr([0.0, 0.0, 0.0], "code_length")
     elif ds.geometry == "geographic":
         c_r = ((ds.domain_right_edge + ds.domain_left_edge)/2.0)[2]
         center = ds.arr([0.0, 0.0, c_r], "code_length")
@@ -116,7 +105,7 @@
     return (bounds, center, display_center)
 
 def get_oblique_window_parameters(normal, center, width, ds, depth=None):
-    display_center, center = ds.coordinates.sanitize_center(center)
+    display_center, center = ds.coordinates.sanitize_center(center, axis)
     width = ds.coordinates.sanitize_width(normal, width, depth)
 
     if len(width) == 2:


https://bitbucket.org/yt_analysis/yt/commits/776082b1ecd0/
Changeset:   776082b1ecd0
Branch:      yt-3.0
User:        MatthewTurk
Date:        2014-08-06 22:01:01
Summary:     Capital R and show the sin factor.
Affected #:  1 file

diff -r 1270cdfff1084a35e03f9b97a742c088282406ae -r 776082b1ecd0b7882d4ddb448ee634342f2f2b77 yt/geometry/coordinates/spherical_coordinates.py
--- a/yt/geometry/coordinates/spherical_coordinates.py
+++ b/yt/geometry/coordinates/spherical_coordinates.py
@@ -166,8 +166,8 @@
         # non-Cartesian coordinates, we usually want to override these for
         # Cartesian coordinates, since we transform them.
         rv = {0: ('theta', 'phi'),
-              1: ('x', 'y'),
-              2: ('r', 'z')}
+              1: ('x / \\sin(\\theta)', 'y / \\sin(\\theta)'),
+              2: ('R', 'z')}
         for i in rv.keys():
             rv[self.axis_name[i]] = rv[i]
             rv[self.axis_name[i].upper()] = rv[i]

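The relabeling above presumably reflects the slice geometry: for a cut at fixed theta the pixelized plane is spanned by r cos(phi) and r sin(phi), which are the Cartesian x and y divided by sin(theta), while a cut at fixed phi is naturally labeled by the cylindrical radius R and z.  In the usual spherical conventions,

    x = r \sin\theta \cos\phi, \quad y = r \sin\theta \sin\phi, \quad z = r \cos\theta
    R = r \sin\theta \quad\Rightarrow\quad r\cos\phi = x / \sin\theta, \; r\sin\phi = y / \sin\theta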

https://bitbucket.org/yt_analysis/yt/commits/abf5b82c5f9c/
Changeset:   abf5b82c5f9c
Branch:      yt-3.0
User:        MatthewTurk
Date:        2014-09-03 17:20:13
Summary:     Merging with upstream
Affected #:  117 files

diff -r 776082b1ecd0b7882d4ddb448ee634342f2f2b77 -r abf5b82c5f9ccef6103e5aec0de20d810e75f707 doc/helper_scripts/show_fields.py
--- a/doc/helper_scripts/show_fields.py
+++ b/doc/helper_scripts/show_fields.py
@@ -67,17 +67,20 @@
 Field List
 ==========
 
-This is a list of many of the fields available in ``yt``.  We have attempted to
-include most of the fields that are accessible through the plugin system, as well as
-the fields that are known by the frontends, however it is possible to generate many more
-permutations, particularly through vector operations. For more information about the fields
-framework, see :ref:`fields`.
+This is a list of many of the fields available in yt.  We have attempted to
+include most of the fields that are accessible through the plugin system, as 
+well as the fields that are known by the frontends; however, it is possible to 
+generate many more permutations, particularly through vector operations. For 
+more information about the fields framework, see :ref:`fields`.
 
-Some fields are recognized by specific frontends only. These are typically fields like density
-and temperature that have their own names and units in the different frontend datasets. Often,
-these fields are aliased to their ``yt``-named counterpart fields. For example, in the ``FLASH``
-frontend, the ``dens`` field is aliased to the ``yt`` field ``density``, ``velx`` is aliased to
-``velocity_x``, and so on. In what follows, if a field is aliased it will be noted.
+Some fields are recognized by specific frontends only. These are typically 
+fields like density and temperature that have their own names and units in 
+the different frontend datasets. Often, these fields are aliased to their 
+yt-named counterpart fields (typically 'gas' field types). For example, in 
+the ``FLASH`` frontend, the ``dens`` field (i.e. ``(flash, dens)``) is aliased 
+to the gas field density (i.e. ``(gas, density)``); similarly, ``(flash, velx)`` 
+is aliased to ``(gas, velocity_x)``, and so on. In what follows, if a field 
+is aliased it will be noted.
 
 Try using the ``ds.field_list`` and ``ds.derived_field_list`` to view the
 native and derived fields available for your dataset respectively. For example
@@ -93,19 +96,35 @@
 To figure out what all of the field types here mean, see
 :ref:`known-field-types`.
 
-.. _yt_fields:
+.. contents:: Table of Contents
+   :depth: 1
+   :local:
+   :backlinks: none
 
-Fields Generated by ``yt``
-++++++++++++++++++++++++++
+.. _yt-fields:
+
+Universal Fields
+----------------
+"""
+
+footer = """
+
+Index of Fields
+---------------
+
+.. contents:: 
+   :depth: 3
+   :backlinks: none
 
 """
-
 print header
 
 seen = []
 
-def fix_units(units):
+def fix_units(units, in_cgs=False):
     unit_object = Unit(units, registry=ds.unit_registry)
+    if in_cgs:
+        unit_object = unit_object.get_cgs_equivalent()
     latex = unit_object.latex_representation()
     return latex.replace('\/','~')
 
@@ -115,10 +134,17 @@
         f = df._function
         s = "%s" % (df.name,)
         print s
-        print "-" * len(s)
+        print "^" * len(s)
         print
         if len(df.units) > 0:
-            print "   * Units: :math:`%s`" % fix_units(df.units)
+            # Most universal fields are in CGS except for these special fields
+            if df.name[1] in ['particle_position', 'particle_position_x', \
+                         'particle_position_y', 'particle_position_z', \
+                         'entropy', 'kT', 'metallicity', 'dx', 'dy', 'dz',\
+                         'cell_volume', 'x', 'y', 'z']:
+                print "   * Units: :math:`%s`" % fix_units(df.units)
+            else:
+                print "   * Units: :math:`%s`" % fix_units(df.units, in_cgs=True)
         print "   * Particle Type: %s" % (df.particle_type)
         print
         print "**Field Source**"
@@ -145,7 +171,7 @@
         ftype = "'"+ftype+"'"
     s = "(%s, '%s')" % (ftype, name)
     print s
-    print "-" * len(s)
+    print "^" * len(s)
     print
     if len(units) > 0:
         print "   * Units: :math:`\mathrm{%s}`" % fix_units(units)
@@ -160,9 +186,20 @@
     this_f = getattr(frontends_module, frontend)
     field_info_names = [fi for fi in dir(this_f) if "FieldInfo" in fi]
     dataset_names = [dset for dset in dir(this_f) if "Dataset" in dset]
+
     if frontend == "sph":
         field_info_names = \
           ['TipsyFieldInfo' if 'Tipsy' in d else 'SPHFieldInfo' for d in dataset_names]
+    elif frontend == "boxlib":
+        field_info_names = []
+        for d in dataset_names:
+            if "Maestro" in d:  
+                field_info_names.append("MaestroFieldInfo")
+            elif "Castro" in d: 
+                field_info_names.append("CastroFieldInfo")
+            else: 
+                field_info_names.append("BoxlibFieldInfo")
+
     for dset_name, fi_name in zip(dataset_names, field_info_names):
         fi = getattr(this_f, fi_name)
         nfields = 0
@@ -182,7 +219,7 @@
             print ".. _%s_specific_fields:\n" % dset_name.replace("Dataset", "")
             h = "%s-Specific Fields" % dset_name.replace("Dataset", "")
             print h
-            print "+" * len(h) + "\n"
+            print "-" * len(h) + "\n"
             for field in known_other_fields:
                 print_frontend_field(frontend, field, False)
             for field in known_particle_fields:
@@ -190,3 +227,5 @@
                     print_frontend_field("particle_type", field, True)
                 else:
                     print_frontend_field("io", field, True)
+
+print footer

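The new in_cgs flag on fix_units converts the unit to its CGS equivalent before rendering it as LaTeX. A hedged usage sketch; the bare-string Unit construction (without ds.unit_registry) is an assumption for illustration:

    from yt.units.unit_object import Unit

    def fix_units_sketch(units, in_cgs=False):
        unit_object = Unit(units)   # the real script passes registry=ds.unit_registry
        if in_cgs:
            unit_object = unit_object.get_cgs_equivalent()
        latex = unit_object.latex_representation()
        return latex.replace('\/', '~')   # same spacing fix-up as the helper script

    # e.g. fix_units_sketch("erg/g", in_cgs=True)
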
diff -r 776082b1ecd0b7882d4ddb448ee634342f2f2b77 -r abf5b82c5f9ccef6103e5aec0de20d810e75f707 doc/install_script.sh
--- a/doc/install_script.sh
+++ b/doc/install_script.sh
@@ -16,7 +16,7 @@
 
 DEST_SUFFIX="yt-`uname -m`"
 DEST_DIR="`pwd`/${DEST_SUFFIX/ /}"   # Installation location
-BRANCH="yt-3.0" # This is the branch to which we will forcibly update.
+BRANCH="yt" # This is the branch to which we will forcibly update.
 
 if [ ${REINST_YT} ] && [ ${REINST_YT} -eq 1 ] && [ -n ${YT_DEST} ]
 then
@@ -500,13 +500,28 @@
     fi
     [ ! -e $LIB/extracted ] && tar xfz $LIB.tar.gz
     touch $LIB/extracted
+    BUILD_ARGS=""
+    case $LIB in
+        *h5py*)
+            BUILD_ARGS="--hdf5=${HDF5_DIR}"
+            ;;
+        *numpy*)
+            if [ -e ${DEST_DIR}/lib/python2.7/site-packages/numpy/__init__.py ]
+            then
+                VER=$(${DEST_DIR}/bin/python -c 'from distutils.version import StrictVersion as SV; \
+                                                 import numpy; print SV(numpy.__version__) < SV("1.8.0")')
+                if [ $VER == "True" ]
+                then
+                    echo "Removing previous NumPy instance (see issue #889)"
+                    rm -rf ${DEST_DIR}/lib/python2.7/site-packages/{numpy*,*.pth}
+                fi
+            fi
+            ;;
+        *)
+            ;;
+    esac
     cd $LIB
-    if [ ! -z `echo $LIB | grep h5py` ]
-    then
-	( ${DEST_DIR}/bin/python2.7 setup.py build --hdf5=${HDF5_DIR} $* 2>&1 ) 1>> ${LOG_FILE} || do_exit
-    else
-        ( ${DEST_DIR}/bin/python2.7 setup.py build   $* 2>&1 ) 1>> ${LOG_FILE} || do_exit
-    fi
+    ( ${DEST_DIR}/bin/python2.7 setup.py build ${BUILD_ARGS} $* 2>&1 ) 1>> ${LOG_FILE} || do_exit
     ( ${DEST_DIR}/bin/python2.7 setup.py install    2>&1 ) 1>> ${LOG_FILE} || do_exit
     touch done
     cd ..
@@ -580,56 +595,54 @@
 mkdir -p ${DEST_DIR}/src
 cd ${DEST_DIR}/src
 
-CYTHON='Cython-0.19.1'
-FORTHON='Forthon-0.8.11'
+CYTHON='Cython-0.20.2'
 PYX='PyX-0.12.1'
-PYTHON='Python-2.7.6'
+PYTHON='Python-2.7.8'
 BZLIB='bzip2-1.0.6'
 FREETYPE_VER='freetype-2.4.12'
-H5PY='h5py-2.1.3'
+H5PY='h5py-2.3.1'
 HDF5='hdf5-1.8.11'
-IPYTHON='ipython-2.1.0'
+IPYTHON='ipython-2.2.0'
 LAPACK='lapack-3.4.2'
 PNG=libpng-1.6.3
-MATPLOTLIB='matplotlib-1.3.0'
-MERCURIAL='mercurial-3.0'
-NOSE='nose-1.3.0'
-NUMPY='numpy-1.7.1'
+MATPLOTLIB='matplotlib-1.4.0'
+MERCURIAL='mercurial-3.1'
+NOSE='nose-1.3.4'
+NUMPY='numpy-1.8.2'
 PYTHON_HGLIB='python-hglib-1.0'
-PYZMQ='pyzmq-13.1.0'
+PYZMQ='pyzmq-14.3.1'
 ROCKSTAR='rockstar-0.99.6'
-SCIPY='scipy-0.12.0'
+SCIPY='scipy-0.14.0'
 SQLITE='sqlite-autoconf-3071700'
-SYMPY='sympy-0.7.3'
-TORNADO='tornado-3.1'
-ZEROMQ='zeromq-3.2.4'
+SYMPY='sympy-0.7.5'
+TORNADO='tornado-4.0.1'
+ZEROMQ='zeromq-4.0.4'
 ZLIB='zlib-1.2.8'
 
 # Now we dump all our SHA512 files out.
-echo '9dcdda5b2ee2e63c2d3755245b7b4ed2f4592455f40feb6f8e86503195d9474559094ed27e789ab1c086d09da0bb21c4fe844af0e32a7d47c81ff59979b18ca0  Cython-0.19.1.tar.gz' > Cython-0.19.1.tar.gz.sha512
-echo '3f53d0b474bfd79fea2536d0a9197eaef6c0927e95f2f9fd52dbd6c1d46409d0e649c21ac418d8f7767a9f10fe6114b516e06f2be4b06aec3ab5bdebc8768220  Forthon-0.8.11.tar.gz' > Forthon-0.8.11.tar.gz.sha512
+echo '118e3ebd76f50bda8187b76654e65caab2c2c403df9b89da525c2c963dedc7b38d898ae0b92d44b278731d969a891eb3f7b5bcc138cfe3e037f175d4c87c29ec  Cython-0.20.2.tar.gz' > Cython-0.20.2.tar.gz.sha512
 echo '4941f5aa21aff3743546495fb073c10d2657ff42b2aff401903498638093d0e31e344cce778980f28a7170c6d29eab72ac074277b9d4088376e8692dc71e55c1  PyX-0.12.1.tar.gz' > PyX-0.12.1.tar.gz.sha512
-echo '3df0ba4b1cfef5f02fb27925de4c2ca414eca9000af6a3d475d39063720afe987287c3d51377e0a36b88015573ef699f700782e1749c7a357b8390971d858a79  Python-2.7.6.tgz' > Python-2.7.6.tgz.sha512
+echo '4b05f0a490ddee37e8fc7970403bb8b72c38e5d173703db40310e78140d9d5c5732789d69c68dbd5605a623e4582f5b9671f82b8239ecdb34ad4261019dace6a  Python-2.7.8.tgz' > Python-2.7.8.tgz.sha512
 echo '276bd9c061ec9a27d478b33078a86f93164ee2da72210e12e2c9da71dcffeb64767e4460b93f257302b09328eda8655e93c4b9ae85e74472869afbeae35ca71e  blas.tar.gz' > blas.tar.gz.sha512
 echo '00ace5438cfa0c577e5f578d8a808613187eff5217c35164ffe044fbafdfec9e98f4192c02a7d67e01e5a5ccced630583ad1003c37697219b0f147343a3fdd12  bzip2-1.0.6.tar.gz' > bzip2-1.0.6.tar.gz.sha512
 echo 'a296dfcaef7e853e58eed4e24b37c4fa29cfc6ac688def048480f4bb384b9e37ca447faf96eec7b378fd764ba291713f03ac464581d62275e28eb2ec99110ab6  reason-js-20120623.zip' > reason-js-20120623.zip.sha512
 echo '609a68a3675087e0cc95268574f31e104549daa48efe15a25a33b8e269a93b4bd160f4c3e8178dca9c950ef5ca514b039d6fd1b45db6af57f25342464d0429ce  freetype-2.4.12.tar.gz' > freetype-2.4.12.tar.gz.sha512
-echo '2eb7030f8559ff5cb06333223d98fda5b3a663b6f4a026949d1c423aa9a869d824e612ed5e1851f3bf830d645eea1a768414f73731c23ab4d406da26014fe202  h5py-2.1.3.tar.gz' > h5py-2.1.3.tar.gz.sha512
+echo 'f0da1d2ac855c02fb828444d719a1b23a580adb049335f3e732ace67558a125ac8cd3b3a68ac6bf9d10aa3ab19e4672b814eb28cc8c66910750c62efb655d744  h5py-2.3.1.tar.gz' > h5py-2.3.1.tar.gz.sha512
 echo 'e9db26baa297c8ed10f1ca4a3fcb12d6985c6542e34c18d48b2022db73014f054c8b8434f3df70dcf44631f38b016e8050701d52744953d0fced3272d7b6b3c1  hdf5-1.8.11.tar.gz' > hdf5-1.8.11.tar.gz.sha512
-echo '68c15f6402cacfd623f8e2b70c22d06541de3616fdb2d502ce93cd2fdb4e7507bb5b841a414a4123264221ee5ffb0ebefbb8541f79e647fcb9f73310b4c2d460  ipython-2.1.0.tar.gz' > ipython-2.1.0.tar.gz.sha512
+echo '4953bf5e9d6d5c6ad538d07d62b5b100fd86a37f6b861238501581c0059bd4655345ca05cf395e79709c38ce4cb9c6293f5d11ac0252a618ad8272b161140d13  ipython-2.2.0.tar.gz' > ipython-2.2.0.tar.gz.sha512
 echo '8770214491e31f0a7a3efaade90eee7b0eb20a8a6ab635c5f854d78263f59a1849133c14ef5123d01023f0110cbb9fc6f818da053c01277914ae81473430a952  lapack-3.4.2.tar.gz' > lapack-3.4.2.tar.gz.sha512
 echo '887582e5a22e4cde338aa8fec7a89f6dd31f2f02b8842735f00f970f64582333fa03401cea6d01704083403c7e8b7ebc26655468ce930165673b33efa4bcd586  libpng-1.6.3.tar.gz' > libpng-1.6.3.tar.gz.sha512
-echo '990e3a155ca7a9d329c41a43b44a9625f717205e81157c668a8f3f2ad5459ed3fed8c9bd85e7f81c509e0628d2192a262d4aa30c8bfc348bb67ed60a0362505a  matplotlib-1.3.0.tar.gz' > matplotlib-1.3.0.tar.gz.sha512
-echo '8cd387ea0d74d5ed01b58d5ef8e3fb408d4b05f7deb45a02e34fbb931fd920aafbfcb3a9b52a027ebcdb562837198637a0e51f2121c94e0fcf7f7d8c016f5342  mercurial-3.0.tar.gz' > mercurial-3.0.tar.gz.sha512
-echo 'a3b8060e415560a868599224449a3af636d24a060f1381990b175dcd12f30249edd181179d23aea06b0c755ff3dc821b7a15ed8840f7855530479587d4d814f4  nose-1.3.0.tar.gz' > nose-1.3.0.tar.gz.sha512
-echo 'd58177f3971b6d07baf6f81a2088ba371c7e43ea64ee7ada261da97c6d725b4bd4927122ac373c55383254e4e31691939276dab08a79a238bfa55172a3eff684  numpy-1.7.1.tar.gz' > numpy-1.7.1.tar.gz.sha512
+echo '60aa386639dec17b4f579955df60f2aa7c8ccd589b3490bb9afeb2929ea418d5d1a36a0b02b8d4a6734293076e9069429956c56cf8bd099b756136f2657cf9d4  matplotlib-1.4.0.tar.gz' > matplotlib-1.4.0.tar.gz.sha512
+echo '1ee2fe7a241bf81087e55d9e4ee8fa986f41bb0655d4828d244322c18f3958a1f3111506e2df15aefcf86100b4fe530fcab2d4c041b5945599ed3b3a889d50f5  mercurial-3.1.tar.gz' > mercurial-3.1.tar.gz.sha512
+echo '19499ab08018229ea5195cdac739d6c7c247c5aa5b2c91b801cbd99bad12584ed84c5cfaaa6fa8b4893a46324571a2f8a1988a1381f4ddd58390e597bd7bdc24  nose-1.3.4.tar.gz' > nose-1.3.4.tar.gz.sha512
+echo '996e6b8e2d42f223e44660f56bf73eb8ab124f400d89218f8f5e4d7c9860ada44a4d7c54526137b0695c7a10f36e8834fbf0d42b7cb20bcdb5d5c245d673385c  numpy-1.8.2.tar.gz' > numpy-1.8.2.tar.gz.sha512
 echo '9c0a61299779aff613131aaabbc255c8648f0fa7ab1806af53f19fbdcece0c8a68ddca7880d25b926d67ff1b9201954b207919fb09f6a290acb078e8bbed7b68  python-hglib-1.0.tar.gz' > python-hglib-1.0.tar.gz.sha512
-echo 'c65013293dd4049af5db009fdf7b6890a3c6b1e12dd588b58fb5f5a5fef7286935851fb7a530e03ea16f28de48b964e50f48bbf87d34545fd23b80dd4380476b  pyzmq-13.1.0.tar.gz' > pyzmq-13.1.0.tar.gz.sha512
-echo '80c8e137c3ccba86575d4263e144ba2c4684b94b5cd620e200f094c92d4e118ea6a631d27bdb259b0869771dfaeeae68c0fdd37fdd740b9027ee185026e921d4  scipy-0.12.0.tar.gz' > scipy-0.12.0.tar.gz.sha512
+echo '3d93a8fbd94fc3f1f90df68257cda548ba1adf3d7a819e7a17edc8681894003ac7ae6abd319473054340c11443a6a3817b931366fd7dae78e3807d549c544f8b  pyzmq-14.3.1.tar.gz' > pyzmq-14.3.1.tar.gz.sha512
+echo 'ad1278740c1dc44c5e1b15335d61c4552b66c0439325ed6eeebc5872a1c0ba3fce1dd8509116b318d01e2d41da2ee49ec168da330a7fafd22511138b29f7235d  scipy-0.14.0.tar.gz' > scipy-0.14.0.tar.gz.sha512
 echo '96f3e51b46741450bc6b63779c10ebb4a7066860fe544385d64d1eda52592e376a589ef282ace2e1df73df61c10eab1a0d793abbdaf770e60289494d4bf3bcb4  sqlite-autoconf-3071700.tar.gz' > sqlite-autoconf-3071700.tar.gz.sha512
-echo '2992baa3edfb4e1842fb642abf0bf0fc0bf56fc183aab8fed6b3c42fbea928fa110ede7fdddea2d63fc5953e8d304b04da433dc811134fadefb1eecc326121b8  sympy-0.7.3.tar.gz' > sympy-0.7.3.tar.gz.sha512
-echo '101544db6c97beeadc5a02b2ef79edefa0a07e129840ace2e4aa451f3976002a273606bcdc12d6cef5c22ff4c1c9dcf60abccfdee4cbef8e3f957cd25c0430cf  tornado-3.1.tar.gz' > tornado-3.1.tar.gz.sha512
-echo 'd8eef84860bc5314b42a2cc210340572a9148e008ea65f7650844d0edbe457d6758785047c2770399607f69ba3b3a544db9775a5cdf961223f7e278ef7e0f5c6  zeromq-3.2.4.tar.gz' > zeromq-3.2.4.tar.gz.sha512
+echo '8a46e75abc3ed2388b5da9cb0e5874ae87580cf3612e2920b662d8f8eee8047efce5aa998eee96661d3565070b1a6b916c8bed74138b821f4e09115f14b6677d  sympy-0.7.5.tar.gz' > sympy-0.7.5.tar.gz.sha512
+echo 'a4e0231e77ebbc2885bab648b292b842cb15c84d66a1972de18cb00fcc611eae2794b872f070ab7d5af32dd0c6c1773527fe1332bd382c1821e1f2d5d76808fb  tornado-4.0.1.tar.gz' > tornado-4.0.1.tar.gz.sha512
+echo '7d70855d0537971841810a66b7a943a88304f6991ce445df19eea034aadc53dbce9d13be92bf44cfef1f3e19511a754eb01006a3968edc1ec3d1766ea4730cda  zeromq-4.0.4.tar.gz' > zeromq-4.0.4.tar.gz.sha512
 echo 'ece209d4c7ec0cb58ede791444dc754e0d10811cbbdebe3df61c0fd9f9f9867c1c3ccd5f1827f847c005e24eef34fb5bf87b5d3f894d75da04f1797538290e4a  zlib-1.2.8.tar.gz' > zlib-1.2.8.tar.gz.sha512
 # Individual processes
 [ -z "$HDF5_DIR" ] && get_ytproject $HDF5.tar.gz
@@ -653,7 +666,6 @@
 get_ytproject $H5PY.tar.gz
 get_ytproject $CYTHON.tar.gz
 get_ytproject reason-js-20120623.zip
-get_ytproject $FORTHON.tar.gz
 get_ytproject $NOSE.tar.gz
 get_ytproject $PYTHON_HGLIB.tar.gz
 get_ytproject $SYMPY.tar.gz
@@ -729,7 +741,7 @@
         cd $FREETYPE_VER
         ( ./configure CFLAGS=-I${DEST_DIR}/include --prefix=${DEST_DIR}/ 2>&1 ) 1>> ${LOG_FILE} || do_exit
         ( make 2>&1 ) 1>> ${LOG_FILE} || do_exit
-		( make install 2>&1 ) 1>> ${LOG_FILE} || do_exit
+        ( make install 2>&1 ) 1>> ${LOG_FILE} || do_exit
         ( make clean 2>&1) 1>> ${LOG_FILE} || do_exit
         touch done
         cd ..
@@ -932,7 +944,6 @@
 do_setup_py $IPYTHON
 do_setup_py $H5PY
 do_setup_py $CYTHON
-do_setup_py $FORTHON
 do_setup_py $NOSE
 do_setup_py $PYTHON_HGLIB
 do_setup_py $SYMPY
@@ -1026,7 +1037,7 @@
     echo
     echo "To get started with yt, check out the orientation:"
     echo
-    echo "    http://yt-project.org/doc/bootcamp/"
+    echo "    http://yt-project.org/doc/quickstart/"
     echo
     echo "The source for yt is located at:"
     echo "    $YT_DIR"

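The NumPy branch added to the install script shells out to Python to decide whether an existing NumPy predates 1.8.0 and must be removed before rebuilding (issue #889). The inline check is equivalent to:

    # Same comparison the installer runs inline via ${DEST_DIR}/bin/python.
    from distutils.version import StrictVersion as SV
    import numpy
    print(SV(numpy.__version__) < SV("1.8.0"))   # "True" triggers removal of the old install
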
diff -r 776082b1ecd0b7882d4ddb448ee634342f2f2b77 -r abf5b82c5f9ccef6103e5aec0de20d810e75f707 doc/source/_static/custom.css
--- a/doc/source/_static/custom.css
+++ b/doc/source/_static/custom.css
@@ -39,6 +39,13 @@
         padding-top: 10px;
         padding-bottom: 10px;
     }
+    /* since 3.1.0 */
+    .navbar-collapse.collapse.in { 
+        display: block!important;
+    }
+    .collapsing {
+        overflow: hidden!important;
+    }
 }
 
 /* 
@@ -85,7 +92,7 @@
 
 */
 
-*[id]:before { 
+*[id]:before :not(p) {
   display: block; 
   content: " "; 
   margin-top: -45px; 

diff -r 776082b1ecd0b7882d4ddb448ee634342f2f2b77 -r abf5b82c5f9ccef6103e5aec0de20d810e75f707 doc/source/analyzing/analysis_modules/fitting_procedure.rst
--- a/doc/source/analyzing/analysis_modules/fitting_procedure.rst
+++ /dev/null
@@ -1,138 +0,0 @@
-.. _fitting_procedure:
-
-Procedure for Generating Fits
-=============================
-.. sectionauthor:: Hilary Egan <hilary.egan at colorado.edu>
-
-To generate a fit for a spectrum :py:func:`generate_total_fit()` is called.
-This function controls the identification of line complexes, the fit
-of a series of absorption lines for each appropriate species, checks of
-those fits, and returns the results of the fits.
-
-
-Finding Line Complexes
-----------------------
-Line complexes are found using the :py:func:`find_complexes` function. The
-process by which line complexes are found involves walking through
-the array of flux in order from minimum to maximum wavelength, and finding
-series of spatially contiguous cells whose flux is less than some limit.
-These regions are then checked in terms of an additional flux limit and size.
-The bounds of all the passing regions are then listed and returned. Those
-bounds that cover an exceptionally large region of wavelength space will be
-broken up if a suitable cut point is found. This method is only appropriate
-for noiseless spectra.
-
-The optional parameter **complexLim** (default = 0.999), controls the limit
-that triggers the identification of a spatially contiguous region of flux
-that could be a line complex. This number should be very close to 1 but not
-exactly equal. It should also be at least an order of magnitude closer to 1
-than the later discussed **fitLim** parameter, because a line complex where
-the flux of the trough is very close to the flux of the edge can be incredibly
-unstable when optimizing.
-
-The **fitLim** parameter controls what is the maximum flux that the trough
-of the region can have and still be considered a line complex. This 
-effectively controls the sensitivity to very low column absorbers. Default
-value is **fitLim** = 0.99. If a region is identified where the flux of the trough
-is greater than this value, the region is simply ignored.
-
-The **minLength** parameter controls the minimum number of array elements 
-that an identified region must have. This value must be greater than or
-equal to 3 as there are a minimum of 3 free parameters that must be fit.
-Default is **minLength** = 3.
-
-The **maxLength** parameter controls the maximum number of array elements
-that an identified region can have before it is split into separate regions.
-Default is **maxLength** = 1000. This should be adjusted based on the 
-resolution of the spectrum to remain appropriate. The value correspond
-to a wavelength of roughly 50 angstroms. 
-
-The **splitLim** parameter controls how exceptionally large regions are split.
-When such a region is identified by having more array elements than
-**maxLength**, the point of maximum flux (or minimum absorption) in the 
-middle two quartiles is identified. If that point has a flux greater than
-or equal to **splitLim**, then two separate complexes are created: one from
-the lower wavelength edge to the minimum absorption point and the other from
-the minimum absorption point to the higher wavelength edge. The default
-value is **splitLim** =.99, but it should not drastically affect results, so
-long as the value is reasonably close to 1.
-
-
-Fitting a Line Complex
-----------------------
-
-After a complex is identified, it is fitted by iteratively adding and 
-optimizing a set of Voigt Profiles for a particular species until the
-region is considered successfully fit. The optimizing is accomplished
-using scipy's least squares optimizer. This requires an initial estimate
-of the parameters to be fit (column density, b-value, redshift) for each
-line.
-
-Each time a line is added, the guess of the parameters is based on
-the difference between the line complex and the fit so far. For the first line
-this just means the initial guess is based solely on the flux of the line
-complex. The column density is given by the initial column density given
-in the species parameters dictionary. If the line is saturated (some portion
-of the flux with a value less than .1) than the larger initial column density
-guess is chosen. If the flux is relatively high (all values >.9) than the
-smaller initial guess is given. These values are chosen to make optimization
-faster and more stable by being closer to the actual value, but the final
-results of fitting should not depend on them as they merely provide a
-starting point. 
-
-After the parameters for a line are optimized for the first time, the 
-optimized parameters are then used for the initial guess on subsequent 
-iterations with more lines. 
-
-The complex is considered successfully fit when the sum of the squares of 
-the difference between the flux generated from the fit and the desired flux
-profile is less than **errBound**. **errBound** is related to the optional
-parameter to :py:func:`generate_total_fit()`, **maxAvgError** by the number
-of array elements in the region such that **errBound** = number of elements *
-**maxAvgError**.
-
-There are several other conditions under which the cycle of adding and 
-optimizing lines will halt. If the error of the optimized fit from adding
-a line is an order of magnitude worse than the error of the fit without
-that line, then it is assumed that the fitting has become unstable and 
-the latest line is removed. Lines are also prevented from being added if
-the total number of lines is greater than the number of elements in the flux
-array being fit divided by 3. This is because there must not be more free
-parameters in a fit than the number of points to constrain them. 
-
-
-Checking Fit Results
---------------------
-
-After an acceptable fit for a region is determined, there are several steps
-the algorithm must go through to validate the fits. 
-
-First, the parameters must be in a reasonable range. This is a check to make 
-sure that the optimization did not become unstable and generate a fit that
-diverges wildly outside the region where the fit was performed. This way, even
-if particular complex cannot be fit, the rest of the spectrum fitting still
-behaves as expected. The range of acceptability for each parameter is given
-in the species parameter dictionary. These are merely broad limits that will
-prevent numerical instability rather than physical limits.
-
-In cases where a single species generates multiple lines (as in the OVI 
-doublet), the fits are then checked for higher wavelength lines. Originally
-the fits are generated only considering the lowest wavelength fit to a region.
-This is because we perform the fitting of complexes in order from the lowest
-wavelength to the highest, so any contribution to a complex being fit must
-come from the lower wavelength as the higher wavelength contributions would
-already have been subtracted out after fitting the lower wavelength. 
-
-Saturated Lyman Alpha Fitting Tools
------------------------------------
-
-In cases where a large or saturated line (there exists a point in the complex
-where the flux is less than .1) fails to be fit properly at first pass, a
-more robust set of fitting tools is used to try and remedy the situation.
-The basic approach is to simply try a much wider range of initial parameter
-guesses in order to find the true optimization minimum, rather than getting
-stuck in a local minimum. A set of hard coded initial parameter guesses
-for Lyman alpha lines is given by the function :py:func:`get_test_lines`. 
-Also included in these parameter guesses is an an initial guess of a high
-column cool line overlapping a lower column warm line, indictive of a 
-broad Lyman alpha (BLA) absorber.

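The removed page above describes find_complexes as a walk through the flux array that collects contiguous runs below complexLim, keeping only those whose trough dips below fitLim and that span at least minLength elements. A minimal illustrative sketch of that idea (not the actual find_complexes implementation; parameter names follow the removed text):

    import numpy as np

    def sketch_find_complexes(flux, complexLim=0.999, fitLim=0.99, minLength=3):
        # Walk the flux array and collect contiguous runs below complexLim.
        flux = np.asarray(flux)
        below = flux < complexLim
        bounds, start = [], None
        for i, flag in enumerate(below):
            if flag and start is None:
                start = i
            elif not flag and start is not None:
                bounds.append((start, i))
                start = None
        if start is not None:
            bounds.append((start, len(flux)))
        # Keep runs that are long enough and whose trough is deep enough.
        return [(lo, hi) for lo, hi in bounds
                if hi - lo >= minLength and flux[lo:hi].min() <= fitLim]
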
diff -r 776082b1ecd0b7882d4ddb448ee634342f2f2b77 -r abf5b82c5f9ccef6103e5aec0de20d810e75f707 doc/source/analyzing/analysis_modules/index.rst
--- a/doc/source/analyzing/analysis_modules/index.rst
+++ b/doc/source/analyzing/analysis_modules/index.rst
@@ -17,4 +17,4 @@
    two_point_functions
    clump_finding
    particle_trajectories
-   ellipsoidal_analysis
+   ellipsoid_analysis

diff -r 776082b1ecd0b7882d4ddb448ee634342f2f2b77 -r abf5b82c5f9ccef6103e5aec0de20d810e75f707 doc/source/analyzing/fields.rst
--- a/doc/source/analyzing/fields.rst
+++ b/doc/source/analyzing/fields.rst
@@ -20,7 +20,8 @@
 for datasets containing multiple different types of fluid fields, mesh fields,
 particles (with overlapping or disjoint lists of fields).  To enable accessing
 these fields in a meaningful, simple way, the mechanism for accessing them has
-changed to take an optional *field type* in addition to the *field name*.
+changed to take an optional *field type* in addition to the *field name* of
+the form ('*field type*', '*field name*').
 
 As an example, we may be in a situation where we have multiple types of particles
 which possess the ``particle_position`` field.  In the case where a data
@@ -99,17 +100,18 @@
 should be returned in.  If an aliased field is requested (and aliased fields 
 will always be lowercase, with underscores separating words) it will be returned 
 in CGS units (future versions will enable global defaults to be set for MKS and 
-other unit systems), whereas if the underlying field is requested, it will not 
-undergo any unit conversions from its natural units.  (This rule is occasionally 
-violated for fields which are mesh-dependent, specifically particle masses in 
-some cosmology codes.)
+other unit systems), whereas if the frontend-specific field is requested, it 
+will not undergo any unit conversions from its natural units.  (This rule is 
+occasionally violated for fields which are mesh-dependent, specifically particle 
+masses in some cosmology codes.)
 
-.. _known_field_types:
+.. _known-field-types:
 
 Field types known to yt
 -----------------------
 
-yt knows of a few different field types:
+Recall that fields are formally accessed in two parts: ('*field type*', 
+'*field name*').  Here we describe the different field types you will encounter:
 
 * frontend-name -- Mesh or fluid fields that exist on-disk default to having
   the name of the frontend as their type name (e.g., ``enzo``, ``flash``,
@@ -140,6 +142,14 @@
   density estimates, counts, and the like.  See :ref:`deposited-particle-fields` 
   for more information.
 
+While it is best to be explicit and access fields by their full names 
+(i.e. ('*field type*', '*field name*')), yt provides an abbreviated 
+interface for accessing common fields (i.e. '*field name*').  In the abbreviated
+case, yt will assume you want the last *field type* accessed.  If you
+haven't previously accessed a *field type*, it will default to *field type* = 
+``'all'`` in the case of particle fields and *field type* = ``'gas'`` in the 
+case of mesh fields.
+
 Field Plugins
 -------------
 

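To make the abbreviated interface described above concrete, here is a short sketch of the two access styles; it assumes the IsolatedGalaxy sample dataset (used elsewhere in these docs) is available locally:

    import yt

    ds = yt.load("IsolatedGalaxy/galaxy0030/galaxy0030")
    ad = ds.all_data()

    # Explicit, fully-qualified access: ('field type', 'field name')
    rho_explicit = ad["gas", "density"]

    # Abbreviated access: yt falls back to the last field type used
    # (or 'gas' for mesh fields if none has been accessed yet).
    rho_short = ad["density"]
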
diff -r 776082b1ecd0b7882d4ddb448ee634342f2f2b77 -r abf5b82c5f9ccef6103e5aec0de20d810e75f707 doc/source/analyzing/particle_filter.ipynb
--- a/doc/source/analyzing/particle_filter.ipynb
+++ b/doc/source/analyzing/particle_filter.ipynb
@@ -1,7 +1,7 @@
 {
  "metadata": {
   "name": "",
-  "signature": "sha256:4d705a81671d5692ed6691b3402115edbe9c98af815af5bb160ddf551bf02c76"
+  "signature": "sha256:427da1e1d02deb543246218dc8cce991268b518b25cfdd5944a4a436695f874b"
  },
  "nbformat": 3,
  "nbformat_minor": 0,
@@ -40,11 +40,13 @@
      "source": [
       "We will filter these into young stars and old stars by masking on the ('Stars', 'creation_time') field. \n",
       "\n",
-      "In order to do this, we first make a function which applies our desired cut.  This function must accept two arguments: `pfilter` and `data`.  The second argument is a yt data container and is usually the only one used in a filter definition.\n",
+      "In order to do this, we first make a function which applies our desired cut.  This function must accept two arguments: `pfilter` and `data`.  The first argument is a `ParticleFilter` object that contains metadata about the filter its self.  The second argument is a yt data container.\n",
       "\n",
-      "Let's call \"young\" stars only those stars with ages less 5 million years.  Since Tipsy assigns a very large `creation_time` for stars in the initial conditions, we need to also exclude stars with negative ages.\n",
+      "Let's call \"young\" stars only those stars with ages less 5 million years.  Since Tipsy assigns a very large `creation_time` for stars in the initial conditions, we need to also exclude stars with negative ages. \n",
       "\n",
-      "Old stars either formed dynamically in the simulation (ages greater than 5 Myr) or were present in the initial conditions (negative ages)."
+      "Conversely, let's define \"old\" stars as those stars formed dynamically in the simulation with ages greater than 5 Myr.  We also include stars with negative ages, since these stars were included in the simulation initial conditions.\n",
+      "\n",
+      "We make use of `pfilter.filtered_type` so that the filter definition will use the same particle type as the one specified in the call to `add_particle_filter` below.  This makes the filter definition usable for arbitrary particle types.  Since we're only filtering the `\"Stars\"` particle type in this example, we could have also replaced `pfilter.filtered_type` with `\"Stars\"` and gotten the same result."
      ]
     },
     {
@@ -52,12 +54,12 @@
      "collapsed": false,
      "input": [
       "def young_stars(pfilter, data):\n",
-      "    age = data.ds.current_time - data[\"Stars\", \"creation_time\"]\n",
+      "    age = data.ds.current_time - data[pfilter.filtered_type, \"creation_time\"]\n",
       "    filter = np.logical_and(age.in_units('Myr') <= 5, age >= 0)\n",
       "    return filter\n",
       "\n",
       "def old_stars(pfilter, data):\n",
-      "    age = data.ds.current_time - data[\"Stars\", \"creation_time\"]\n",
+      "    age = data.ds.current_time - data[pfilter.filtered_type, \"creation_time\"]\n",
       "    filter = np.logical_or(age.in_units('Myr') >= 5, age < 0)\n",
       "    return filter"
      ],
@@ -140,4 +142,4 @@
    "metadata": {}
   }
  ]
-}
+}
\ No newline at end of file

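For completeness, the registration step the notebook alludes to ("the call to `add_particle_filter` below") would look roughly like the following; the Tipsy dataset path is a hypothetical placeholder:

    import numpy as np
    import yt

    def young_stars(pfilter, data):
        # Same cut as in the notebook: stars younger than 5 Myr, excluding negative ages.
        age = data.ds.current_time - data[pfilter.filtered_type, "creation_time"]
        return np.logical_and(age.in_units("Myr") <= 5, age >= 0)

    # Register the filter for the "Stars" particle type, then enable it on a dataset.
    yt.add_particle_filter("young_stars", function=young_stars,
                           filtered_type="Stars", requires=["creation_time"])

    ds = yt.load("TipsyGalaxy/galaxy.00300")  # hypothetical snapshot path
    ds.add_particle_filter("young_stars")
    ad = ds.all_data()
    print(ad["young_stars", "particle_mass"].sum())
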
diff -r 776082b1ecd0b7882d4ddb448ee634342f2f2b77 -r abf5b82c5f9ccef6103e5aec0de20d810e75f707 doc/source/analyzing/units/index.rst
--- a/doc/source/analyzing/units/index.rst
+++ b/doc/source/analyzing/units/index.rst
@@ -37,7 +37,7 @@
 .. note::
 
    The notebooks use sample datasets that are available for download at
-   http://yt-project.org/data.  See :ref:`bootcamp-introduction` for more
+   http://yt-project.org/data.  See :ref:`quickstart-introduction` for more
    details.
 
 Let us know if you would like to contribute other example notebooks, or have

diff -r 776082b1ecd0b7882d4ddb448ee634342f2f2b77 -r abf5b82c5f9ccef6103e5aec0de20d810e75f707 doc/source/bootcamp/1)_Introduction.ipynb
--- a/doc/source/bootcamp/1)_Introduction.ipynb
+++ /dev/null
@@ -1,72 +0,0 @@
-{
- "metadata": {
-  "name": "",
-  "signature": "sha256:39620670ce7751b23f30d2123fd3598de1c7843331f65de13e29f4ae9f759e0f"
- },
- "nbformat": 3,
- "nbformat_minor": 0,
- "worksheets": [
-  {
-   "cells": [
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "# Welcome to the yt bootcamp!\n",
-      "\n",
-      "In this brief tutorial, we'll go over how to load up data, analyze things, inspect your data, and make some visualizations.\n",
-      "\n",
-      "Our documentation page can provide information on a variety of the commands that are used here, both in narrative documentation as well as recipes for specific functionality in our cookbook.  The documentation exists at http://yt-project.org/doc/.  If you encounter problems, look for help here: http://yt-project.org/doc/help/index.html.\n",
-      "\n",
-      "## Acquiring the datasets for this tutorial\n",
-      "\n",
-      "If you are executing these tutorials interactively, you need some sample datasets on which to run the code.  You can download these datasets at http://yt-project.org/data/.  The datasets necessary for each lesson are noted next to the corresponding tutorial.\n",
-      "\n",
-      "## What's Next?\n",
-      "\n",
-      "The Notebooks are meant to be explored in this order:\n",
-      "\n",
-      "1. Introduction\n",
-      "2. Data Inspection (IsolatedGalaxy dataset)\n",
-      "3. Simple Visualization (enzo_tiny_cosmology & Enzo_64 datasets)\n",
-      "4. Data Objects and Time Series (IsolatedGalaxy dataset)\n",
-      "5. Derived Fields and Profiles (IsolatedGalaxy dataset)\n",
-      "6. Volume Rendering (IsolatedGalaxy dataset)"
-     ]
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "The following code will download the data needed for this tutorial automatically using `curl`. It may take some time so please wait when the kernel is busy. You will need to set `download_datasets` to True before using it."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "download_datasets = False\n",
-      "if download_datasets:\n",
-      "    !curl -sSO http://yt-project.org/data/enzo_tiny_cosmology.tar\n",
-      "    print \"Got enzo_tiny_cosmology\"\n",
-      "    !tar xf enzo_tiny_cosmology.tar\n",
-      "    \n",
-      "    !curl -sSO http://yt-project.org/data/Enzo_64.tar\n",
-      "    print \"Got Enzo_64\"\n",
-      "    !tar xf Enzo_64.tar\n",
-      "    \n",
-      "    !curl -sSO http://yt-project.org/data/IsolatedGalaxy.tar\n",
-      "    print \"Got IsolatedGalaxy\"\n",
-      "    !tar xf IsolatedGalaxy.tar\n",
-      "    \n",
-      "    print \"All done!\""
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    }
-   ],
-   "metadata": {}
-  }
- ]
-}
\ No newline at end of file

diff -r 776082b1ecd0b7882d4ddb448ee634342f2f2b77 -r abf5b82c5f9ccef6103e5aec0de20d810e75f707 doc/source/bootcamp/2)_Data_Inspection.ipynb
--- a/doc/source/bootcamp/2)_Data_Inspection.ipynb
+++ /dev/null
@@ -1,384 +0,0 @@
-{
- "metadata": {
-  "name": "",
-  "signature": "sha256:a8fe78715c1f3900c37c675d84320fe65f0ba8734abba60fd12e74d957e5d8ee"
- },
- "nbformat": 3,
- "nbformat_minor": 0,
- "worksheets": [
-  {
-   "cells": [
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "# Starting Out and Loading Data\n",
-      "\n",
-      "We're going to get started by loading up yt.  This next command brings all of the libraries into memory and sets up our environment."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "import yt"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "Now that we've loaded yt, we can load up some data.  Let's load the `IsolatedGalaxy` dataset."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "ds = yt.load(\"IsolatedGalaxy/galaxy0030/galaxy0030\")"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "## Fields and Facts\n",
-      "\n",
-      "When you call the `load` function, yt tries to do very little -- this is designed to be a fast operation, just setting up some information about the simulation.  Now, the first time you access the \"index\" it will read and load the mesh and then determine where data is placed in the physical domain and on disk.  Once it knows that, yt can tell you some statistics about the simulation:"
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "ds.print_stats()"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "yt can also tell you the fields it found on disk:"
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "ds.field_list"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "And, all of the fields it thinks it knows how to generate:"
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "ds.derived_field_list"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "yt can also transparently generate fields.  However, we encourage you to examine exactly what yt is doing when it generates those fields.  To see, you can ask for the source of a given field."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "print ds.field_info[\"gas\", \"vorticity_x\"].get_source()"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "yt stores information about the domain of the simulation:"
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "print ds.domain_width"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "yt can also convert this into various units:"
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "print ds.domain_width.in_units(\"kpc\")\n",
-      "print ds.domain_width.in_units(\"au\")\n",
-      "print ds.domain_width.in_units(\"mile\")"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "# Mesh Structure\n",
-      "\n",
-      "If you're using a simulation type that has grids (for instance, here we're using an Enzo simulation) you can examine the structure of the mesh.  For the most part, you probably won't have to use this unless you're debugging a simulation or examining in detail what is going on."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "print ds.index.grid_left_edge"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "But, you may have to access information about individual grid objects!  Each grid object mediates accessing data from the disk and has a number of attributes that tell you about it.  The index (`ds.index` here) has an attribute `grids` which is all of the grid objects."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "print ds.index.grids[1]"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "g = ds.index.grids[1]\n",
-      "print g"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "Grids have dimensions, extents, level, and even a list of Child grids."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "g.ActiveDimensions"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "g.LeftEdge, g.RightEdge"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "g.Level"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "g.Children"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "## Advanced Grid Inspection\n",
-      "\n",
-      "If we want to examine grids only at a given level, we can!  Not only that, but we can load data and take a look at various fields.\n",
-      "\n",
-      "*This section can be skipped!*"
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "gs = ds.index.select_grids(ds.index.max_level)"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "g2 = gs[0]\n",
-      "print g2\n",
-      "print g2.Parent\n",
-      "print g2.get_global_startindex()"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "print g2[\"density\"][:,:,0]"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "print (g2.Parent.child_mask == 0).sum() * 8\n",
-      "print g2.ActiveDimensions.prod()"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "for f in ds.field_list:\n",
-      "    fv = g[f]\n",
-      "    if fv.size == 0: continue\n",
-      "    print f, fv.min(), fv.max()"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "# Examining Data in Regions\n",
-      "\n",
-      "yt provides data object selectors.  In subsequent notebooks we'll examine these in more detail, but we can select a sphere of data and perform a number of operations on it.  yt makes it easy to operate on fluid fields in an object in *bulk*, but you can also examine individual field values.\n",
-      "\n",
-      "This creates a sphere selector positioned at the most dense point in the simulation that has a radius of 10 kpc."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "sp = ds.sphere(\"max\", (10, 'kpc'))"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "print sp"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "We can calculate a bunch of bulk quantities.  Here's that list, but there's a list in the docs, too!"
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "print sp.quantities.keys()"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "Let's look at the total mass.  This is how you call a given quantity.  yt calls these \"Derived Quantities\".  We'll talk about a few in a later notebook."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "print sp.quantities.total_mass()"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    }
-   ],
-   "metadata": {}
-  }
- ]
-}
\ No newline at end of file

diff -r 776082b1ecd0b7882d4ddb448ee634342f2f2b77 -r abf5b82c5f9ccef6103e5aec0de20d810e75f707 doc/source/bootcamp/3)_Simple_Visualization.ipynb
--- a/doc/source/bootcamp/3)_Simple_Visualization.ipynb
+++ /dev/null
@@ -1,275 +0,0 @@
-{
- "metadata": {
-  "name": "",
-  "signature": "sha256:c00ba7fdbbd9ea957d06060ad70f06f629b1fd4ebf5379c1fdad2697ab0a4cd6"
- },
- "nbformat": 3,
- "nbformat_minor": 0,
- "worksheets": [
-  {
-   "cells": [
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "# Simple Visualizations of Data\n",
-      "\n",
-      "Just like in our first notebook, we have to load yt and then some data."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "import yt"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "For this notebook, we'll load up a cosmology dataset."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "ds = yt.load(\"enzo_tiny_cosmology/DD0046/DD0046\")\n",
-      "print \"Redshift =\", ds.current_redshift"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "In the terms that yt uses, a projection is a line integral through the domain.  This can either be unweighted (in which case a column density is returned) or weighted, in which case an average value is returned.  Projections are, like all other data objects in yt, full-fledged data objects that churn through data and present that to you.  However, we also provide a simple method of creating Projections and plotting them in a single step.  This is called a Plot Window, here specifically known as a `ProjectionPlot`.  One thing to note is that in yt, we project all the way through the entire domain at a single time.  This means that the first call to projecting can be somewhat time consuming, but panning, zooming and plotting are all quite fast.\n",
-      "\n",
-      "yt is designed to make it easy to make nice plots and straightforward to modify those plots directly.  The cookbook in the documentation includes detailed examples of this."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "p = yt.ProjectionPlot(ds, \"y\", \"density\")\n",
-      "p.show()"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "The `show` command simply sends the plot to the IPython notebook.  You can also call `p.save()` which will save the plot to the file system.  This function accepts an argument, which will be pre-prended to the filename and can be used to name it based on the width or to supply a location.\n",
-      "\n",
-      "Now we'll zoom and pan a bit."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "p.zoom(2.0)"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "p.pan_rel((0.1, 0.0))"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "p.zoom(10.0)"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "p.pan_rel((-0.25, -0.5))"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "p.zoom(0.1)"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "If we specify multiple fields, each time we call `show` we get multiple plots back.  Same for `save`!"
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "p = yt.ProjectionPlot(ds, \"z\", [\"density\", \"temperature\"], weight_field=\"density\")\n",
-      "p.show()"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "We can adjust the colormap on a field-by-field basis."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "p.set_cmap(\"temperature\", \"hot\")"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "And, we can re-center the plot on different locations.  One possible use of this would be to make a single `ProjectionPlot` which you move around to look at different regions in your simulation, saving at each one."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "v, c = ds.find_max(\"density\")\n",
-      "p.set_center((c[0], c[1]))\n",
-      "p.zoom(10)"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "Okay, let's load up a bigger simulation (from `Enzo_64` this time) and make a slice plot."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "ds = yt.load(\"Enzo_64/DD0043/data0043\")\n",
-      "s = yt.SlicePlot(ds, \"z\", [\"density\", \"velocity_magnitude\"], center=\"max\")\n",
-      "s.set_cmap(\"velocity_magnitude\", \"kamae\")\n",
-      "s.zoom(10.0)"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "We can adjust the logging of various fields:"
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "s.set_log(\"velocity_magnitude\", True)"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "yt provides many different annotations for your plots.  You can see all of these in the documentation, or if you type `s.annotate_` and press tab, a list will show up here.  We'll annotate with velocity arrows."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "s.annotate_velocity()"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "Contours can also be overlaid:"
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "s = yt.SlicePlot(ds, \"x\", [\"density\"], center=\"max\")\n",
-      "s.annotate_contour(\"temperature\")\n",
-      "s.zoom(2.5)"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "Finally, we can save out to the file system."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "s.save()"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    }
-   ],
-   "metadata": {}
-  }
- ]
-}
\ No newline at end of file

diff -r 776082b1ecd0b7882d4ddb448ee634342f2f2b77 -r abf5b82c5f9ccef6103e5aec0de20d810e75f707 doc/source/bootcamp/4)_Data_Objects_and_Time_Series.ipynb
--- a/doc/source/bootcamp/4)_Data_Objects_and_Time_Series.ipynb
+++ /dev/null
@@ -1,382 +0,0 @@
-{
- "metadata": {
-  "name": "",
-  "signature": "sha256:a46e1baa90d32045c2b524100f28bad41b3665249612c9a275ee0375a6f4be20"
- },
- "nbformat": 3,
- "nbformat_minor": 0,
- "worksheets": [
-  {
-   "cells": [
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "# Data Objects and Time Series Data\n",
-      "\n",
-      "Just like before, we will load up yt.  Since we'll be using pylab to plot some data in this notebook, we additionally tell matplotlib to place plots inline inside the notebook."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "%matplotlib inline\n",
-      "import yt\n",
-      "import numpy as np\n",
-      "from matplotlib import pylab\n",
-      "from yt.analysis_modules.halo_finding.api import HaloFinder"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "## Time Series Data\n",
-      "\n",
-      "Unlike before, instead of loading a single dataset, this time we'll load a bunch which we'll examine in sequence.  This command creates a `DatasetSeries` object, which can be iterated over (including in parallel, which is outside the scope of this bootcamp) and analyzed.  There are some other helpful operations it can provide, but we'll stick to the basics here.\n",
-      "\n",
-      "Note that you can specify either a list of filenames, or a glob (i.e., asterisk) pattern in this."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "ts = yt.DatasetSeries(\"enzo_tiny_cosmology/*/*.hierarchy\")"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "### Example 1: Simple Time Series\n",
-      "\n",
-      "As a simple example of how we can use this functionality, let's find the min and max of the density as a function of time in this simulation.  To do this we use the construction `for ds in ts` where `ds` means \"Dataset\" and `ts` is the \"Time Series\" we just loaded up.  For each dataset, we'll create an object (`dd`) that covers the entire domain.  (`all_data` is a shorthand function for this.)  We'll then call the `extrema` Derived Quantity, and append the min and max to our extrema outputs."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "rho_ex = []\n",
-      "times = []\n",
-      "for ds in ts:\n",
-      "    dd = ds.all_data()\n",
-      "    rho_ex.append(dd.quantities.extrema(\"density\"))\n",
-      "    times.append(ds.current_time.in_units(\"Gyr\"))\n",
-      "rho_ex = np.array(rho_ex)"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "Now we plot the minimum and the maximum:"
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "pylab.semilogy(times, rho_ex[:,0], '-xk', label='Minimum')\n",
-      "pylab.semilogy(times, rho_ex[:,1], '-xr', label='Maximum')\n",
-      "pylab.ylabel(\"Density ($g/cm^3$)\")\n",
-      "pylab.xlabel(\"Time (Gyr)\")\n",
-      "pylab.legend()\n",
-      "pylab.ylim(1e-32, 1e-21)\n",
-      "pylab.show()"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "### Example 2: Advanced Time Series\n",
-      "\n",
-      "Let's do something a bit different.  Let's calculate the total mass inside halos and outside halos.\n",
-      "\n",
-      "This actually touches a lot of different pieces of machinery in yt.  For every dataset, we will run the halo finder HOP.  Then, we calculate the total mass in the domain.  Then, for each halo, we calculate the sum of the baryon mass in that halo.  We'll keep running tallies of these two things."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "from yt.units import Msun\n",
-      "\n",
-      "mass = []\n",
-      "zs = []\n",
-      "for ds in ts:\n",
-      "    halos = HaloFinder(ds)\n",
-      "    dd = ds.all_data()\n",
-      "    total_mass = dd.quantities.total_quantity(\"cell_mass\").in_units(\"Msun\")\n",
-      "    total_in_baryons = 0.0*Msun\n",
-      "    for halo in halos:\n",
-      "        sp = halo.get_sphere()\n",
-      "        total_in_baryons += sp.quantities.total_quantity(\"cell_mass\").in_units(\"Msun\")\n",
-      "    mass.append(total_in_baryons/total_mass)\n",
-      "    zs.append(ds.current_redshift)"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "Now let's plot them!"
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "pylab.semilogx(zs, mass, '-xb')\n",
-      "pylab.xlabel(\"Redshift\")\n",
-      "pylab.ylabel(\"Mass in halos / Total mass\")\n",
-      "pylab.xlim(max(zs), min(zs))\n",
-      "pylab.ylim(-0.01, .18)"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "## Data Objects\n",
-      "\n",
-      "Time series data have many applications, but most of them rely on examining the underlying data in some way.  Below, we'll see how to use and manipulate data objects.\n",
-      "\n",
-      "### Ray Queries\n",
-      "\n",
-      "yt provides the ability to examine rays, or lines, through the domain.  Note that these are not periodic, unlike most other data objects.  We create a ray object and can then examine quantities of it.  Rays have the special fields `t` and `dts`, which correspond to the time the ray enters a given cell and the distance it travels through that cell.\n",
-      "\n",
-      "To create a ray, we specify the start and end points.\n",
-      "\n",
-      "Note that we need to convert these arrays to numpy arrays due to a bug in matplotlib 1.3.1."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "ray = ds.ray([0.1, 0.2, 0.3], [0.9, 0.8, 0.7])\n",
-      "pylab.semilogy(np.array(ray[\"t\"]), np.array(ray[\"density\"]))"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "print ray[\"dts\"]"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "print ray[\"t\"]"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "print ray[\"x\"]"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "### Slice Queries\n",
-      "\n",
-      "While slices are often used for visualization, they can be useful for other operations as well.  yt regards slices as multi-resolution objects.  They are an array of cells that are not all the same size; it only returns the cells at the highest resolution that it intersects.  (This is true for all yt data objects.)  Slices and projections have the special fields `px`, `py`, `pdx` and `pdy`, which correspond to the coordinates and half-widths in the pixel plane."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "ds = yt.load(\"IsolatedGalaxy/galaxy0030/galaxy0030\")\n",
-      "v, c = ds.find_max(\"density\")\n",
-      "sl = ds.slice(0, c[0])\n",
-      "print sl[\"index\", \"x\"]\n",
-      "print sl[\"index\", \"z\"]\n",
-      "print sl[\"pdx\"]\n",
-      "print sl[\"gas\", \"density\"].shape"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "If we want to do something interesting with a `Slice`, we can turn it into a `FixedResolutionBuffer`.  This object can be queried and will return a 2D array of values."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "frb = sl.to_frb((50.0, 'kpc'), 1024)\n",
-      "print frb[\"gas\", \"density\"].shape"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "yt provides a few functions for writing arrays to disk, particularly in image form.  Here we'll write out the log of `density`, and then use IPython to display it back here.  Note that for the most part, you will probably want to use a `PlotWindow` for this, but in the case that it is useful you can directly manipulate the data."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "yt.write_image(np.log10(frb[\"gas\", \"density\"]), \"temp.png\")\n",
-      "from IPython.display import Image\n",
-      "Image(filename = \"temp.png\")"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "### Off-Axis Slices\n",
-      "\n",
-      "yt provides not only slices, but off-axis slices that are sometimes called \"cutting planes.\"  These are specified by (in order) a normal vector and a center.  Here we've set the normal vector to `[0.2, 0.3, 0.5]` and the center to be the point of maximum density.\n",
-      "\n",
-      "We can then turn these directly into plot windows using `to_pw`.  Note that the `to_pw` and `to_frb` methods are available on slices, off-axis slices, and projections, and can be used on any of them."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "cp = ds.cutting([0.2, 0.3, 0.5], \"max\")\n",
-      "pw = cp.to_pw(fields = [(\"gas\", \"density\")])"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "Once we have our plot window from our cutting plane, we can show it here."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "pw.show()"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "We can, as noted above, do the same with our slice:"
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "pws = sl.to_pw(fields=[\"density\"])\n",
-      "#pws.show()\n",
-      "print pws.plots.keys()"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "### Covering Grids\n",
-      "\n",
-      "If we want to access a 3D array of data that spans multiple resolutions in our simulation, we can use a covering grid.  This will return a 3D array of data, drawing from up to the resolution level specified when creating the data.  For example, if you create a covering grid that spans two child grids of a single parent grid, it will fill those zones covered by a zone of a child grid with the data from that child grid.  Where it is covered only by the parent grid, the cells from the parent grid will be duplicated (appropriately) to fill the covering grid.\n",
-      "\n",
-      "There are two different types of covering grids: unsmoothed and smoothed.  Smoothed grids will be filled through a cascading interpolation process; they will be filled at level 0, interpolated to level 1, filled at level 1, interpolated to level 2, filled at level 2, etc.  This will help to reduce edge effects.  Unsmoothed covering grids will not be interpolated, but rather values will be duplicated multiple times.\n",
-      "\n",
-      "Here we create an unsmoothed covering grid at level 2, with the left edge at `[0.0, 0.0, 0.0]` and with dimensions equal to those that would cover the entire domain at level 2.  We can then ask for the Density field, which will be a 3D array."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "cg = ds.covering_grid(2, [0.0, 0.0, 0.0], ds.domain_dimensions * 2**2)\n",
-      "print cg[\"density\"].shape"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "In this example, we do exactly the same thing: except we ask for a *smoothed* covering grid, which will reduce edge effects."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "scg = ds.smoothed_covering_grid(2, [0.0, 0.0, 0.0], ds.domain_dimensions * 2**2)\n",
-      "print scg[\"density\"].shape"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    }
-   ],
-   "metadata": {}
-  }
- ]
-}
\ No newline at end of file

diff -r 776082b1ecd0b7882d4ddb448ee634342f2f2b77 -r abf5b82c5f9ccef6103e5aec0de20d810e75f707 doc/source/bootcamp/5)_Derived_Fields_and_Profiles.ipynb
--- a/doc/source/bootcamp/5)_Derived_Fields_and_Profiles.ipynb
+++ /dev/null
@@ -1,254 +0,0 @@
-{
- "metadata": {
-  "name": "",
-  "signature": "sha256:eca573e749829cacda0a8c07c6d5d11d07a5de657563a44b8c4ffff8f735caed"
- },
- "nbformat": 3,
- "nbformat_minor": 0,
- "worksheets": [
-  {
-   "cells": [
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "# Derived Fields and Profiles\n",
-      "\n",
-      "One of the most powerful features in yt is the ability to create derived fields that act and look exactly like fields that exist on disk.  This means that they will be generated on demand and can be used anywhere a field that exists on disk would be used.  Additionally, you can create them by just writing python functions."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "%matplotlib inline\n",
-      "import yt\n",
-      "import numpy as np\n",
-      "from yt import derived_field\n",
-      "from matplotlib import pylab"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "## Derived Fields\n",
-      "\n",
-      "This is an example of the simplest possible way to create a derived field.  All derived fields are defined by a function and some metadata; that metadata can include units, LaTeX-friendly names, conversion factors, and so on.  Fields can be defined in the way in the next cell.  What this does is create a function which accepts two arguments and then provide the units for that field.  In this case, our field is `dinosaurs` and our units are `K*cm/s`.  The function itself can access any fields that are in the simulation, and it does so by requesting data from the object called `data`."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "@derived_field(name = \"dinosaurs\", units = \"K * cm/s\")\n",
-      "def _dinos(field, data):\n",
-      "    return data[\"temperature\"] * data[\"velocity_magnitude\"]"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "One important thing to note is that derived fields must be defined *before* any datasets are loaded.  Let's load up our data and take a look at some quantities."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "ds = yt.load(\"IsolatedGalaxy/galaxy0030/galaxy0030\")\n",
-      "dd = ds.all_data()\n",
-      "print dd.quantities.keys()"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "One interesting question is, what are the minimum and maximum values of dinosaur production rates in our isolated galaxy?  We can do that by examining the `extrema` quantity -- the exact same way that we would for density, temperature, and so on."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "print dd.quantities.extrema(\"dinosaurs\")"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "We can do the same for the average quantities as well."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "print dd.quantities.weighted_average_quantity(\"dinosaurs\", weight=\"temperature\")"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "## A Few Other Quantities\n",
-      "\n",
-      "We can ask other quantities of our data, as well.  For instance, this sequence of operations will find the most dense point, center a sphere on it, calculate the bulk velocity of that sphere, calculate the baryonic angular momentum vector, and then the density extrema.  All of this is done in a memory conservative way: if you have an absolutely enormous dataset, yt will split that dataset into pieces, apply intermediate reductions and then a final reduction to calculate your quantity."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "sp = ds.sphere(\"max\", (10.0, 'kpc'))\n",
-      "bv = sp.quantities.bulk_velocity()\n",
-      "L = sp.quantities.angular_momentum_vector()\n",
-      "rho_min, rho_max = sp.quantities.extrema(\"density\")\n",
-      "print bv\n",
-      "print L\n",
-      "print rho_min, rho_max"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "## Profiles\n",
-      "\n",
-      "yt provides the ability to bin in 1, 2 and 3 dimensions.  This means discretizing in one or more dimensions of phase space (density, temperature, etc) and then calculating either the total value of a field in each bin or the average value of a field in each bin.\n",
-      "\n",
-      "We do this using the objects `Profile1D`, `Profile2D`, and `Profile3D`.  The first two are the most common since they are the easiest to visualize.\n",
-      "\n",
-      "This first set of commands manually creates a profile object the sphere we created earlier, binned in 32 bins according to density between `rho_min` and `rho_max`, and then takes the density-weighted average of the fields `temperature` and (previously-defined) `dinosaurs`.  We then plot it in a loglog plot."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "prof = yt.Profile1D(sp, \"density\", 32, rho_min, rho_max, True, weight_field=\"cell_mass\")\n",
-      "prof.add_fields([\"temperature\",\"dinosaurs\"])\n",
-      "pylab.loglog(np.array(prof.x), np.array(prof[\"temperature\"]), \"-x\")\n",
-      "pylab.xlabel('Density $(g/cm^3)$')\n",
-      "pylab.ylabel('Temperature $(K)$')"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "Now we plot the `dinosaurs` field."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "pylab.loglog(np.array(prof.x), np.array(prof[\"dinosaurs\"]), '-x')\n",
-      "pylab.xlabel('Density $(g/cm^3)$')\n",
-      "pylab.ylabel('Dinosaurs $(K cm / s)$')"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "If we want to see the total mass in every bin, we profile the `cell_mass` field with no weight.  Specifying `weight=None` will simply take the total value in every bin and add that up."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "prof = yt.Profile1D(sp, \"density\", 32, rho_min, rho_max, True, weight_field=None)\n",
-      "prof.add_fields([\"cell_mass\"])\n",
-      "pylab.loglog(np.array(prof.x), np.array(prof[\"cell_mass\"].in_units(\"Msun\")), '-x')\n",
-      "pylab.xlabel('Density $(g/cm^3)$')\n",
-      "pylab.ylabel('Cell mass $(M_\\odot)$')"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "In addition to the low-level `ProfileND` interface, it's also quite straightforward to quickly create plots of profiles using the `ProfilePlot` class.  Let's redo the last plot using `ProfilePlot`"
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "prof = yt.ProfilePlot(sp, 'density', 'cell_mass', weight_field=None)\n",
-      "prof.set_unit('cell_mass', 'Msun')\n",
-      "prof.show()"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "## Field Parameters\n",
-      "\n",
-      "Field parameters are a method of passing information to derived fields.  For instance, you might pass in information about a vector you want to use as a basis for a coordinate transformation.  yt often uses things like `bulk_velocity` to identify velocities that should be subtracted off.  Here we show how that works:"
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "sp_small = ds.sphere(\"max\", (50.0, 'kpc'))\n",
-      "bv = sp_small.quantities.bulk_velocity()\n",
-      "\n",
-      "sp = ds.sphere(\"max\", (0.1, 'Mpc'))\n",
-      "rv1 = sp.quantities.extrema(\"radial_velocity\")\n",
-      "\n",
-      "sp.clear_data()\n",
-      "sp.set_field_parameter(\"bulk_velocity\", bv)\n",
-      "rv2 = sp.quantities.extrema(\"radial_velocity\")\n",
-      "\n",
-      "print bv\n",
-      "print rv1\n",
-      "print rv2"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    }
-   ],
-   "metadata": {}
-  }
- ]
-}

This diff is so big that we needed to truncate the remainder.

https://bitbucket.org/yt_analysis/yt/commits/9ad4f38cdd2c/
Changeset:   9ad4f38cdd2c
Branch:      yt-3.0
User:        MatthewTurk
Date:        2014-09-03 17:31:00
Summary:     Set oblique axis to 4.
Affected #:  1 file

diff -r abf5b82c5f9ccef6103e5aec0de20d810e75f707 -r 9ad4f38cdd2ca4fdacb05a2d7c6b149099bef9cb yt/visualization/plot_window.py
--- a/yt/visualization/plot_window.py
+++ b/yt/visualization/plot_window.py
@@ -105,7 +105,7 @@
     return (bounds, center, display_center)
 
 def get_oblique_window_parameters(normal, center, width, ds, depth=None):
-    display_center, center = ds.coordinates.sanitize_center(center, axis)
+    display_center, center = ds.coordinates.sanitize_center(center, 4)
     width = ds.coordinates.sanitize_width(normal, width, depth)
 
     if len(width) == 2:


https://bitbucket.org/yt_analysis/yt/commits/afd3a0df1c6d/
Changeset:   afd3a0df1c6d
Branch:      yt-3.0
User:        MatthewTurk
Date:        2014-09-03 18:25:31
Summary:     Sanitize the center in plot window.
Affected #:  1 file

diff -r 9ad4f38cdd2ca4fdacb05a2d7c6b149099bef9cb -r afd3a0df1c6d531e9726c34a4be8dbae420f43b8 yt/visualization/plot_window.py
--- a/yt/visualization/plot_window.py
+++ b/yt/visualization/plot_window.py
@@ -205,8 +205,9 @@
             ax = self.data_source.axis
             xax = self.ds.coordinates.x_axis[ax]
             yax = self.ds.coordinates.y_axis[ax]
-            center = [self.data_source.center[xax],
-                      self.data_source.center[yax]]
+            center, display_center = self.ds.coordinates.sanitize_center(
+                self.data_source.center, ax)
+            center = [display_center[xax], display_center[yax]]
             self.set_center(center)
         for field in self.data_source._determine_fields(self.frb.data.keys()):
             finfo = self.data_source.ds._get_field_info(*field)


https://bitbucket.org/yt_analysis/yt/commits/3298a420955c/
Changeset:   3298a420955c
Branch:      yt-3.0
User:        MatthewTurk
Date:        2014-09-03 20:41:59
Summary:     Adding a notebook about loading spherical data
Affected #:  2 files

diff -r afd3a0df1c6d531e9726c34a4be8dbae420f43b8 -r 3298a420955cdf6c2a0844fc0a34f4e48f9eb83e doc/source/examining/Loading_Spherical_Data.ipynb
--- /dev/null
+++ b/doc/source/examining/Loading_Spherical_Data.ipynb
@@ -0,0 +1,188 @@
+{
+ "metadata": {
+  "name": "",
+  "signature": "sha256:7db6f703bdb3d84c63175acb3d05909a52ad1ed7af635f7c71826e4b9bb547fb"
+ },
+ "nbformat": 3,
+ "nbformat_minor": 0,
+ "worksheets": [
+  {
+   "cells": [
+    {
+     "cell_type": "markdown",
+     "metadata": {},
+     "source": [
+      "# Loading Spherical Data\n",
+      "\n",
+      "With version 3.0 of yt, it has gained the ability to load data from non-Cartesian systems.  This support is still being extended, but here is an example of how to load spherical data from a regularly-spaced grid.  For irregularly spaced grids, a similar setup can be used, but the `load_hexahedral_mesh` method will have to be used instead.\n",
+      "\n",
+      "Note that in yt, \"spherical\" means that it is ordered $r$, $\\theta$, $\\phi$, where $\\theta$ is the declination from the azimuth (running from $0$ to $\\pi$ and $\\phi$ is the angle around the zenith (running from $0$ to $2\\pi$).\n",
+      "\n",
+      "We first start out by loading yt."
+     ]
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "import numpy as np\n",
+      "import yt"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": []
+    },
+    {
+     "cell_type": "markdown",
+     "metadata": {},
+     "source": [
+      "Now, we create a few derived fields.  The first three are just straight translations of the Cartesian coordinates, so that we can see where we are located in the data, and understand what we're seeing.  The final one is just a fun field that is some combination of the three coordinates, and will vary in all dimensions."
+     ]
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "@yt.derived_field(name = \"sphx\", units = \"cm\", take_log=False)\n",
+      "def sphx(field, data):\n",
+      "    return np.cos(data[\"phi\"]) * np.sin(data[\"theta\"])*data[\"r\"]\n",
+      "@yt.derived_field(name = \"sphy\", units = \"cm\", take_log=False)\n",
+      "def sphy(field, data):\n",
+      "    return np.sin(data[\"phi\"]) * np.sin(data[\"theta\"])*data[\"r\"]\n",
+      "@yt.derived_field(name = \"sphz\", units = \"cm\", take_log=False)\n",
+      "def sphz(field, data):\n",
+      "    return np.cos(data[\"theta\"])*data[\"r\"]\n",
+      "@yt.derived_field(name = \"funfield\", units=\"cm\", take_log=False)\n",
+      "def funfield(field, data):\n",
+      "    return (np.sin(data[\"phi\"])**2 + np.cos(data[\"theta\"])**2) * (1.0*data[\"r\"].uq+data[\"r\"])"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": []
+    },
+    {
+     "cell_type": "markdown",
+     "metadata": {},
+     "source": [
+      "## Loading Data\n",
+      "\n",
+      "Now we can actually load our data.  We use the `load_uniform_grid` function here.  Normally, the first argument would be a dictionary of field data, where the keys were the field names and the values the field data arrays.  Here, we're just going to look at derived fields, so we supply an empty one.\n",
+      "\n",
+      "The next few arguments are the number of dimensions, the bounds, and we then specify the geometry as spherical."
+     ]
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "ds = yt.load_uniform_grid({}, [128, 128, 128],\n",
+      "                          bbox=np.array([[0.0, 1.0], [0.0, np.pi], [0.0, 2*np.pi]]),\n",
+      "                          geometry=\"spherical\")"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": []
+    },
+    {
+     "cell_type": "markdown",
+     "metadata": {},
+     "source": [
+      "## Looking at Data\n",
+      "\n",
+      "Now we can take slices.  The first thing we will try is making a slice of data along the \"phi\" axis, here $\\pi/2$, which will be along the y axis in the positive direction.\n",
+      "\n",
+      "This is the manual way of creating a plot -- below, we'll use the standard, automatic ways.  Note that the coordinates run from $-r$ to $r$ along the $z$ axis and from $0$ to $r$ along the $R$ axis.  We use the capital $R$ to indicate that it's the $R$ along the $x-y$ plane."
+     ]
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "s = ds.slice(2, np.pi/2)\n",
+      "p = s.to_pw(\"funfield\", origin=\"native\")\n",
+      "p.set_zlim(\"all\", 0.0, 4.0)\n",
+      "p.show()"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": []
+    },
+    {
+     "cell_type": "markdown",
+     "metadata": {},
+     "source": [
+      "We can also slice along $r$.  For now, this creates a regular grid with *incorrect* units for phi and theta.  We are currently exploring two other options -- a simple aitoff projection, and fixing it to use the correct units as-is."
+     ]
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "s = yt.SlicePlot(ds, \"r\", \"funfield\")\n",
+      "s.set_zlim(\"all\", 0.0, 4.0)\n",
+      "s.show()"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": []
+    },
+    {
+     "cell_type": "markdown",
+     "metadata": {},
+     "source": [
+      "We can also slice at constant $\\theta$.  But, this is a weird thing!  We're slicing at a constant declination from the azimuth.  What this means is that when thought of in a Cartesian domain, this slice is actually a cone.  The axes have been labeled appropriately, to indicate that these are not exactly the $x$ and $y$ axes, but instead differ by a factor of $\\sin(\\theta))$."
+     ]
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "s = yt.SlicePlot(ds, \"theta\", \"funfield\")\n",
+      "s.set_zlim(\"all\", 0.0, 4.0)\n",
+      "s.show()"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": []
+    },
+    {
+     "cell_type": "markdown",
+     "metadata": {},
+     "source": [
+      "We've seen lots of the `funfield` plots, but we can also look at the Cartesian axes.  This next plot plots the Cartesian $x$, $y$ and $z$ values on a $\\theta$ slice.  Because we're not supplying an argument to the `center` parameter, yt will place it at the center of the $\\theta$ axis, which will be at $\\pi/2$, where it will be aligned with the $x-y$ plane.  The slight change in `sphz` results from the cells themselves migrating, and plotting the center of those cells."
+     ]
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "s = yt.SlicePlot(ds, \"theta\", [\"sphx\", \"sphy\", \"sphz\"])\n",
+      "s.show()"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": []
+    },
+    {
+     "cell_type": "markdown",
+     "metadata": {},
+     "source": [
+      "We can do the same with the $\\phi$ axis."
+     ]
+    },
+    {
+     "cell_type": "code",
+     "collapsed": true,
+     "input": [
+      "s = yt.SlicePlot(ds, \"phi\", [\"sphx\", \"sphy\", \"sphz\"])\n",
+      "s.show()"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": []
+    }
+   ],
+   "metadata": {}
+  }
+ ]
+}
\ No newline at end of file

diff -r afd3a0df1c6d531e9726c34a4be8dbae420f43b8 -r 3298a420955cdf6c2a0844fc0a34f4e48f9eb83e doc/source/examining/index.rst
--- a/doc/source/examining/index.rst
+++ b/doc/source/examining/index.rst
@@ -9,4 +9,5 @@
    loading_data
    generic_array_data
    generic_particle_data
+   spherical_data
    low_level_inspection


https://bitbucket.org/yt_analysis/yt/commits/9102dd58c138/
Changeset:   9102dd58c138
Branch:      yt-3.0
User:        MatthewTurk
Date:        2014-09-03 23:25:52
Summary:     Adding a missed file, expanding description of slicing.
Affected #:  2 files

diff -r 3298a420955cdf6c2a0844fc0a34f4e48f9eb83e -r 9102dd58c138e19c33d1cc81013291106b22a34b doc/source/examining/Loading_Spherical_Data.ipynb
--- a/doc/source/examining/Loading_Spherical_Data.ipynb
+++ b/doc/source/examining/Loading_Spherical_Data.ipynb
@@ -1,7 +1,7 @@
 {
  "metadata": {
   "name": "",
-  "signature": "sha256:7db6f703bdb3d84c63175acb3d05909a52ad1ed7af635f7c71826e4b9bb547fb"
+  "signature": "sha256:88ed88ce8d8f4a359052f287aea17a7cbed435ff960e195097b440191ce6c2ab"
  },
  "nbformat": 3,
  "nbformat_minor": 0,
@@ -89,7 +89,7 @@
      "source": [
       "## Looking at Data\n",
       "\n",
-      "Now we can take slices.  The first thing we will try is making a slice of data along the \"phi\" axis, here $\\pi/2$, which will be along the y axis in the positive direction.\n",
+      "Now we can take slices.  The first thing we will try is making a slice of data along the \"phi\" axis, here $\\pi/2$, which will be along the y axis in the positive direction.  We use the `.slice` attribute, which creates a slice, and then we convert this into a plot window.  Note that here 2 is used to indicate the third axis (0-indexed) which for spherical data is $\\phi$.\n",
       "\n",
       "This is the manual way of creating a plot -- below, we'll use the standard, automatic ways.  Note that the coordinates run from $-r$ to $r$ along the $z$ axis and from $0$ to $r$ along the $R$ axis.  We use the capital $R$ to indicate that it's the $R$ along the $x-y$ plane."
      ]

diff -r 3298a420955cdf6c2a0844fc0a34f4e48f9eb83e -r 9102dd58c138e19c33d1cc81013291106b22a34b doc/source/examining/spherical_data.rst
--- /dev/null
+++ b/doc/source/examining/spherical_data.rst
@@ -0,0 +1,6 @@
+.. _loading-spherical-data:
+
+Loading Spherical Data
+======================
+
+.. notebook:: Loading_Spherical_Data.ipynb


https://bitbucket.org/yt_analysis/yt/commits/b5034a8695c3/
Changeset:   b5034a8695c3
Branch:      yt-3.0
User:        MatthewTurk
Date:        2014-09-07 23:20:30
Summary:     Stopgap for cylindrical coordinates.
Affected #:  1 file

diff -r 9102dd58c138e19c33d1cc81013291106b22a34b -r b5034a8695c3d58a95eb577bbb6a67e7c3d2661d yt/visualization/plot_window.py
--- a/yt/visualization/plot_window.py
+++ b/yt/visualization/plot_window.py
@@ -84,6 +84,7 @@
         # Set our default width to be the full domain
         width = [ds.domain_right_edge[0]*2.0, ds.domain_right_edge[0]*2.0]
         center = ds.arr([0.0, 0.0, 0.0], "code_length")
+        display_center = center.copy()
     elif ds.geometry == "geographic":
         c_r = ((ds.domain_right_edge + ds.domain_left_edge)/2.0)[2]
         center = ds.arr([0.0, 0.0, c_r], "code_length")


https://bitbucket.org/yt_analysis/yt/commits/95a477fcf9ac/
Changeset:   95a477fcf9ac
Branch:      yt-3.0
User:        MatthewTurk
Date:        2014-09-16 13:12:15
Summary:     Merging from upstream
Affected #:  67 files

diff -r b5034a8695c3d58a95eb577bbb6a67e7c3d2661d -r 95a477fcf9ac3d51b348404756f07ef466bb0998 doc/source/analyzing/analysis_modules/halo_catalogs.rst
--- a/doc/source/analyzing/analysis_modules/halo_catalogs.rst
+++ b/doc/source/analyzing/analysis_modules/halo_catalogs.rst
@@ -129,7 +129,14 @@
 are center_of_mass and bulk_velocity. Their definitions are available in 
 ``yt/analysis_modules/halo_analysis/halo_quantities.py``. If you think that 
 your quantity may be of use to the general community, add it to 
-``halo_quantities.py`` and issue a pull request.
+``halo_quantities.py`` and issue a pull request.  Default halo quantities are:
+
+* ``particle_identifier`` -- Halo ID (e.g. 0 to N)
+* ``particle_mass`` -- Mass of halo
+* ``particle_position_x`` -- Location of halo
+* ``particle_position_y`` -- Location of halo
+* ``particle_position_z`` -- Location of halo
+* ``virial_radius`` -- Virial radius of halo
 
 An example of adding a quantity:
 

diff -r b5034a8695c3d58a95eb577bbb6a67e7c3d2661d -r 95a477fcf9ac3d51b348404756f07ef466bb0998 doc/source/analyzing/analysis_modules/halo_finders.rst
--- a/doc/source/analyzing/analysis_modules/halo_finders.rst
+++ b/doc/source/analyzing/analysis_modules/halo_finders.rst
@@ -75,7 +75,8 @@
   mass. In simulations where the highest-resolution particles all have the 
   same mass (ie: zoom-in grid based simulations), one can set up a particle
   filter to select the lowest mass particles and perform the halo finding
-  only on those.
+  only on those.  See this cookbook recipe for an example: 
+  :ref:`cookbook-rockstar-nested-grid`.
 
 To run the Rockstar Halo finding, you must launch python with MPI and 
 parallelization enabled. While Rockstar itself does not require MPI to run, 

diff -r b5034a8695c3d58a95eb577bbb6a67e7c3d2661d -r 95a477fcf9ac3d51b348404756f07ef466bb0998 doc/source/cookbook/cosmological_analysis.rst
--- a/doc/source/cookbook/cosmological_analysis.rst
+++ b/doc/source/cookbook/cosmological_analysis.rst
@@ -14,6 +14,22 @@
 
 .. yt_cookbook:: halo_plotting.py
 
+.. _cookbook-rockstar-nested-grid:
+
+Running Rockstar to Find Halos on Multi-Resolution-Particle Datasets
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The version of Rockstar installed with yt does not have the capability
+to work on datasets with particles of different masses.  Unfortunately,
+many simulations possess particles of different masses, notably cosmological 
+zoom datasets.  This recipe uses Rockstar in two different ways to generate a 
+HaloCatalog from the highest resolution dark matter particles (the ones 
+inside the zoom region).  It then overlays some of those halos on a projection
+as a demonstration.  See :ref:`halo-analysis` and :ref:`annotate-halos` for
+more information.
+
+.. yt_cookbook:: rockstar_nest.py
+
 .. _cookbook-halo_finding:
 
 Halo Profiling and Custom Analysis

diff -r b5034a8695c3d58a95eb577bbb6a67e7c3d2661d -r 95a477fcf9ac3d51b348404756f07ef466bb0998 doc/source/cookbook/power_spectrum_example.py
--- a/doc/source/cookbook/power_spectrum_example.py
+++ b/doc/source/cookbook/power_spectrum_example.py
@@ -57,7 +57,7 @@
     
     # physical limits to the wavenumbers
     kmin = np.min(1.0/L)
-    kmax = np.max(0.5*dims/L)
+    kmax = np.min(0.5*dims/L)
     
     kbins = np.arange(kmin, kmax, kmin)
     N = len(kbins)
@@ -112,7 +112,6 @@
     return np.abs(ru)**2
 
 
-if __name__ == "__main__":
 
-    ds = yt.load("maestro_xrb_lores_23437")
-    doit(ds)
+ds = yt.load("maestro_xrb_lores_23437")
+doit(ds)

diff -r b5034a8695c3d58a95eb577bbb6a67e7c3d2661d -r 95a477fcf9ac3d51b348404756f07ef466bb0998 doc/source/cookbook/rockstar_nest.py
--- /dev/null
+++ b/doc/source/cookbook/rockstar_nest.py
@@ -0,0 +1,74 @@
+# You must run this job in parallel.  
+# There are several mpi flags which can be useful in order for it to work OK.
+# It requires at least 3 processors in order to run because of the way in which 
+# rockstar divides up the work.  Make sure you have mpi4py installed as per 
+# http://yt-project.org/docs/dev/analyzing/parallel_computation.html#setting-up-parallel-yt
+    
+# Usage: mpirun -np <num_procs> --mca btl ^openib python this_script.py
+
+import yt
+from yt.analysis_modules.halo_analysis.halo_catalog import HaloCatalog
+from yt.data_objects.particle_filters import add_particle_filter
+from yt.analysis_modules.halo_finding.rockstar.api import RockstarHaloFinder
+yt.enable_parallelism() # rockstar halofinding requires parallelism
+
+# Create a dark matter particle filter
+# This will be code dependent, but this function here is true for enzo
+
+def DarkMatter(pfilter, data):
+    filter = data[("all", "particle_type")] == 1 # DM = 1, Stars = 2
+    return filter
+
+add_particle_filter("dark_matter", function=DarkMatter, filtered_type='all', \
+                    requires=["particle_type"])
+
+# First, we make sure that this script is being run using mpirun with
+# at least 3 processors as indicated in the comments above.
+assert(yt.communication_system.communicators[-1].size >= 3)
+
+# Load the dataset and apply dark matter filter
+fn = "Enzo_64/DD0043/data0043"
+ds = yt.load(fn)
+ds.add_particle_filter('dark_matter')
+
+# Determine highest resolution DM particle mass in sim by looking
+# at the extrema of the dark_matter particle_mass field.
+ad = ds.all_data()
+min_dm_mass = ad.quantities.extrema(('dark_matter','particle_mass'))[0]
+
+# Define a new particle filter to isolate all highest resolution DM particles
+# and apply it to dataset
+def MaxResDarkMatter(pfilter, data):
+    return data["particle_mass"] <= 1.01 * min_dm_mass
+
+add_particle_filter("max_res_dark_matter", function=MaxResDarkMatter, \
+                    filtered_type='dark_matter', requires=["particle_mass"])
+ds.add_particle_filter('max_res_dark_matter')
+
+# If desired, we can see the total number of DM and High-res DM particles
+#if yt.is_root():
+#    print "Simulation has %d DM particles." % ad['dark_matter','particle_type'].shape
+#    print "Simulation has %d Highest Res DM particles." % ad['max_res_dark_matter', 'particle_type'].shape
+
+# Run the halo catalog on the dataset only on the highest resolution dark matter 
+# particles
+hc = HaloCatalog(data_ds=ds, finder_method='rockstar', \
+                 finder_kwargs={'dm_only':True, 'particle_type':'max_res_dark_matter'})
+hc.create()
+
+# Or alternatively, just run the RockstarHaloFinder and later import the 
+# output file as necessary.  You can skip this step if you've already run it
+# once, but be careful since subsequent halo finds will overwrite this data.
+#rhf = RockstarHaloFinder(ds, particle_type="max_res_dark_matter")
+#rhf.run()
+# Load the halo list from a rockstar output for this dataset
+# Create a projection with the halos overplot on top
+#halos = yt.load('rockstar_halos/halos_0.0.bin')
+#hc = HaloCatalog(halos_ds=halos)
+#hc.load()
+
+# Regardless of your method of creating the halo catalog, use it to overplot the
+# halos on a projection.
+p = yt.ProjectionPlot(ds, "x", "density")
+p.annotate_halos(hc, annotate_field = 'particle_identifier', width=(10,'Mpc'), factor=2)
+p.save()

diff -r b5034a8695c3d58a95eb577bbb6a67e7c3d2661d -r 95a477fcf9ac3d51b348404756f07ef466bb0998 doc/source/cookbook/tests/test_cookbook.py
--- /dev/null
+++ b/doc/source/cookbook/tests/test_cookbook.py
@@ -0,0 +1,29 @@
+# -*- coding: utf-8 -*-
+"""Module for cookbook testing
+
+
+This test should be run from main yt directory.
+
+Example:
+
+      $ sed -e '/where/d' -i nose.cfg setup.cfg
+      $ nosetests doc/source/cookbook/tests/test_cookbook.py -P -v
+"""
+import glob
+import os
+import sys
+
+sys.path.append(os.path.join(os.getcwd(), "doc/source/cookbook"))
+
+
+def test_recipe():
+    '''Dummy test grabbing all cookbook's recipes'''
+    for fname in glob.glob("doc/source/cookbook/*.py"):
+        module_name = os.path.splitext(os.path.basename(fname))[0]
+        yield check_recipe, module_name
+
+
+def check_recipe(module_name):
+    '''Run single recipe'''
+    __import__(module_name)
+    assert True

diff -r b5034a8695c3d58a95eb577bbb6a67e7c3d2661d -r 95a477fcf9ac3d51b348404756f07ef466bb0998 doc/source/cookbook/thin_slice_projection.py
--- a/doc/source/cookbook/thin_slice_projection.py
+++ b/doc/source/cookbook/thin_slice_projection.py
@@ -4,7 +4,7 @@
 ds = yt.load("Enzo_64/DD0030/data0030")
 
 # Make a projection that is the full width of the domain,
-# but only 10 Mpc in depth.  This is done by creating a
+# but only 5 Mpc in depth.  This is done by creating a
 # region object with this exact geometry and providing it
 # as a data_source for the projection.
 
@@ -17,12 +17,12 @@
 right_corner = ds.domain_right_edge
 
 # Now adjust the size of the region along the line of sight (x axis).
-depth = ds.quan(10.0,'Mpc')
+depth = ds.quan(5.0,'Mpc')
 left_corner[0] = center[0] - 0.5 * depth
-left_corner[0] = center[0] + 0.5 * depth
+right_corner[0] = center[0] + 0.5 * depth
 
 # Create the region
-region = ds.region(center, left_corner, right_corner)
+region = ds.box(left_corner, right_corner)
 
 # Create a density projection and supply the region we have just created.
 # Only cells within the region will be included in the projection.

diff -r b5034a8695c3d58a95eb577bbb6a67e7c3d2661d -r 95a477fcf9ac3d51b348404756f07ef466bb0998 doc/source/examining/Loading_Generic_Particle_Data.ipynb
--- a/doc/source/examining/Loading_Generic_Particle_Data.ipynb
+++ b/doc/source/examining/Loading_Generic_Particle_Data.ipynb
@@ -74,7 +74,7 @@
       "import yt\n",
       "from yt.units import parsec, Msun\n",
       "\n",
-      "bbox = 1.1*np.array([[min(ppx), max(ppx)], [min(ppy), max(ppy)], [min(ppy), max(ppy)]])\n",
+      "bbox = 1.1*np.array([[min(ppx), max(ppx)], [min(ppy), max(ppy)], [min(ppz), max(ppz)]])\n",
       "\n",
       "ds = yt.load_particles(data, length_unit=parsec, mass_unit=1e8*Msun, n_ref=256, bbox=bbox)"
      ],

diff -r b5034a8695c3d58a95eb577bbb6a67e7c3d2661d -r 95a477fcf9ac3d51b348404756f07ef466bb0998 doc/source/visualizing/_cb_docstrings.inc
--- a/doc/source/visualizing/_cb_docstrings.inc
+++ b/doc/source/visualizing/_cb_docstrings.inc
@@ -151,19 +151,28 @@
 Overplot Halo Annotations
 ~~~~~~~~~~~~~~~~~~~~~~~~~
 
-.. function:: annotate_halos(self, halo_catalog, col='white', alpha=1, \
-                             width=None):
+.. function:: annotate_halos(self, halo_catalog, circle_kwargs=None, width=None, \ 
+                             annotate_field=False, font_kwargs=None, factor=1.0):
 
    (This is a proxy for
    :class:`~yt.visualization.plot_modifications.HaloCatalogCallback`.)
 
    Accepts a :class:`~yt.analysis_modules.halo_analysis.halo_catalog.HaloCatalog` 
-   and plots a circle at the location of each
-   halo with the radius of the circle corresponding to the virial radius of the
-   halo.  If ``width`` is set to None (default) all halos are plotted.
-   Otherwise, only halos that fall within a slab with width ``width`` centered
-   on the center of the plot data. The color and transparency of the circles can
-   be controlled with ``col`` and ``alpha`` respectively.
+   and plots a circle at the location of each halo with the radius of the 
+   circle corresponding to the virial radius of the halo.  If ``width`` is set 
+   to None (default) all halos are plotted, otherwise it accepts a tuple in 
+   the form (1.0, 'Mpc') to only display halos that fall within a slab with 
+   width ``width`` centered on the center of the plot data.  The appearance of 
+   the circles can be changed with the circle_kwargs dictionary, which is 
+   supplied to the Matplotlib patch Circle.  One can label each of the halos 
+   with the annotate_field, which accepts a field contained in the halo catalog 
+   to add text to the plot near the halo (example: annotate_field = 
+   ``particle_mass`` will write the halo mass next to each halo, whereas 
+   ``particle_identifier`` shows the halo number).  font_kwargs contains the 
+   arguments controlling the text appearance of the annotated field.
+   Factor is the number the virial radius is multiplied by for plotting the 
+   circles. Ex: factor = 2.0 will plot circles with twice the radius of each 
+   halo virial radius.
 
 .. python-script::
 
@@ -177,7 +186,7 @@
    hc.create()
 
    prj = yt.ProjectionPlot(data_ds, 'z', 'density')
-   prj.annotate_halos(hc)
+   prj.annotate_halos(hc, annotate_field='particle_identifier')
    prj.save()
 
 Overplot a Straight Line

diff -r b5034a8695c3d58a95eb577bbb6a67e7c3d2661d -r 95a477fcf9ac3d51b348404756f07ef466bb0998 doc/source/visualizing/colormaps/index.rst
--- a/doc/source/visualizing/colormaps/index.rst
+++ b/doc/source/visualizing/colormaps/index.rst
@@ -6,14 +6,20 @@
 There are several colormaps available for yt.  yt includes all of the 
 matplotlib colormaps as well for nearly all functions.  Individual visualization
 functions usually allow you to specify a colormap with the ``cmap`` flag.
-There are a small number of functions (mostly contained in the image_writer 
-module; e.g. write_bitmap, write_image, write_projection, etc.), which do 
-not load the matplotlib infrastructure and can only access the colormaps 
-native to yt.  
 
-Here is a chart of all of the colormaps available.  In addition to each 
-colormap displayed here, you can access its "reverse" by simply appending a 
-``"_r"`` to the end of the colormap name.
+If you have installed brewer2mpl (``pip install brewer2mpl`` or see
+`https://github.com/jiffyclub/brewer2mpl
+<https://github.com/jiffyclub/brewer2mpl>`_), you can also access the discrete
+colormaps available on `http://colorbrewer2.org <http://colorbrewer2.org>`_.
+Instead of supplying the colormap name, specify a tuple of the form (name, type,
+number), for example ``('RdBu', 'Diverging', 9)``.  These discrete colormaps will
+not be interpolated, and can be useful for creating
+colorblind/printer/grayscale-friendly plots. For more information, visit
+`http://colorbrewer2.org <http://colorbrewer2.org>`_.
+
+Here is a chart of all of the yt and matplotlib colormaps available.  In
+addition to each colormap displayed here, you can access its "reverse" by simply
+appending a ``"_r"`` to the end of the colormap name.
 
 All Colormaps (including matplotlib)
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
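As a minimal sketch of how the brewer2mpl tuple described above is passed to a plot (assuming brewer2mpl is installed, and reusing the IsolatedGalaxy sample dataset from the bootcamp notebooks):

import yt

ds = yt.load("IsolatedGalaxy/galaxy0030/galaxy0030")
slc = yt.SlicePlot(ds, "z", "density")
# A discrete colorbrewer map is selected with a (name, type, number) tuple
# instead of a colormap name string.
slc.set_cmap("density", ("RdBu", "Diverging", 9))
slc.save()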

diff -r b5034a8695c3d58a95eb577bbb6a67e7c3d2661d -r 95a477fcf9ac3d51b348404756f07ef466bb0998 scripts/iyt
--- a/scripts/iyt
+++ b/scripts/iyt
@@ -90,6 +90,7 @@
     kwargs = dict()
 
 ip.ex("from yt.mods import *")
+ip.ex("import yt")
 
 # Now we add some tab completers, in the vein of:
 # http://pymel.googlecode.com/svn/trunk/tools/ipymel.py

diff -r b5034a8695c3d58a95eb577bbb6a67e7c3d2661d -r 95a477fcf9ac3d51b348404756f07ef466bb0998 yt/analysis_modules/absorption_spectrum/absorption_line.py
--- a/yt/analysis_modules/absorption_spectrum/absorption_line.py
+++ b/yt/analysis_modules/absorption_spectrum/absorption_line.py
@@ -195,7 +195,6 @@
     ## tau_0
     tau_X = np.sqrt(np.pi) * e**2 / (me * ccgs) * \
         column_density * fval / vdop
-    tau1 = tau_X * lam1cgs
     tau0 = tau_X * lam0cgs
 
     # dimensionless frequency offset in units of doppler freq

diff -r b5034a8695c3d58a95eb577bbb6a67e7c3d2661d -r 95a477fcf9ac3d51b348404756f07ef466bb0998 yt/analysis_modules/cosmological_observation/light_cone/light_cone.py
--- a/yt/analysis_modules/cosmological_observation/light_cone/light_cone.py
+++ b/yt/analysis_modules/cosmological_observation/light_cone/light_cone.py
@@ -328,7 +328,7 @@
                                                         output["redshift"])
                 proper_box_size = self.simulation.box_size / \
                   (1.0 + output["redshift"])
-                pixel_xarea = (proper_box_size.in_cgs() / pixels)**2 #in proper cm^2
+                pixel_area = (proper_box_size.in_cgs() / pixels)**2 #in proper cm^2
                 factor = pixel_area / (4.0 * np.pi * dL.in_cgs()**2)
                 mylog.info("Distance to slice = %s" % dL)
                 frb[field] *= factor #in erg/s/cm^2/Hz on observer"s image plane.

diff -r b5034a8695c3d58a95eb577bbb6a67e7c3d2661d -r 95a477fcf9ac3d51b348404756f07ef466bb0998 yt/analysis_modules/halo_analysis/fields.py
--- a/yt/analysis_modules/halo_analysis/fields.py
+++ b/yt/analysis_modules/halo_analysis/fields.py
@@ -30,7 +30,7 @@
         sl_right = slice(2, None, None)
         div_fac = 2.0
     else:
-        sl_left, sl_right, div_face = slice_info
+        sl_left, sl_right, div_fac = slice_info
 
     def _virial_radius(field, data):
         virial_radius = data.get_field_parameter("virial_radius")

diff -r b5034a8695c3d58a95eb577bbb6a67e7c3d2661d -r 95a477fcf9ac3d51b348404756f07ef466bb0998 yt/analysis_modules/halo_analysis/halo_callbacks.py
--- a/yt/analysis_modules/halo_analysis/halo_callbacks.py
+++ b/yt/analysis_modules/halo_analysis/halo_callbacks.py
@@ -80,7 +80,6 @@
     """
 
     dds = halo.halo_catalog.data_ds
-    hds = halo.halo_catalog.halos_ds
     center = dds.arr([halo.quantities["particle_position_%s" % axis] \
                       for axis in "xyz"])
     radius = factor * halo.quantities[radius_field]

diff -r b5034a8695c3d58a95eb577bbb6a67e7c3d2661d -r 95a477fcf9ac3d51b348404756f07ef466bb0998 yt/analysis_modules/halo_mass_function/halo_mass_function.py
--- a/yt/analysis_modules/halo_mass_function/halo_mass_function.py
+++ b/yt/analysis_modules/halo_mass_function/halo_mass_function.py
@@ -788,7 +788,7 @@
     
         # Now compute the CDM+HDM+baryon transfer functions
         tf_cb = self.tf_master*self.growth_cb/self.growth_k0;
-        tf_cbnu = self.tf_master*self.growth_cbnu/self.growth_k0;
+        #tf_cbnu = self.tf_master*self.growth_cbnu/self.growth_k0;
         return tf_cb
 
 
@@ -832,7 +832,6 @@
     area1 = np.sum(areas)
     # Now we refine until the error is smaller than *error*.
     diff = area1 - area0
-    area_final = area1
     area_last = area1
     one_pow = 3
     while diff > error:

diff -r b5034a8695c3d58a95eb577bbb6a67e7c3d2661d -r 95a477fcf9ac3d51b348404756f07ef466bb0998 yt/analysis_modules/level_sets/contour_finder.py
--- a/yt/analysis_modules/level_sets/contour_finder.py
+++ b/yt/analysis_modules/level_sets/contour_finder.py
@@ -32,7 +32,6 @@
     contours = {}
     node_ids = []
     DLE = data_source.ds.domain_left_edge
-    total_vol = None
     selector = getattr(data_source, "base_object", data_source).selector
     masks = dict((g.id, m) for g, m in data_source.blocks)
     for (g, node, (sl, dims, gi)) in data_source.tiles.slice_traverse():

diff -r b5034a8695c3d58a95eb577bbb6a67e7c3d2661d -r 95a477fcf9ac3d51b348404756f07ef466bb0998 yt/analysis_modules/photon_simulator/photon_models.py
--- a/yt/analysis_modules/photon_simulator/photon_models.py
+++ b/yt/analysis_modules/photon_simulator/photon_models.py
@@ -128,7 +128,6 @@
         energy = self.spectral_model.ebins
     
         cell_em = EM[idxs]*vol_scale
-        cell_vol = vol[idxs]*vol_scale
     
         number_of_photons = np.zeros(dshape, dtype='uint64')
         energies = []
@@ -139,7 +138,6 @@
 
         for i, ikT in enumerate(kT_idxs):
 
-            ncells = int(bcounts[i])
             ibegin = bcell[i]
             iend = ecell[i]
             kT = kT_bins[ikT] + 0.5*dkT

diff -r b5034a8695c3d58a95eb577bbb6a67e7c3d2661d -r 95a477fcf9ac3d51b348404756f07ef466bb0998 yt/analysis_modules/photon_simulator/photon_simulator.py
--- a/yt/analysis_modules/photon_simulator/photon_simulator.py
+++ b/yt/analysis_modules/photon_simulator/photon_simulator.py
@@ -490,7 +490,6 @@
         z_hat = orient.unit_vectors[2]
 
         n_ph = self.photons["NumberOfPhotons"]
-        num_cells = len(n_ph)
         n_ph_tot = n_ph.sum()
         
         eff_area = None
@@ -667,7 +666,6 @@
         tblhdu = hdulist["MATRIX"]
         n_de = len(tblhdu.data["ENERG_LO"])
         mylog.info("Number of energy bins in RMF: %d" % (n_de))
-        de = tblhdu.data["ENERG_HI"] - tblhdu.data["ENERG_LO"]
         mylog.info("Energy limits: %g %g" % (min(tblhdu.data["ENERG_LO"]),
                                              max(tblhdu.data["ENERG_HI"])))
 
@@ -682,7 +680,6 @@
         phYY = events["ypix"][eidxs]
 
         detectedChannels = []
-        pindex = 0
 
         # run through all photon energies and find which bin they go in
         k = 0

diff -r b5034a8695c3d58a95eb577bbb6a67e7c3d2661d -r 95a477fcf9ac3d51b348404756f07ef466bb0998 yt/analysis_modules/sunrise_export/sunrise_exporter.py
--- a/yt/analysis_modules/sunrise_export/sunrise_exporter.py
+++ b/yt/analysis_modules/sunrise_export/sunrise_exporter.py
@@ -128,7 +128,6 @@
     if fni.endswith('.fits'):
         fni = fni.replace('.fits','')
 
-    ndomains_finished = 0
     for (num_halos, domain, halos) in domains_list:
         dle,dre = domain
         print 'exporting: '
@@ -154,7 +153,6 @@
             fh.write("%6.6e \n"%(halo.Rvir*ds['kpc']))
         fh.close()
         export_to_sunrise(ds, fnf, star_particle_type, dle*1.0/dn, dre*1.0/dn)
-        ndomains_finished +=1
 
 def domains_from_halos(ds,halo_list,frvir=0.15):
     domains = {}
@@ -172,8 +170,6 @@
     domains_list = [(len(v),k,v) for k,v in domains.iteritems()]
     domains_list.sort() 
     domains_list.reverse() #we want the most populated domains first
-    domains_limits = [d[1] for d in domains_list]
-    domains_halos  = [d[2] for d in domains_list]
     return domains_list
 
 def prepare_octree(ds,ile,start_level=0,debug=True,dd=None,center=None):
@@ -245,10 +241,6 @@
     hs       = hilbert_state()
     start_time = time.time()
     if debug:
-        if center is not None: 
-            c = center*ds['kpc']
-        else:
-            c = ile*1.0/ds.domain_dimensions*ds['kpc']
         printing = lambda x: print_oct(x)
     else:
         printing = None
@@ -332,7 +324,7 @@
         #then translate onto the subgrid integer index 
         parent_fle  = grid.left_edges + cell_index*grid.dx
         subgrid_ile = np.floor((parent_fle - subgrid.left_edges)/subgrid.dx)
-        for i, (vertex,hilbert_child) in enumerate(hilbert):
+        for (vertex, hilbert_child) in hilbert:
             #vertex is a combination of three 0s and 1s to 
             #denote each of the 8 octs
             if level < 0:

diff -r b5034a8695c3d58a95eb577bbb6a67e7c3d2661d -r 95a477fcf9ac3d51b348404756f07ef466bb0998 yt/analysis_modules/sunyaev_zeldovich/tests/test_projection.py
--- a/yt/analysis_modules/sunyaev_zeldovich/tests/test_projection.py
+++ b/yt/analysis_modules/sunyaev_zeldovich/tests/test_projection.py
@@ -89,8 +89,6 @@
     L = 2 * R * cm_per_kpc
     bbox = np.array([[-0.5,0.5],[-0.5,0.5],[-0.5,0.5]]) * L
 
-    dl = L/nz
-
     ds = load_uniform_grid(data, ddims, length_unit='cm', bbox=bbox)
     ds.index
 

diff -r b5034a8695c3d58a95eb577bbb6a67e7c3d2661d -r 95a477fcf9ac3d51b348404756f07ef466bb0998 yt/data_objects/data_containers.py
--- a/yt/data_objects/data_containers.py
+++ b/yt/data_objects/data_containers.py
@@ -418,7 +418,6 @@
         otherwise Glue will be started.
         """
         from glue.core import DataCollection, Data
-        from glue.core.coordinates import coordinates_from_header
         from glue.qt.glue_application import GlueApplication
         
         gdata = Data(label=label)
@@ -494,6 +493,18 @@
                     ftype = self._current_fluid_type
                     if (ftype, fname) not in self.ds.field_info:
                         ftype = self.ds._last_freq[0]
+
+                # really ugly check to ensure that this field really does exist somewhere,
+                # in some naming convention, before returning it as a possible field type
+                if (ftype,fname) not in self.ds.field_list and \
+                        fname not in self.ds.field_list and \
+                        (ftype,fname) not in self.ds.derived_field_list and \
+                        fname not in self.ds.derived_field_list and \
+                        (ftype,fname) not in self._container_fields:
+                    raise YTFieldNotFound((ftype,fname),self.ds)
+
+            # these tests are really insufficient as a field type may be valid, and the
+            # field name may be valid, but not the combination (field type, field name)
             if finfo.particle_type and ftype not in self.ds.particle_types:
                 raise YTFieldTypeNotFound(ftype)
             elif not finfo.particle_type and ftype not in self.ds.fluid_types:
@@ -621,7 +632,7 @@
                 fields_to_generate.append(field)
                 continue
             fields_to_get.append(field)
-        if len(fields_to_get) == 0 and fields_to_generate == 0:
+        if len(fields_to_get) == 0 and len(fields_to_generate) == 0:
             return
         elif self._locked == True:
             raise GenerationInProgress(fields)

diff -r b5034a8695c3d58a95eb577bbb6a67e7c3d2661d -r 95a477fcf9ac3d51b348404756f07ef466bb0998 yt/data_objects/static_output.py
--- a/yt/data_objects/static_output.py
+++ b/yt/data_objects/static_output.py
@@ -455,8 +455,6 @@
             self._last_freq = field
             self._last_finfo = self.field_info[(ftype, fname)]
             return self._last_finfo
-        if fname == self._last_freq[1]:
-            return self._last_finfo
         if fname in self.field_info:
             # Sometimes, if guessing_type == True, this will be switched for
             # the type of field it is.  So we look at the field type and

diff -r b5034a8695c3d58a95eb577bbb6a67e7c3d2661d -r 95a477fcf9ac3d51b348404756f07ef466bb0998 yt/data_objects/tests/test_spheres.py
--- a/yt/data_objects/tests/test_spheres.py
+++ b/yt/data_objects/tests/test_spheres.py
@@ -6,10 +6,11 @@
     from yt.config import ytcfg
     ytcfg["yt","__withintesting"] = "True"
 
+_fields_to_compare = ("spherical_r", "cylindrical_r",
+                      "spherical_theta", "cylindrical_theta",
+                      "spherical_phi", "cylindrical_z")
+
 def test_domain_sphere():
-    ds = fake_random_ds(16, fields = ("density"))
-    sp = ds.sphere(ds.domain_center, ds.domain_width[0])
-
     # Now we test that we can get different radial velocities based on field
     # parameters.
 
@@ -51,3 +52,12 @@
     yield assert_equal, np.any(rp0["radial_velocity"][rp0.used] ==
                                rp1["radial_velocity"][rp1.used]), \
                                False
+
+    ref_sp = ds.sphere("c", 0.25)
+    for f in _fields_to_compare:
+        ref_sp[f].sort()
+    for center in periodicity_cases(ds):
+        sp = ds.sphere(center, 0.25)
+        for f in _fields_to_compare:
+            sp[f].sort()
+            yield assert_equal, sp[f], ref_sp[f]

diff -r b5034a8695c3d58a95eb577bbb6a67e7c3d2661d -r 95a477fcf9ac3d51b348404756f07ef466bb0998 yt/fields/particle_fields.py
--- a/yt/fields/particle_fields.py
+++ b/yt/fields/particle_fields.py
@@ -322,10 +322,6 @@
     create_magnitude_field(registry, "particle_specific_angular_momentum",
                            "cm**2/s", ftype=ptype, particle_type=True)
     
-    def _particle_angular_momentum(field, data):
-        return data[ptype, "particle_mass"] \
-             * data[ptype, "particle_specific_angular_momentum"]
-
     def _particle_angular_momentum_x(field, data):
         return data[ptype, "particle_mass"] * \
                data[ptype, "particle_specific_angular_momentum_x"]
@@ -350,6 +346,15 @@
              units="g*cm**2/s", particle_type=True,
              validators=[ValidateParameter('center')])
 
+    def _particle_angular_momentum(field, data):
+        return data[ptype, "particle_mass"] \
+            * data[ptype, "particle_specific_angular_momentum"]
+    registry.add_field((ptype, "particle_angular_momentum"),
+              function=_particle_angular_momentum,
+              particle_type=True,
+              units="g*cm**2/s",
+              validators=[ValidateParameter("center")])
+
     create_magnitude_field(registry, "particle_angular_momentum",
                            "g*cm**2/s", ftype=ptype, particle_type=True)
     

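The hunk above moves the particle_angular_momentum definition below its component fields and actually registers it as particle mass times the specific angular momentum, so the magnitude field built afterwards has something to operate on. A toy NumPy sketch of those two relations, using made-up plain arrays in place of yt field data:

    import numpy as np

    particle_mass = np.array([1.0e33, 2.0e33])              # g
    specific_L = np.array([[1.0e20, 0.0, 0.0],
                           [0.0, 2.0e20, 0.0]])             # cm**2/s

    # particle_angular_momentum = mass * specific angular momentum
    L = particle_mass[:, None] * specific_L                 # g*cm**2/s

    # the *_magnitude field is just the Euclidean norm of the components
    L_magnitude = np.sqrt((L ** 2).sum(axis=1))
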
diff -r b5034a8695c3d58a95eb577bbb6a67e7c3d2661d -r 95a477fcf9ac3d51b348404756f07ef466bb0998 yt/fields/vector_operations.py
--- a/yt/fields/vector_operations.py
+++ b/yt/fields/vector_operations.py
@@ -131,7 +131,7 @@
     registry.add_field((ftype, "radial_%s" % basename),
                        function = _radial, units = field_units)
     registry.add_field((ftype, "radial_%s_absolute" % basename),
-                       function = _radial, units = field_units)
+                       function = _radial_absolute, units = field_units)
     registry.add_field((ftype, "tangential_%s" % basename),
                        function=_tangential, units = field_units)
 

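The one-line change above is a bug fix: the "radial_..._absolute" field had been registered with the signed radial function rather than the absolute-value one. A toy NumPy illustration of the difference between the two quantities (hypothetical vectors, not yt's field machinery):

    import numpy as np

    v = np.array([[1.0, 0.0, 0.0],       # pointing outward
                  [-2.0, 0.0, 0.0]])     # pointing inward
    r_hat = np.array([[1.0, 0.0, 0.0],
                      [1.0, 0.0, 0.0]])  # unit radial directions

    radial = (v * r_hat).sum(axis=1)     # signed projection -> [ 1., -2.]
    radial_absolute = np.abs(radial)     # magnitude only    -> [ 1.,  2.]
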
diff -r b5034a8695c3d58a95eb577bbb6a67e7c3d2661d -r 95a477fcf9ac3d51b348404756f07ef466bb0998 yt/frontends/art/tests/test_outputs.py
--- a/yt/frontends/art/tests/test_outputs.py
+++ b/yt/frontends/art/tests/test_outputs.py
@@ -14,11 +14,13 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
-from yt.testing import *
+from yt.testing import \
+    requires_file, \
+    assert_equal
 from yt.utilities.answer_testing.framework import \
     requires_ds, \
-    small_patch_amr, \
     big_patch_amr, \
+    PixelizedProjectionValuesTest, \
     data_dir_load
 from yt.frontends.art.api import ARTDataset
 
@@ -41,3 +43,8 @@
                     yield PixelizedProjectionValuesTest(
                         d9p, axis, field, weight_field,
                         dobj_name)
+
+
+@requires_file(d9p)
+def test_ARTDataset():
+    assert isinstance(data_dir_load(d9p), ARTDataset)

diff -r b5034a8695c3d58a95eb577bbb6a67e7c3d2661d -r 95a477fcf9ac3d51b348404756f07ef466bb0998 yt/frontends/artio/tests/test_outputs.py
--- a/yt/frontends/artio/tests/test_outputs.py
+++ b/yt/frontends/artio/tests/test_outputs.py
@@ -1,5 +1,5 @@
 """
-ARTIO frontend tests 
+ARTIO frontend tests
 
 
 
@@ -24,7 +24,7 @@
 from yt.frontends.artio.api import ARTIODataset
 
 _fields = ("temperature", "density", "velocity_magnitude",
-           ("deposit", "all_density"), ("deposit", "all_count")) 
+           ("deposit", "all_density"), ("deposit", "all_count"))
 
 sizmbhloz = "sizmbhloz-clref04SNth-rs9_a0.9011/sizmbhloz-clref04SNth-rs9_a0.9011.art"
 @requires_ds(sizmbhloz)
@@ -45,3 +45,8 @@
         s1 = dobj["ones"].sum()
         s2 = sum(mask.sum() for block, mask in dobj.blocks)
         yield assert_equal, s1, s2
+
+
+@requires_file(sizmbhloz)
+def test_ARTIODataset():
+    assert isinstance(data_dir_load(sizmbhloz), ARTIODataset)

diff -r b5034a8695c3d58a95eb577bbb6a67e7c3d2661d -r 95a477fcf9ac3d51b348404756f07ef466bb0998 yt/frontends/athena/tests/test_outputs.py
--- a/yt/frontends/athena/tests/test_outputs.py
+++ b/yt/frontends/athena/tests/test_outputs.py
@@ -57,3 +57,8 @@
     for test in small_patch_amr(stripping, _fields_stripping):
         test_stripping.__name__ = test.description
         yield test
+
+
+@requires_file(cloud)
+def test_AthenaDataset():
+    assert isinstance(data_dir_load(cloud), AthenaDataset)

diff -r b5034a8695c3d58a95eb577bbb6a67e7c3d2661d -r 95a477fcf9ac3d51b348404756f07ef466bb0998 yt/frontends/boxlib/data_structures.py
--- a/yt/frontends/boxlib/data_structures.py
+++ b/yt/frontends/boxlib/data_structures.py
@@ -1,5 +1,5 @@
 """
-Data structures for Boxlib Codes 
+Data structures for BoxLib Codes
 
 
 
@@ -15,10 +15,8 @@
 
 import os
 import re
-import weakref
 import itertools
 
-from collections import defaultdict
 from stat import ST_CTIME
 
 import numpy as np
@@ -27,53 +25,46 @@
 from yt.data_objects.grid_patch import AMRGridPatch
 from yt.geometry.grid_geometry_handler import GridIndex
 from yt.data_objects.static_output import Dataset
-from yt.utilities.definitions import \
-    mpc_conversion, sec_conversion
+
 from yt.utilities.parallel_tools.parallel_analysis_interface import \
     parallel_root_only
 from yt.utilities.lib.misc_utilities import \
     get_box_grids_level
-from yt.geometry.selection_routines import \
-    RegionSelector
 from yt.utilities.io_handler import \
     io_registry
-from yt.utilities.physical_constants import \
-    cm_per_mpc
 
 from .fields import \
     BoxlibFieldInfo, \
     MaestroFieldInfo, \
     CastroFieldInfo
 
-from .io import IOHandlerBoxlib
 # This is what we use to find scientific notation that might include d's
 # instead of e's.
 _scinot_finder = re.compile(r"[-+]?[0-9]*\.?[0-9]+([eEdD][-+]?[0-9]+)?")
 # This is the dimensions in the Cell_H file for each level
 # It is different for different dimensionalities, so we make a list
-_dim_finder = [ \
+_dim_finder = [
     re.compile(r"\(\((\d+)\) \((\d+)\) \(\d+\)\)$"),
     re.compile(r"\(\((\d+,\d+)\) \((\d+,\d+)\) \(\d+,\d+\)\)$"),
     re.compile(r"\(\((\d+,\d+,\d+)\) \((\d+,\d+,\d+)\) \(\d+,\d+,\d+\)\)$")]
 # This is the line that prefixes each set of data for a FAB in the FAB file
 # It is different for different dimensionalities, so we make a list
 _endian_regex = r"^FAB \(\(\d+, \([0-9 ]+\)\),\((\d+), \(([0-9 ]+)\)\)\)"
-_header_pattern = [ \
-    re.compile(_endian_regex + 
+_header_pattern = [
+    re.compile(_endian_regex +
                r"\(\((\d+)\) \((\d+)\) \((\d+)\)\) (\d+)\n"),
-    re.compile(_endian_regex + 
+    re.compile(_endian_regex +
                r"\(\((\d+,\d+)\) \((\d+,\d+)\) \((\d+,\d+)\)\) (\d+)\n"),
-    re.compile(_endian_regex + 
+    re.compile(_endian_regex +
                r"\(\((\d+,\d+,\d+)\) \((\d+,\d+,\d+)\) \((\d+,\d+,\d+)\)\) (\d+)\n")]
 
 
-
 class BoxlibGrid(AMRGridPatch):
     _id_offset = 0
     _offset = -1
 
-    def __init__(self, grid_id, offset, filename = None,
-                 index = None):
+    def __init__(self, grid_id, offset, filename=None,
+                 index=None):
         super(BoxlibGrid, self).__init__(grid_id, filename, index)
         self._base_offset = offset
         self._parent_id = []
@@ -126,7 +117,7 @@
         return coords
 
     # Override this as well, since refine_by can vary
-    def _fill_child_mask(self, child, mask, tofill, dlevel = 1):
+    def _fill_child_mask(self, child, mask, tofill, dlevel=1):
         rf = self.ds.ref_factors[self.Level]
         if dlevel != 1:
             raise NotImplementedError
@@ -139,8 +130,10 @@
              startIndex[1]:endIndex[1],
              startIndex[2]:endIndex[2]] = tofill
 
+
 class BoxlibHierarchy(GridIndex):
     grid = BoxlibGrid
+
     def __init__(self, ds, dataset_type='boxlib_native'):
         self.dataset_type = dataset_type
         self.header_filename = os.path.join(ds.output_dir, 'Header')
@@ -149,19 +142,17 @@
         GridIndex.__init__(self, ds, dataset_type)
         self._cache_endianness(self.grids[-1])
 
-        #self._read_particles()
-
     def _parse_index(self):
         """
         read the global header file for an Boxlib plotfile output.
         """
         self.max_level = self.dataset._max_level
-        header_file = open(self.header_filename,'r')
+        header_file = open(self.header_filename, 'r')
 
         self.dimensionality = self.dataset.dimensionality
         _our_dim_finder = _dim_finder[self.dimensionality-1]
-        DRE = self.dataset.domain_right_edge # shortcut
-        DLE = self.dataset.domain_left_edge # shortcut
+        DRE = self.dataset.domain_right_edge  # shortcut
+        DLE = self.dataset.domain_left_edge   # shortcut
 
         # We can now skip to the point in the file we want to start parsing.
         header_file.seek(self.dataset._header_mesh_start)
@@ -190,13 +181,13 @@
         if int(header_file.next()) != 0:
             raise RuntimeError("INTERNAL ERROR! This should be a zero.")
 
-        # each level is one group with ngrids on it. 
-        # each grid has self.dimensionality number of lines of 2 reals 
+        # each level is one group with ngrids on it.
+        # each grid has self.dimensionality number of lines of 2 reals
         self.grids = []
         grid_counter = 0
         for level in range(self.max_level + 1):
             vals = header_file.next().split()
-            lev, ngrids, cur_time = int(vals[0]),int(vals[1]),float(vals[2])
+            lev, ngrids = int(vals[0]), int(vals[1])
             assert(lev == level)
             nsteps = int(header_file.next())
             for gi in range(ngrids):
@@ -232,10 +223,10 @@
             for gi in range(ngrids):
                 # components within it
                 start, stop = _our_dim_finder.match(level_header_file.next()).groups()
-                # fix for non-3d data 
+                # fix for non-3d data
                 # note we append '0' to both ends b/c of the '+1' in dims below
                 start += ',0'*(3-self.dimensionality)
-                stop  += ',0'*(3-self.dimensionality)
+                stop += ',0'*(3-self.dimensionality)
                 start = np.array(start.split(","), dtype="int64")
                 stop = np.array(stop.split(","), dtype="int64")
                 dims = stop - start + 1
@@ -259,7 +250,7 @@
             # already read the filenames above...
         self.float_type = 'float64'
 
-    def _cache_endianness(self,test_grid):
+    def _cache_endianness(self, test_grid):
         """
         Cache the endianness and bytes perreal of the grids by using a
         test grid and assuming that all grids have the same
@@ -270,7 +261,7 @@
         # open the test file & grab the header
         with open(os.path.expanduser(test_grid.filename), 'rb') as f:
             header = f.readline()
-        
+
         bpr, endian, start, stop, centering, nc = \
             _header_pattern[self.dimensionality-1].search(header).groups()
         # Note that previously we were using a different value for BPR than we
@@ -294,7 +285,8 @@
         self.grids = np.array(self.grids, dtype='object')
         self._reconstruct_parent_child()
         for i, grid in enumerate(self.grids):
-            if (i%1e4) == 0: mylog.debug("Prepared % 7i / % 7i grids", i, self.num_grids)
+            if (i % 1e4) == 0: mylog.debug("Prepared % 7i / % 7i grids", i,
+                                           self.num_grids)
             grid._prepare_grid()
             grid._setup_dx()
         mylog.debug("Done creating grid objects")
@@ -308,10 +300,10 @@
                                 self.grid_levels[i] + 1,
                                 self.grid_left_edge, self.grid_right_edge,
                                 self.grid_levels, mask)
-            ids = np.where(mask.astype("bool")) # where is a tuple
-            grid._children_ids = ids[0] + grid._id_offset 
+            ids = np.where(mask.astype("bool"))  # where is a tuple
+            grid._children_ids = ids[0] + grid._id_offset
         mylog.debug("Second pass; identifying parents")
-        for i, grid in enumerate(self.grids): # Second pass
+        for i, grid in enumerate(self.grids):  # Second pass
             for child in grid.Children:
                 child._parent_id.append(i + grid._id_offset)
 
@@ -331,10 +323,10 @@
         for line in header_file:
             if len(line.split()) != 3: continue
             self.num_grids += int(line.split()[1])
-        
+
     def _initialize_grid_arrays(self):
         super(BoxlibHierarchy, self)._initialize_grid_arrays()
-        self.grid_start_index = np.zeros((self.num_grids,3), 'int64')
+        self.grid_start_index = np.zeros((self.num_grids, 3), 'int64')
 
     def _initialize_state_variables(self):
         """override to not re-initialize num_grids in AMRHierarchy.__init__
@@ -349,7 +341,7 @@
         self.field_list = [("boxlib", f) for f in
                            self.dataset._field_list]
         self.field_indexes = dict((f[1], i)
-                                for i, f in enumerate(self.field_list))
+                                  for i, f in enumerate(self.field_list))
         # There are times when field_list may change.  We copy it here to
         # avoid that possibility.
         self.field_order = [f for f in self.field_list]
@@ -357,6 +349,7 @@
     def _setup_data_io(self):
         self.io = io_registry[self.dataset_type](self.dataset)
 
+
 class BoxlibDataset(Dataset):
     """
     This class is a stripped down class that simply reads and parses
@@ -370,10 +363,10 @@
     periodicity = (True, True, True)
 
     def __init__(self, output_dir,
-                 cparam_filename = "inputs",
-                 fparam_filename = "probin",
+                 cparam_filename="inputs",
+                 fparam_filename="probin",
                  dataset_type='boxlib_native',
-                 storage_filename = None):
+                 storage_filename=None):
         """
         The paramfile is usually called "inputs"
         and there may be a fortran inputs file usually called "probin"
@@ -390,14 +383,13 @@
         Dataset.__init__(self, output_dir, dataset_type)
 
         # These are still used in a few places.
-        if not "HydroMethod" in self.parameters.keys():
+        if "HydroMethod" not in self.parameters.keys():
             self.parameters["HydroMethod"] = 'boxlib'
-        self.parameters["Time"] = 1. # default unit is 1...
-        self.parameters["EOSType"] = -1 # default
+        self.parameters["Time"] = 1.     # default unit is 1...
+        self.parameters["EOSType"] = -1  # default
         self.parameters["gamma"] = self.parameters.get(
             "materials.gamma", 1.6667)
 
-
     def _localize_check(self, fn):
         # If the file exists, use it.  If not, set it to None.
         root_dir = os.path.dirname(self.output_dir)
@@ -410,6 +402,8 @@
     def _is_valid(cls, *args, **kwargs):
         # fill our args
         output_dir = args[0]
+        # boxlib datasets are always directories
+        if not os.path.isdir(output_dir): return False
         header_filename = os.path.join(output_dir, "Header")
         jobinfo_filename = os.path.join(output_dir, "job_info")
         if not os.path.exists(header_filename):
@@ -418,11 +412,11 @@
         args = inspect.getcallargs(cls.__init__, args, kwargs)
         # This might need to be localized somehow
         inputs_filename = os.path.join(
-                            os.path.dirname(os.path.abspath(output_dir)),
-                            args['cparam_filename'])
+            os.path.dirname(os.path.abspath(output_dir)),
+            args['cparam_filename'])
         if not os.path.exists(inputs_filename) and \
            not os.path.exists(jobinfo_filename):
-            return True # We have no parameters to go off of
+            return True  # We have no parameters to go off of
         # If we do have either inputs or jobinfo, we should be deferring to a
         # different frontend.
         return False
@@ -464,7 +458,7 @@
             self.omega_lambda = self.parameters["comoving_OmL"]
             self.omega_matter = self.parameters["comoving_OmM"]
             self.hubble_constant = self.parameters["comoving_h"]
-            a_file = open(os.path.join(self.output_dir,'comoving_a'))
+            a_file = open(os.path.join(self.output_dir, 'comoving_a'))
             line = a_file.readline().strip()
             a_file.close()
             self.current_redshift = 1/float(line) - 1
@@ -491,7 +485,7 @@
             # So we'll try to determine this.
             vals = vals.split()
             if any(_scinot_finder.match(v) for v in vals):
-                vals = [float(v.replace("D","e").replace("d","e"))
+                vals = [float(v.replace("D", "e").replace("d", "e"))
                         for v in vals]
             if len(vals) == 1:
                 vals = vals[0]
@@ -509,22 +503,22 @@
         # call readline() if we want to end up with an offset at the very end.
         # Fortunately, elsewhere we don't care about the offset, so we're fine
         # everywhere else using iteration exclusively.
-        header_file = open(os.path.join(self.output_dir,'Header'))
+        header_file = open(os.path.join(self.output_dir, 'Header'))
         self.orion_version = header_file.readline().rstrip()
         n_fields = int(header_file.readline())
 
         self._field_list = [header_file.readline().strip()
-                           for i in range(n_fields)]
+                            for i in range(n_fields)]
 
         self.dimensionality = int(header_file.readline())
         self.current_time = float(header_file.readline())
         # This is traditionally a index attribute, so we will set it, but
         # in a slightly hidden variable.
-        self._max_level = int(header_file.readline()) 
+        self._max_level = int(header_file.readline())
         self.domain_left_edge = np.array(header_file.readline().split(),
                                          dtype="float64")
         self.domain_right_edge = np.array(header_file.readline().split(),
-                                         dtype="float64")
+                                          dtype="float64")
         ref_factors = np.array([int(i) for i in
                                 header_file.readline().split()])
         if ref_factors.size == 0:
@@ -540,26 +534,26 @@
             self.refine_by = min(ref_factors)
             # Check that they're all multiples of the minimum.
             if not all(float(rf)/self.refine_by ==
-                   int(float(rf)/self.refine_by) for rf in ref_factors):
+                       int(float(rf)/self.refine_by) for rf in ref_factors):
                 raise RuntimeError
             base_log = np.log2(self.refine_by)
-            self.level_offsets = [0] # level 0 has to have 0 offset
+            self.level_offsets = [0]  # level 0 has to have 0 offset
             lo = 0
             for lm1, rf in enumerate(self.ref_factors):
                 lo += int(np.log2(rf) / base_log) - 1
                 self.level_offsets.append(lo)
-        #assert(np.unique(ref_factors).size == 1)
+        # assert(np.unique(ref_factors).size == 1)
         else:
             self.refine_by = ref_factors[0]
             self.level_offsets = [0 for l in range(self._max_level + 1)]
-        # Now we read the global index space, to get 
+        # Now we read the global index space, to get
         index_space = header_file.readline()
         # This will be of the form:
         #  ((0,0,0) (255,255,255) (0,0,0)) ((0,0,0) (511,511,511) (0,0,0))
         # So note that if we split it all up based on spaces, we should be
         # fine, as long as we take the first two entries, which correspond to
         # the root level.  I'm not 100% pleased with this solution.
-        root_space = index_space.replace("(","").replace(")","").split()[:2]
+        root_space = index_space.replace("(", "").replace(")", "").split()[:2]
         start = np.array(root_space[0].split(","), dtype="int64")
         stop = np.array(root_space[1].split(","), dtype="int64")
         self.domain_dimensions = stop - start + 1
@@ -582,9 +576,9 @@
             raise RuntimeError("yt does not yet support spherical geometry")
 
         # overrides for 1/2-dimensional data
-        if self.dimensionality == 1: 
+        if self.dimensionality == 1:
             self._setup1d()
-        elif self.dimensionality == 2: 
+        elif self.dimensionality == 2:
             self._setup2d()
 
     def _set_code_unit_attributes(self):
@@ -594,20 +588,20 @@
         self.velocity_unit = self.quan(1.0, "cm/s")
 
     def _setup1d(self):
-#        self._index_class = BoxlibHierarchy1D
-#        self._fieldinfo_fallback = Orion1DFieldInfo
+        # self._index_class = BoxlibHierarchy1D
+        # self._fieldinfo_fallback = Orion1DFieldInfo
         self.domain_left_edge = \
             np.concatenate([self.domain_left_edge, [0.0, 0.0]])
         self.domain_right_edge = \
             np.concatenate([self.domain_right_edge, [1.0, 1.0]])
         tmp = self.domain_dimensions.tolist()
-        tmp.extend((1,1))
+        tmp.extend((1, 1))
         self.domain_dimensions = np.array(tmp)
         tmp = list(self.periodicity)
         tmp[1] = False
         tmp[2] = False
         self.periodicity = ensure_tuple(tmp)
-        
+
     def _setup2d(self):
         self.domain_left_edge = \
             np.concatenate([self.domain_left_edge, [0.0]])
@@ -636,12 +630,13 @@
         offset = self.level_offsets[l1] - self.level_offsets[l0]
         return self.refine_by**(l1-l0 + offset)
 
+
 class OrionHierarchy(BoxlibHierarchy):
-    
+
     def __init__(self, ds, dataset_type='orion_native'):
         BoxlibHierarchy.__init__(self, ds, dataset_type)
         self._read_particles()
-        #self.io = IOHandlerOrion
+        # self.io = IOHandlerOrion
 
     def _read_particles(self):
         """
@@ -673,7 +668,7 @@
                 coord = [particle_position_x, particle_position_y, particle_position_z]
                 # for each particle, determine which grids contain it
                 # copied from object_finding_mixin.py
-                mask=np.ones(self.num_grids)
+                mask = np.ones(self.num_grids)
                 for i in xrange(len(coord)):
                     np.choose(np.greater(self.grid_left_edge[:,i],coord[i]), (mask,0), mask)
                     np.choose(np.greater(self.grid_right_edge[:,i],coord[i]), (0,mask), mask)
@@ -688,39 +683,42 @@
                     self.grid_particle_count[ind] += 1
                     self.grids[ind].NumberOfParticles += 1
         return True
-                
+
+
 class OrionDataset(BoxlibDataset):
 
     _index_class = OrionHierarchy
 
     def __init__(self, output_dir,
-                 cparam_filename = "inputs",
-                 fparam_filename = "probin",
+                 cparam_filename="inputs",
+                 fparam_filename="probin",
                  dataset_type='orion_native',
-                 storage_filename = None):
+                 storage_filename=None):
 
         BoxlibDataset.__init__(self, output_dir,
-                 cparam_filename, fparam_filename, dataset_type)
-          
+                               cparam_filename, fparam_filename, dataset_type)
+
     @classmethod
     def _is_valid(cls, *args, **kwargs):
-        # fill our args                                                                               
+        # fill our args
         output_dir = args[0]
+        # boxlib datasets are always directories
+        if not os.path.isdir(output_dir): return False
         header_filename = os.path.join(output_dir, "Header")
         jobinfo_filename = os.path.join(output_dir, "job_info")
         if not os.path.exists(header_filename):
-            # We *know* it's not boxlib if Header doesn't exist.                                      
+            # We *know* it's not boxlib if Header doesn't exist.
             return False
         args = inspect.getcallargs(cls.__init__, args, kwargs)
-        # This might need to be localized somehow                                                     
+        # This might need to be localized somehow
         inputs_filename = os.path.join(
-                            os.path.dirname(os.path.abspath(output_dir)),
-                            args['cparam_filename'])
+            os.path.dirname(os.path.abspath(output_dir)),
+            args['cparam_filename'])
         if not os.path.exists(inputs_filename):
             return False
         if os.path.exists(jobinfo_filename):
             return False
-        # Now we check for all the others                                                             
+        # Now we check for all the others
         lines = open(inputs_filename).readlines()
         if any(("castro." in line for line in lines)): return False
         if any(("nyx." in line for line in lines)): return False
@@ -728,6 +726,7 @@
         if any(("geometry.prob_lo" in line for line in lines)): return True
         return False
 
+
 class CastroDataset(BoxlibDataset):
 
     _field_info_class = CastroFieldInfo
@@ -736,6 +735,8 @@
     def _is_valid(cls, *args, **kwargs):
         # fill our args
         output_dir = args[0]
+        # boxlib datasets are always directories
+        if not os.path.isdir(output_dir): return False
         header_filename = os.path.join(output_dir, "Header")
         jobinfo_filename = os.path.join(output_dir, "job_info")
         if not os.path.exists(header_filename):
@@ -748,6 +749,7 @@
         if any(line.startswith("Castro   ") for line in lines): return True
         return False
 
+
 class MaestroDataset(BoxlibDataset):
 
     _field_info_class = MaestroFieldInfo
@@ -756,6 +758,8 @@
     def _is_valid(cls, *args, **kwargs):
         # fill our args
         output_dir = args[0]
+        # boxlib datasets are always directories
+        if not os.path.isdir(output_dir): return False
         header_filename = os.path.join(output_dir, "Header")
         jobinfo_filename = os.path.join(output_dir, "job_info")
         if not os.path.exists(header_filename):
@@ -765,7 +769,7 @@
             return False
         # Now we check the job_info for the mention of maestro
         lines = open(jobinfo_filename).readlines()
-        if any("maestro" in line.lower() for line in lines): return True
+        if any(line.startswith("MAESTRO   ") for line in lines): return True
         return False
 
     def _parse_parameter_file(self):
@@ -782,7 +786,7 @@
                 line = f.next()
             # get the runtime parameters
             for line in f:
-                p, v = (_.strip() for _ in line[4:].split("=",1))
+                p, v = (_.strip() for _ in line[4:].split("=", 1))
                 if len(v) == 0:
                     self.parameters[p] = ""
                 else:
@@ -827,7 +831,7 @@
         maxlevel = int(header.readline()) # max level
 
         # Skip over how many grids on each level; this is degenerate
-        for i in range(maxlevel + 1):dummy = header.readline()
+        for i in range(maxlevel + 1): dummy = header.readline()
 
         grid_info = np.fromiter((int(i) for line in header.readlines()
                                  for i in line.split()),
@@ -844,6 +848,7 @@
 
         self.grid_particle_count[:, 0] = grid_info[:, 1]
 
+
 class NyxDataset(BoxlibDataset):
 
     _index_class = NyxHierarchy
@@ -852,6 +857,8 @@
     def _is_valid(cls, *args, **kwargs):
         # fill our args
         pname = args[0].rstrip("/")
+        # boxlib datasets are always directories
+        if not os.path.isdir(pname): return False
         dn = os.path.dirname(pname)
         if len(args) > 1:
             kwargs['paramFilename'] = args[1]
@@ -862,15 +869,13 @@
         # We check for the job_info file's existence because this is currently
         # what distinguishes Nyx data from MAESTRO data.
         pfn = os.path.join(pfname)
-        if not os.path.exists(pfn): return False
+        if not os.path.exists(pfn) or os.path.isdir(pfn): return False
         nyx = any(("nyx." in line for line in open(pfn)))
-        maestro = os.path.exists(os.path.join(pname, "job_info"))
-        orion = (not nyx) and (not maestro)
         return nyx
 
     def _parse_parameter_file(self):
         super(NyxDataset, self)._parse_parameter_file()
-        #return
+        # return
         # Nyx is always cosmological.
         self.cosmological_simulation = 1
         self.omega_lambda = self.parameters["comoving_OmL"]
@@ -904,7 +909,7 @@
     v = vals.split()[0] # Just in case there are multiple; we'll go
                         # back afterward to using vals.
     try:
-        float(v.upper().replace("D","E"))
+        float(v.upper().replace("D", "E"))
     except:
         pcast = str
         if v in ("F", "T"):

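Alongside the style cleanup, each BoxLib-family _is_valid above now rejects anything that is not a directory before probing for a Header file, since these plotfiles are always directories. A minimal sketch of that validation pattern (the function name is illustrative, not yt's API):

    import os

    def looks_like_boxlib_output(output_dir):
        # BoxLib-style plotfiles are directories containing a 'Header' file,
        # so anything that is not a directory can be rejected immediately.
        if not os.path.isdir(output_dir):
            return False
        return os.path.exists(os.path.join(output_dir, "Header"))
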
diff -r b5034a8695c3d58a95eb577bbb6a67e7c3d2661d -r 95a477fcf9ac3d51b348404756f07ef466bb0998 yt/frontends/boxlib/tests/test_orion.py
--- a/yt/frontends/boxlib/tests/test_orion.py
+++ b/yt/frontends/boxlib/tests/test_orion.py
@@ -42,3 +42,8 @@
     for test in small_patch_amr(rt, _fields):
         test_radtube.__name__ = test.description
         yield test
+
+
+@requires_file(rt)
+def test_OrionDataset():
+    assert isinstance(data_dir_load(rt), OrionDataset)

diff -r b5034a8695c3d58a95eb577bbb6a67e7c3d2661d -r 95a477fcf9ac3d51b348404756f07ef466bb0998 yt/frontends/chombo/tests/test_outputs.py
--- a/yt/frontends/chombo/tests/test_outputs.py
+++ b/yt/frontends/chombo/tests/test_outputs.py
@@ -13,15 +13,18 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
-from yt.testing import *
+from yt.testing import \
+    requires_file, \
+    assert_equal
 from yt.utilities.answer_testing.framework import \
     requires_ds, \
     small_patch_amr, \
-    big_patch_amr, \
     data_dir_load
-from yt.frontends.chombo.api import ChomboDataset
+from yt.frontends.chombo.api import \
+    ChomboDataset, \
+    Orion2Dataset
 
-_fields = ("density", "velocity_magnitude", #"velocity_divergence",
+_fields = ("density", "velocity_magnitude",  # "velocity_divergence",
            "magnetic_field_x")
 
 gc = "GaussianCloud/data.0077.3d.hdf5"
@@ -49,6 +52,22 @@
 def test_zp():
     ds = data_dir_load(zp)
     yield assert_equal, str(ds), "plt32.2d.hdf5"
-    for test in small_patch_amr(zp, _zp_fields, input_center="c", input_weight="rhs"):
+    for test in small_patch_amr(zp, _zp_fields, input_center="c",
+                                input_weight="rhs"):
         test_tb.__name__ = test.description
         yield test
+
+
+@requires_file(zp)
+def test_ChomboDataset():
+    assert isinstance(data_dir_load(zp), ChomboDataset)
+
+
+@requires_file(gc)
+def test_Orion2Dataset():
+    assert isinstance(data_dir_load(gc), Orion2Dataset)
+
+
+#@requires_file(kho)
+#def test_PlutoDataset():
+#    assert isinstance(data_dir_load(kho), PlutoDataset)

diff -r b5034a8695c3d58a95eb577bbb6a67e7c3d2661d -r 95a477fcf9ac3d51b348404756f07ef466bb0998 yt/frontends/flash/tests/test_outputs.py
--- a/yt/frontends/flash/tests/test_outputs.py
+++ b/yt/frontends/flash/tests/test_outputs.py
@@ -42,3 +42,8 @@
     for test in small_patch_amr(wt, _fields_2d):
         test_wind_tunnel.__name__ = test.description
         yield test
+
+
+@requires_file(wt)
+def test_FLASHDataset():
+    assert isinstance(data_dir_load(wt), FLASHDataset)

diff -r b5034a8695c3d58a95eb577bbb6a67e7c3d2661d -r 95a477fcf9ac3d51b348404756f07ef466bb0998 yt/frontends/moab/tests/test_c5.py
--- a/yt/frontends/moab/tests/test_c5.py
+++ b/yt/frontends/moab/tests/test_c5.py
@@ -56,3 +56,7 @@
         for dobj_name in dso:
             yield FieldValuesTest(c5, field, dobj_name)
 
+
+@requires_file(c5)
+def test_MoabHex8Dataset():
+    assert isinstance(data_dir_load(c5), MoabHex8Dataset)

diff -r b5034a8695c3d58a95eb577bbb6a67e7c3d2661d -r 95a477fcf9ac3d51b348404756f07ef466bb0998 yt/frontends/ramses/fields.py
--- a/yt/frontends/ramses/fields.py
+++ b/yt/frontends/ramses/fields.py
@@ -94,8 +94,9 @@
             return rv
         self.add_field(("gas", "temperature"), function=_temperature,
                         units="K")
+        self.create_cooling_fields()
 
-    def create_cooling_fields(self, filename):
+    def create_cooling_fields(self):
         num = os.path.basename(self.ds.parameter_filename).split("."
                 )[0].split("_")[1]
         filename = "%s/cooling_%05i.out" % (
@@ -104,7 +105,7 @@
         if not os.path.exists(filename): return
         def _create_field(name, interp_object):
             def _func(field, data):
-                shape = data["Temperature"].shape
+                shape = data["temperature"].shape
                 d = {'lognH': np.log10(_X*data["density"]/mh).ravel(),
                      'logT' : np.log10(data["temperature"]).ravel()}
                 rv = 10**interp_object(d).reshape(shape)
@@ -131,4 +132,4 @@
             interp = BilinearFieldInterpolator(tvals[n],
                         (avals["lognH"], avals["logT"]),
                         ["lognH", "logT"], truncate = True)
-            _create_field(n, interp)
+            _create_field(("gas", n), interp)

diff -r b5034a8695c3d58a95eb577bbb6a67e7c3d2661d -r 95a477fcf9ac3d51b348404756f07ef466bb0998 yt/frontends/ramses/tests/test_outputs.py
--- a/yt/frontends/ramses/tests/test_outputs.py
+++ b/yt/frontends/ramses/tests/test_outputs.py
@@ -1,5 +1,5 @@
 """
-RAMSES frontend tests 
+RAMSES frontend tests
 
 
 
@@ -21,10 +21,10 @@
     PixelizedProjectionValuesTest, \
     FieldValuesTest, \
     create_obj
-from yt.frontends.artio.api import ARTIODataset
+from yt.frontends.ramses.api import RAMSESDataset
 
 _fields = ("temperature", "density", "velocity_magnitude",
-           ("deposit", "all_density"), ("deposit", "all_count")) 
+           ("deposit", "all_density"), ("deposit", "all_count"))
 
 output_00080 = "output_00080/info_00080.txt"
 @requires_ds(output_00080)
@@ -44,3 +44,8 @@
         s1 = dobj["ones"].sum()
         s2 = sum(mask.sum() for block, mask in dobj.blocks)
         yield assert_equal, s1, s2
+
+
+@requires_file(output_00080)
+def test_RAMSESDataset():
+    assert isinstance(data_dir_load(output_00080), RAMSESDataset)

diff -r b5034a8695c3d58a95eb577bbb6a67e7c3d2661d -r 95a477fcf9ac3d51b348404756f07ef466bb0998 yt/frontends/sph/io.py
--- a/yt/frontends/sph/io.py
+++ b/yt/frontends/sph/io.py
@@ -53,7 +53,7 @@
     _vector_fields = ("Coordinates", "Velocity", "Velocities")
     _known_ptypes = ghdf5_ptypes
     _var_mass = None
-    _element_names = ('Hydrogen', 'Helium', 'Carbon', 'Nitrogen', 'Oxygen', 
+    _element_names = ('Hydrogen', 'Helium', 'Carbon', 'Nitrogen', 'Oxygen',
                        'Neon', 'Magnesium', 'Silicon', 'Iron' )
 
 
@@ -81,6 +81,8 @@
             f = _get_h5_handle(data_file.filename)
             # This double-reads
             for ptype, field_list in sorted(ptf.items()):
+                if data_file.total_particles[ptype] == 0:
+                    continue
                 x = f["/%s/Coordinates" % ptype][:,0].astype("float64")
                 y = f["/%s/Coordinates" % ptype][:,1].astype("float64")
                 z = f["/%s/Coordinates" % ptype][:,2].astype("float64")
@@ -96,6 +98,8 @@
         for data_file in sorted(data_files):
             f = _get_h5_handle(data_file.filename)
             for ptype, field_list in sorted(ptf.items()):
+                if data_file.total_particles[ptype] == 0:
+                    continue
                 g = f["/%s" % ptype]
                 coords = g["Coordinates"][:].astype("float64")
                 mask = selector.select_points(
@@ -103,11 +107,11 @@
                 del coords
                 if mask is None: continue
                 for field in field_list:
-                    
+
                     if field in ("Mass", "Masses") and \
                         ptype not in self.var_mass:
                         data = np.empty(mask.sum(), dtype="float64")
-                        ind = self._known_ptypes.index(ptype) 
+                        ind = self._known_ptypes.index(ptype)
                         data[:] = self.ds["Massarr"][ind]
 
                     elif field in self._element_names:
@@ -152,7 +156,7 @@
         f = _get_h5_handle(data_file.filename)
         pcount = f["/Header"].attrs["NumPart_ThisFile"][:]
         f.close()
-        npart = dict(("PartType%s" % (i), v) for i, v in enumerate(pcount)) 
+        npart = dict(("PartType%s" % (i), v) for i, v in enumerate(pcount))
         return npart
 
 
@@ -164,7 +168,7 @@
 
         # loop over all keys in OWLS hdf5 file
         #--------------------------------------------------
-        for key in f.keys():   
+        for key in f.keys():
 
             # only want particle data
             #--------------------------------------
@@ -334,7 +338,7 @@
 
     def _count_particles(self, data_file):
         npart = dict((self._ptypes[i], v)
-            for i, v in enumerate(data_file.header["Npart"])) 
+            for i, v in enumerate(data_file.header["Npart"]))
         return npart
 
     # header is 256, but we have 4 at beginning and end for ints
@@ -443,13 +447,13 @@
         dtype = None
         # We need to do some fairly ugly detection to see what format the auxiliary
         # files are in.  They can be either ascii or binary, and the binary files can be
-        # either floats, ints, or doubles.  We're going to use a try-catch cascade to 
+        # either floats, ints, or doubles.  We're going to use a try-catch cascade to
         # determine the format.
         try:#ASCII
             auxdata = np.genfromtxt(filename, skip_header=1)
             if auxdata.size != np.sum(data_file.total_particles.values()):
                 print "Error reading auxiliary tipsy file"
-                raise RuntimeError 
+                raise RuntimeError
         except ValueError:#binary/xdr
             f = open(filename, 'rb')
             l = struct.unpack(data_file.ds.endian+"i", f.read(4))[0]
@@ -469,7 +473,7 @@
                 except struct.error: # None of the binary attempts to read succeeded
                     print "Error reading auxiliary tipsy file"
                     raise RuntimeError
-            
+
         # Use the mask to slice out the appropriate particle type data
         if mask.size == data_file.total_particles['Gas']:
             return auxdata[:data_file.total_particles['Gas']]
@@ -556,14 +560,14 @@
 
     def _update_domain(self, data_file):
         '''
-        This method is used to determine the size needed for a box that will 
+        This method is used to determine the size needed for a box that will
         bound the particles.  It simply finds the largest position of the
         whole set of particles, and sets the domain to +/- that value.
         '''
         ds = data_file.ds
         ind = 0
         # Check to make sure that the domain hasn't already been set
-        # by the parameter file 
+        # by the parameter file
         if np.all(np.isfinite(ds.domain_left_edge)) and np.all(np.isfinite(ds.domain_right_edge)):
             return
         with open(data_file.filename, "rb") as f:
@@ -682,11 +686,11 @@
                 continue
             field_list.append((ptype, field))
         if any(["Gas"==f[0] for f in field_list]): #Add the auxiliary fields to each ptype we have
-            field_list += [("Gas",a) for a in self._aux_fields] 
+            field_list += [("Gas",a) for a in self._aux_fields]
         if any(["DarkMatter"==f[0] for f in field_list]):
-            field_list += [("DarkMatter",a) for a in self._aux_fields] 
+            field_list += [("DarkMatter",a) for a in self._aux_fields]
         if any(["Stars"==f[0] for f in field_list]):
-            field_list += [("Stars",a) for a in self._aux_fields] 
+            field_list += [("Stars",a) for a in self._aux_fields]
         self._field_list = field_list
         return self._field_list
 
@@ -706,11 +710,11 @@
 class IOHandlerHTTPStream(BaseIOHandler):
     _dataset_type = "http_particle_stream"
     _vector_fields = ("Coordinates", "Velocity", "Velocities")
-    
+
     def __init__(self, ds):
         if requests is None:
             raise RuntimeError
-        self._url = ds.base_url 
+        self._url = ds.base_url
         # This should eventually manage the IO and cache it
         self.total_bytes = 0
         super(IOHandlerHTTPStream, self).__init__(ds)

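Two of the hunks above skip particle types whose count is zero before touching their HDF5 groups, which avoids reading groups that may be absent from a given file. A rough sketch of that guard with h5py, assuming a hypothetical particle_counts dict of the kind built from NumPart_ThisFile:

    import h5py

    def iter_nonempty_coordinates(filename, particle_counts, ptypes):
        # Yield coordinates only for particle types that actually have
        # particles in this file; empty types may have no group at all.
        with h5py.File(filename, "r") as f:
            for ptype in ptypes:
                if particle_counts.get(ptype, 0) == 0:
                    continue
                coords = f["/%s/Coordinates" % ptype][:].astype("float64")
                yield ptype, coords
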
diff -r b5034a8695c3d58a95eb577bbb6a67e7c3d2661d -r 95a477fcf9ac3d51b348404756f07ef466bb0998 yt/frontends/sph/tests/test_owls.py
--- a/yt/frontends/sph/tests/test_owls.py
+++ b/yt/frontends/sph/tests/test_owls.py
@@ -53,3 +53,8 @@
         s1 = dobj["ones"].sum()
         s2 = sum(mask.sum() for block, mask in dobj.blocks)
         yield assert_equal, s1, s2
+
+
+@requires_file(os33)
+def test_OWLSDataset():
+    assert isinstance(data_dir_load(os33), OWLSDataset)

diff -r b5034a8695c3d58a95eb577bbb6a67e7c3d2661d -r 95a477fcf9ac3d51b348404756f07ef466bb0998 yt/frontends/sph/tests/test_tipsy.py
--- a/yt/frontends/sph/tests/test_tipsy.py
+++ b/yt/frontends/sph/tests/test_tipsy.py
@@ -92,3 +92,8 @@
         s1 = dobj["ones"].sum()
         s2 = sum(mask.sum() for block, mask in dobj.blocks)
         yield assert_equal, s1, s2
+
+
+@requires_file(pkdgrav)
+def test_TipsyDataset():
+    assert isinstance(data_dir_load(pkdgrav), TipsyDataset)

diff -r b5034a8695c3d58a95eb577bbb6a67e7c3d2661d -r 95a477fcf9ac3d51b348404756f07ef466bb0998 yt/funcs.py
--- a/yt/funcs.py
+++ b/yt/funcs.py
@@ -270,7 +270,6 @@
 
     api_version = get_ipython_api_version()
 
-    stack = inspect.stack()
     frame = inspect.stack()[num_up]
     loc = frame[0].f_locals.copy()
     glo = frame[0].f_globals
@@ -537,7 +536,6 @@
     return version_info
 
 def get_script_contents():
-    stack = inspect.stack()
     top_frame = inspect.stack()[-1]
     finfo = inspect.getframeinfo(top_frame[0])
     if finfo[2] != "<module>": return None
@@ -749,6 +747,7 @@
     return _func
     
 def enable_plugins():
+    import yt
     from yt.config import ytcfg
     my_plugin_name = ytcfg.get("yt","pluginfilename")
     # We assume that it is with respect to the $HOME/.yt directory
@@ -758,7 +757,7 @@
         _fn = os.path.expanduser("~/.yt/%s" % my_plugin_name)
     if os.path.isfile(_fn):
         mylog.info("Loading plugins from %s", _fn)
-        execfile(_fn)
+        execfile(_fn, yt.__dict__)
 
 def fix_unitary(u):
     if u == '1':

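The enable_plugins change above executes the plugin file inside yt's own namespace, so plugin code can refer to yt's names without importing them. A tiny sketch of the mechanism with a made-up namespace (execfile(fn, namespace) is the Python 2 spelling used in the diff; exec(code, namespace) behaves the same way):

    # Hypothetical stand-in for the yt module namespace.
    namespace = {"add_one": lambda x: x + 1}

    plugin_code = "result = add_one(41)"
    exec(plugin_code, namespace)       # the plugin sees 'add_one' directly
    assert namespace["result"] == 42
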
diff -r b5034a8695c3d58a95eb577bbb6a67e7c3d2661d -r 95a477fcf9ac3d51b348404756f07ef466bb0998 yt/geometry/geometry_handler.py
--- a/yt/geometry/geometry_handler.py
+++ b/yt/geometry/geometry_handler.py
@@ -214,40 +214,37 @@
         for ftype, fname in fields:
             if fname in self.field_list or (ftype, fname) in self.field_list:
                 fields_to_read.append((ftype, fname))
+            elif fname in self.ds.derived_field_list or (ftype, fname) in self.ds.derived_field_list:
+                fields_to_generate.append((ftype, fname))
             else:
-                fields_to_generate.append((ftype, fname))
+                raise YTFieldNotFound((ftype,fname), self.ds)
         return fields_to_read, fields_to_generate
 
     def _read_particle_fields(self, fields, dobj, chunk = None):
         if len(fields) == 0: return {}, []
+        fields_to_read, fields_to_generate = self._split_fields(fields)
+        if len(fields_to_read) == 0:
+            return {}, fields_to_generate
         selector = dobj.selector
         if chunk is None:
             self._identify_base_chunk(dobj)
-        fields_to_return = {}
-        fields_to_read, fields_to_generate = self._split_fields(fields)
-        if len(fields_to_read) == 0:
-            return {}, fields_to_generate
         fields_to_return = self.io._read_particle_selection(
             self._chunk_io(dobj, cache = False),
             selector,
             fields_to_read)
-        for field in fields_to_read:
-            ftype, fname = field
-            finfo = self.ds._get_field_info(*field)
         return fields_to_return, fields_to_generate
 
     def _read_fluid_fields(self, fields, dobj, chunk = None):
         if len(fields) == 0: return {}, []
+        fields_to_read, fields_to_generate = self._split_fields(fields)
+        if len(fields_to_read) == 0:
+            return {}, fields_to_generate
         selector = dobj.selector
         if chunk is None:
             self._identify_base_chunk(dobj)
             chunk_size = dobj.size
         else:
             chunk_size = chunk.data_size
-        fields_to_return = {}
-        fields_to_read, fields_to_generate = self._split_fields(fields)
-        if len(fields_to_read) == 0:
-            return {}, fields_to_generate
         fields_to_return = self.io._read_fluid_selection(
             self._chunk_io(dobj),
             selector,

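The _split_fields change above partitions the requested fields into those that can be read from disk and those that must be generated as derived fields, and raises YTFieldNotFound for anything in neither list; the read helpers now perform this split before building selectors. A simplified sketch of the partitioning using plain sets rather than yt's index objects:

    class FieldNotFound(Exception):
        pass

    def split_fields(fields, on_disk, derived):
        to_read, to_generate = [], []
        for ftype, fname in fields:
            if (ftype, fname) in on_disk or fname in set(n for _, n in on_disk):
                to_read.append((ftype, fname))
            elif (ftype, fname) in derived or fname in set(n for _, n in derived):
                to_generate.append((ftype, fname))
            else:
                raise FieldNotFound((ftype, fname))
        return to_read, to_generate

    on_disk = {("gas", "density")}
    derived = {("gas", "pressure")}
    split_fields([("gas", "density"), ("gas", "pressure")], on_disk, derived)
    # -> ([("gas", "density")], [("gas", "pressure")])
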
diff -r b5034a8695c3d58a95eb577bbb6a67e7c3d2661d -r 95a477fcf9ac3d51b348404756f07ef466bb0998 yt/geometry/oct_container.pyx
--- a/yt/geometry/oct_container.pyx
+++ b/yt/geometry/oct_container.pyx
@@ -418,7 +418,7 @@
         cdef np.ndarray[np.uint8_t, ndim=1] coords
         cdef OctVisitorData data
         self.setup_data(&data, domain_id)
-        coords = np.zeros((num_cells*8), dtype="uint8")
+        coords = np.zeros((num_cells*data.nz), dtype="uint8")
         data.array = <void *> coords.data
         self.visit_all_octs(selector, oct_visitors.mask_octs, &data)
         return coords.astype("bool")

diff -r b5034a8695c3d58a95eb577bbb6a67e7c3d2661d -r 95a477fcf9ac3d51b348404756f07ef466bb0998 yt/testing.py
--- a/yt/testing.py
+++ b/yt/testing.py
@@ -624,6 +624,18 @@
         return _func
     return compare_results(func)
 
+def periodicity_cases(ds):
+    # This is a generator that yields things near the corners.  It's good for
+    # getting different places to check periodicity.
+    yield (ds.domain_left_edge + ds.domain_right_edge)/2.0
+    dx = ds.domain_width / ds.domain_dimensions
+    # We start one dx in, and only go to one in as well.
+    for i in (1, ds.domain_dimensions[0] - 2):
+        for j in (1, ds.domain_dimensions[1] - 2):
+            for k in (1, ds.domain_dimensions[2] - 2):
+                center = dx * np.array([i,j,k]) + ds.domain_left_edge
+                yield center
+
 def run_nose(verbose=False, run_answer_tests=False, answer_big_data=False):
     import nose, os, sys, yt
     from yt.funcs import mylog

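The periodicity_cases helper added above yields the domain center plus points one cell in from each corner, which is where periodic wrapping bugs tend to show up. A standalone sketch of the same idea with plain NumPy inputs instead of a yt dataset (names here are illustrative):

    import numpy as np

    def corner_centers(left_edge, right_edge, dims):
        # Yield the domain center, then points one cell in from each corner.
        left_edge = np.asarray(left_edge, dtype="float64")
        right_edge = np.asarray(right_edge, dtype="float64")
        dims = np.asarray(dims)
        yield (left_edge + right_edge) / 2.0
        dx = (right_edge - left_edge) / dims
        for i in (1, dims[0] - 2):
            for j in (1, dims[1] - 2):
                for k in (1, dims[2] - 2):
                    yield dx * np.array([i, j, k]) + left_edge

    # list(corner_centers([0, 0, 0], [1, 1, 1], [16, 16, 16])) gives the
    # center plus the eight near-corner points.
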
diff -r b5034a8695c3d58a95eb577bbb6a67e7c3d2661d -r 95a477fcf9ac3d51b348404756f07ef466bb0998 yt/utilities/amr_kdtree/amr_kdtree.py
--- a/yt/utilities/amr_kdtree/amr_kdtree.py
+++ b/yt/utilities/amr_kdtree/amr_kdtree.py
@@ -81,8 +81,6 @@
         for lvl in lvl_range:
             #grids = self.data_source.select_grids(lvl)
             grids = np.array([b for b, mask in self.data_source.blocks if b.Level == lvl])
-            gids = np.array([g.id for g in grids if g.Level == lvl],
-                            dtype="int64")
             if len(grids) == 0: continue
             self.add_grids(grids)
 
@@ -93,7 +91,6 @@
             grid = self.ds.index.grids[node.grid - self._id_offset]
             dds = grid.dds
             gle = grid.LeftEdge
-            gre = grid.RightEdge
             nle = self.ds.arr(get_left_edge(node), input_units="code_length")
             nre = self.ds.arr(get_right_edge(node), input_units="code_length")
             li = np.rint((nle-gle)/dds).astype('int32')

diff -r b5034a8695c3d58a95eb577bbb6a67e7c3d2661d -r 95a477fcf9ac3d51b348404756f07ef466bb0998 yt/utilities/answer_testing/framework.py
--- a/yt/utilities/answer_testing/framework.py
+++ b/yt/utilities/answer_testing/framework.py
@@ -574,7 +574,7 @@
         for newp, oldp in zip(new_result["parents"], old_result["parents"]):
             assert(newp == oldp)
         for newc, oldc in zip(new_result["children"], old_result["children"]):
-            assert(newp == oldp)
+            assert(newc == oldc)
 
 class SimulatedHaloMassFunctionTest(AnswerTestingTest):
     _type_name = "SimulatedHaloMassFunction"

diff -r b5034a8695c3d58a95eb577bbb6a67e7c3d2661d -r 95a477fcf9ac3d51b348404756f07ef466bb0998 yt/utilities/answer_testing/runner.py
--- a/yt/utilities/answer_testing/runner.py
+++ b/yt/utilities/answer_testing/runner.py
@@ -89,8 +89,7 @@
         self.plot_tests = plot_tests
 
     def run_all_tests(self):
-        plot_list = []
-        for i,name in enumerate(sorted(test_registry)):
+        for name in sorted(test_registry):
             self.run_test(name)
         return self.passed_tests
 
@@ -98,7 +97,6 @@
         # We'll also need to call the "compare" operation,
         # but for that we'll need a data store.
         test = test_registry[name]
-        plot_list = []
         if test.output_type == 'single':
             mot = MultipleOutputTest(self.io_log)
             for i,fn in enumerate(mot):

diff -r b5034a8695c3d58a95eb577bbb6a67e7c3d2661d -r 95a477fcf9ac3d51b348404756f07ef466bb0998 yt/utilities/decompose.py
--- a/yt/utilities/decompose.py
+++ b/yt/utilities/decompose.py
@@ -34,13 +34,7 @@
     """ Calculate list of product(psize) subarrays of arr, along with their
         left and right edges
     """
-    grid_left_edges = np.empty([np.product(psize), 3], dtype=np.float64)
-    grid_right_edges = np.empty([np.product(psize), 3], dtype=np.float64)
-    n_d = shape
-    d_s = (bbox[:, 1] - bbox[:, 0]) / n_d
-    grid_left_edges, grid_right_edges, shapes, slices = \
-            split_array(bbox[:, 0], bbox[:, 1], shape, psize)
-    return grid_left_edges, grid_right_edges, shapes, slices
+    return split_array(bbox[:, 0], bbox[:, 1], shape, psize)
 
 
 def evaluate_domain_decomposition(n_d, pieces, ldom):

diff -r b5034a8695c3d58a95eb577bbb6a67e7c3d2661d -r 95a477fcf9ac3d51b348404756f07ef466bb0998 yt/utilities/flagging_methods.py
--- a/yt/utilities/flagging_methods.py
+++ b/yt/utilities/flagging_methods.py
@@ -147,8 +147,6 @@
         for dim in range(3):
             sig = self.sigs[dim]
             sd = sig[:-2] - 2.0*sig[1:-1] + sig[2:]
-            grid_ends = np.zeros((sig.size, 2))
-            ng = 0
             center = int((self.flagged.shape[dim] - 1) / 2)
             strength = zero_strength = 0
             for i in range(1, sig.size-2):

diff -r b5034a8695c3d58a95eb577bbb6a67e7c3d2661d -r 95a477fcf9ac3d51b348404756f07ef466bb0998 yt/utilities/fortran_utils.py
--- a/yt/utilities/fortran_utils.py
+++ b/yt/utilities/fortran_utils.py
@@ -208,7 +208,6 @@
     >>> skip(f, 3)
     """
     skipped = []
-    pos = f.tell()
     for i in range(n):
         fmt = endian+"I"
         size = f.read(struct.calcsize(fmt))

diff -r b5034a8695c3d58a95eb577bbb6a67e7c3d2661d -r 95a477fcf9ac3d51b348404756f07ef466bb0998 yt/utilities/lib/tests/test_alt_ray_tracers.py
--- a/yt/utilities/lib/tests/test_alt_ray_tracers.py
+++ b/yt/utilities/lib/tests/test_alt_ray_tracers.py
@@ -14,7 +14,7 @@
 
 def setup():
     # set up some sample cylindrical grid data, radiating out from center
-    global left_grid, right_grid, amr_levels, center_grid
+    global left_grid, right_grid, amr_levels, center_grid, data
     np.seterr(all='ignore')
     l1, r1, lvl1 = amrspace([0.0, 1.0, 0.0, -1.0, 0.0, 2*np.pi], levels=(7,7,0))
     l2, r2, lvl2 = amrspace([0.0, 1.0, 0.0,  1.0, 0.0, 2*np.pi], levels=(7,7,0))

diff -r b5034a8695c3d58a95eb577bbb6a67e7c3d2661d -r 95a477fcf9ac3d51b348404756f07ef466bb0998 yt/utilities/linear_interpolators.py
--- a/yt/utilities/linear_interpolators.py
+++ b/yt/utilities/linear_interpolators.py
@@ -240,7 +240,6 @@
     Return an iterator over EnzoSphere objects generated from the appropriate 
     columns in *filename*.  Optionally specify the *unit* radius is in.
     """
-    sp_list = []
     for line in open(filename):
         if line.startswith("#"): continue
         vals = line.split()

diff -r b5034a8695c3d58a95eb577bbb6a67e7c3d2661d -r 95a477fcf9ac3d51b348404756f07ef466bb0998 yt/utilities/minimal_representation.py
--- a/yt/utilities/minimal_representation.py
+++ b/yt/utilities/minimal_representation.py
@@ -207,7 +207,6 @@
 
     def _generate_post(self):
         metadata = self._attrs
-        chunks = []
         return (metadata, ("chunks", []))
 
 class MinimalNotebook(MinimalRepresentation):

diff -r b5034a8695c3d58a95eb577bbb6a67e7c3d2661d -r 95a477fcf9ac3d51b348404756f07ef466bb0998 yt/utilities/parallel_tools/io_runner.py
--- a/yt/utilities/parallel_tools/io_runner.py
+++ b/yt/utilities/parallel_tools/io_runner.py
@@ -57,7 +57,7 @@
         ds = self.ds
         fields = [f for f in ds.field_list
                   if not ds.field_info[f].particle_type]
-        dsields = [f for f in ds.field_list
+        pfields = [f for f in ds.field_list
                    if ds.field_info[f].particle_type]
         # Preload is only defined for Enzo ...
         if ds.index.io._dataset_type == "enzo_packed_3d":

diff -r b5034a8695c3d58a95eb577bbb6a67e7c3d2661d -r 95a477fcf9ac3d51b348404756f07ef466bb0998 yt/utilities/sdf.py
--- a/yt/utilities/sdf.py
+++ b/yt/utilities/sdf.py
@@ -524,6 +524,19 @@
     return sdf
 
 
+def _shift_periodic(pos, left, right, domain_width):
+    """
+    Periodically shift positions that are right of left+domain_width to
+    the left, and those left of right-domain_width to the right.
+    """
+    for i in range(3):
+        mask = pos[:,i] >= left[i] + domain_width[i]
+        pos[mask, i] -= domain_width[i]
+        mask = pos[:,i] < right[i] - domain_width[i]
+        pos[mask, i] += domain_width[i]
+    return
+
+
 class SDFIndex(object):
 
     """docstring for SDFIndex
@@ -914,14 +927,7 @@
             DW = self.true_domain_width
             # This hurts, but is useful for periodicity. Probably should check first
             # if it is even needed for a given left/right
-            for i in range(3):
-                #pos[:,i] = np.mod(pos[:,i] - left[i],
-                #                  self.true_domain_width[i]) + left[i]
-                mask = pos[:,i] >= left[i] + DW[i]
-                pos[mask, i] -= DW[i]
-                mask = pos[:,i] < right[i] - DW[i]
-                pos[mask, i] += DW[i]
-                #del mask
+            _shift_periodic(pos, left, right, DW)
 
             # Now get all particles that are within the bbox
             mask = np.all(pos >= left, axis=1) * np.all(pos < right, axis=1)
@@ -945,6 +951,39 @@
 
             yield filtered
 
+    def filter_sphere(self, center, radius, myiter):
+        """
+        Filter data by masking out data outside of a sphere defined
+        by a center and radius. Account for periodicity of data, allowing
+        left/right to be outside of the domain.
+        """
+
+        # Get left/right for periodicity considerations
+        left = center - radius
+        right = center + radius
+        for data in myiter:
+            pos = np.array([data['x'].copy(), data['y'].copy(), data['z'].copy()]).T
+
+            DW = self.true_domain_width
+            _shift_periodic(pos, left, right, DW)
+
+            # Now get all particles that are within the sphere 
+            mask = ((pos-center)**2).sum(axis=1)**0.5 < radius
+
+            mylog.debug("Filtering particles, returning %i out of %i" % (mask.sum(), mask.shape[0]))
+
+            if not np.any(mask):
+                continue
+
+            filtered = {ax: pos[:, i][mask] for i, ax in enumerate('xyz')}
+            for f in data.keys():
+                if f in 'xyz':
+                    continue
+                filtered[f] = data[f][mask]
+
+            yield filtered
+
+
     def iter_filtered_bbox_fields(self, left, right, data,
                                   pos_fields, fields):
         """
@@ -964,11 +1003,7 @@
 
         # This hurts, but is useful for periodicity. Probably should check first
         # if it is even needed for a given left/right
-        for i in range(3):
-            mask = pos[:,i] >= DW[i] + left[i]
-            pos[mask, i] -= DW[i]
-            mask = pos[:,i] < right[i] - DW[i]
-            pos[mask, i] += DW[i]
+        _shift_periodic(pos, left, right, DW)
 
         mylog.debug("Periodic filtering, %s %s %s %s" % (left, right, pos.min(axis=0), pos.max(axis=0)))
         # Now get all particles that are within the bbox
@@ -987,6 +1022,10 @@
                 yield f, data[f][mask]
 
     def iter_bbox_data(self, left, right, fields):
+        """
+        Iterate over all data within a bounding box defined by a left
+        and a right.
+        """
         _ensure_xyz_fields(fields)
         mylog.debug('MIDX Loading region from %s to %s' %(left, right))
         inds = self.get_bbox(left, right)
@@ -1007,15 +1046,17 @@
         #    yield dd
 
     def iter_sphere_data(self, center, radius, fields):
+        """
+        Iterate over all data within some sphere defined by a center and
+        a radius.
+        """
         _ensure_xyz_fields(fields)
         mylog.debug('MIDX Loading spherical region %s to %s' %(center, radius))
         inds = self.get_bbox(center-radius, center+radius)
 
-        my_filter = sphere_filter(center, radius, self.true_domain_width)
-
-        for dd in self.filter_particles(
-            self.iter_data(inds, fields),
-            my_filter):
+        for dd in self.filter_sphere(
+            center, radius,
+            self.iter_data(inds, fields)):
             yield dd
 
     def iter_ibbox_data(self, left, right, fields):
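
The hunks above factor the ad hoc periodic-wrapping loops into a single _shift_periodic helper and add a dedicated filter_sphere generator that masks particles by distance from a center.  A minimal standalone sketch of that logic, with the helper reproduced from the hunk and purely illustrative toy values for the domain and query region:

    import numpy as np

    def _shift_periodic(pos, left, right, domain_width):
        # Same logic as the new helper: wrap positions back toward the
        # query region, one axis at a time, modifying pos in place.
        for i in range(3):
            mask = pos[:, i] >= left[i] + domain_width[i]
            pos[mask, i] -= domain_width[i]
            mask = pos[:, i] < right[i] - domain_width[i]
            pos[mask, i] += domain_width[i]

    domain_width = np.array([1.0, 1.0, 1.0])
    center = np.array([0.95, 0.5, 0.5])      # sphere straddling the x edge
    radius = 0.1
    left, right = center - radius, center + radius

    pos = np.array([[0.02, 0.5, 0.5],        # just across the boundary
                    [0.90, 0.5, 0.5],        # already inside
                    [0.50, 0.5, 0.5]])       # far away
    _shift_periodic(pos, left, right, domain_width)

    # filter_sphere then keeps particles within `radius` of `center`.
    mask = ((pos - center) ** 2).sum(axis=1) ** 0.5 < radius
    print(pos[mask])                          # first two rows survive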

diff -r b5034a8695c3d58a95eb577bbb6a67e7c3d2661d -r 95a477fcf9ac3d51b348404756f07ef466bb0998 yt/utilities/spatial/setup.py
--- a/yt/utilities/spatial/setup.py
+++ b/yt/utilities/spatial/setup.py
@@ -4,8 +4,8 @@
 
 def configuration(parent_package='', top_path=None):
     from numpy.distutils.misc_util import Configuration, get_numpy_include_dirs
-    from numpy.distutils.system_info import get_info
-    from distutils.sysconfig import get_python_inc
+#    from numpy.distutils.system_info import get_info
+#    from distutils.sysconfig import get_python_inc
 
     config = Configuration('spatial', parent_package, top_path)
 

This diff is so big that we needed to truncate the remainder.

https://bitbucket.org/yt_analysis/yt/commits/6f0db65e21bc/
Changeset:   6f0db65e21bc
Branch:      yt
User:        brittonsmith
Date:        2014-09-16 23:09:04
Summary:     Merged in MatthewTurk/yt/yt-3.0 (pull request #1126)

Address some shortcomings of non-Cartesian plots
Affected #:  26 files

diff -r 66bbf028d950fca7b523c06712c4f155bfbc15a4 -r 6f0db65e21bcabd8283cadb52e30b6c1ed9a4afc doc/source/examining/Loading_Spherical_Data.ipynb
--- /dev/null
+++ b/doc/source/examining/Loading_Spherical_Data.ipynb
@@ -0,0 +1,188 @@
+{
+ "metadata": {
+  "name": "",
+  "signature": "sha256:88ed88ce8d8f4a359052f287aea17a7cbed435ff960e195097b440191ce6c2ab"
+ },
+ "nbformat": 3,
+ "nbformat_minor": 0,
+ "worksheets": [
+  {
+   "cells": [
+    {
+     "cell_type": "markdown",
+     "metadata": {},
+     "source": [
+      "# Loading Spherical Data\n",
+      "\n",
+      "With version 3.0 of yt, it has gained the ability to load data from non-Cartesian systems.  This support is still being extended, but here is an example of how to load spherical data from a regularly-spaced grid.  For irregularly spaced grids, a similar setup can be used, but the `load_hexahedral_mesh` method will have to be used instead.\n",
+      "\n",
+      "Note that in yt, \"spherical\" means that it is ordered $r$, $\\theta$, $\\phi$, where $\\theta$ is the declination from the azimuth (running from $0$ to $\\pi$ and $\\phi$ is the angle around the zenith (running from $0$ to $2\\pi$).\n",
+      "\n",
+      "We first start out by loading yt."
+     ]
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "import numpy as np\n",
+      "import yt"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": []
+    },
+    {
+     "cell_type": "markdown",
+     "metadata": {},
+     "source": [
+      "Now, we create a few derived fields.  The first three are just straight translations of the Cartesian coordinates, so that we can see where we are located in the data, and understand what we're seeing.  The final one is just a fun field that is some combination of the three coordinates, and will vary in all dimensions."
+     ]
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "@yt.derived_field(name = \"sphx\", units = \"cm\", take_log=False)\n",
+      "def sphx(field, data):\n",
+      "    return np.cos(data[\"phi\"]) * np.sin(data[\"theta\"])*data[\"r\"]\n",
+      "@yt.derived_field(name = \"sphy\", units = \"cm\", take_log=False)\n",
+      "def sphy(field, data):\n",
+      "    return np.sin(data[\"phi\"]) * np.sin(data[\"theta\"])*data[\"r\"]\n",
+      "@yt.derived_field(name = \"sphz\", units = \"cm\", take_log=False)\n",
+      "def sphz(field, data):\n",
+      "    return np.cos(data[\"theta\"])*data[\"r\"]\n",
+      "@yt.derived_field(name = \"funfield\", units=\"cm\", take_log=False)\n",
+      "def funfield(field, data):\n",
+      "    return (np.sin(data[\"phi\"])**2 + np.cos(data[\"theta\"])**2) * (1.0*data[\"r\"].uq+data[\"r\"])"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": []
+    },
+    {
+     "cell_type": "markdown",
+     "metadata": {},
+     "source": [
+      "## Loading Data\n",
+      "\n",
+      "Now we can actually load our data.  We use the `load_uniform_grid` function here.  Normally, the first argument would be a dictionary of field data, where the keys were the field names and the values the field data arrays.  Here, we're just going to look at derived fields, so we supply an empty one.\n",
+      "\n",
+      "The next few arguments are the number of dimensions, the bounds, and we then specify the geometry as spherical."
+     ]
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "ds = yt.load_uniform_grid({}, [128, 128, 128],\n",
+      "                          bbox=np.array([[0.0, 1.0], [0.0, np.pi], [0.0, 2*np.pi]]),\n",
+      "                          geometry=\"spherical\")"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": []
+    },
+    {
+     "cell_type": "markdown",
+     "metadata": {},
+     "source": [
+      "## Looking at Data\n",
+      "\n",
+      "Now we can take slices.  The first thing we will try is making a slice of data along the \"phi\" axis, here $\\pi/2$, which will be along the y axis in the positive direction.  We use the `.slice` attribute, which creates a slice, and then we convert this into a plot window.  Note that here 2 is used to indicate the third axis (0-indexed) which for spherical data is $\\phi$.\n",
+      "\n",
+      "This is the manual way of creating a plot -- below, we'll use the standard, automatic ways.  Note that the coordinates run from $-r$ to $r$ along the $z$ axis and from $0$ to $r$ along the $R$ axis.  We use the capital $R$ to indicate that it's the $R$ along the $x-y$ plane."
+     ]
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "s = ds.slice(2, np.pi/2)\n",
+      "p = s.to_pw(\"funfield\", origin=\"native\")\n",
+      "p.set_zlim(\"all\", 0.0, 4.0)\n",
+      "p.show()"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": []
+    },
+    {
+     "cell_type": "markdown",
+     "metadata": {},
+     "source": [
+      "We can also slice along $r$.  For now, this creates a regular grid with *incorrect* units for phi and theta.  We are currently exploring two other options -- a simple aitoff projection, and fixing it to use the correct units as-is."
+     ]
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "s = yt.SlicePlot(ds, \"r\", \"funfield\")\n",
+      "s.set_zlim(\"all\", 0.0, 4.0)\n",
+      "s.show()"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": []
+    },
+    {
+     "cell_type": "markdown",
+     "metadata": {},
+     "source": [
+      "We can also slice at constant $\\theta$.  But, this is a weird thing!  We're slicing at a constant declination from the azimuth.  What this means is that when thought of in a Cartesian domain, this slice is actually a cone.  The axes have been labeled appropriately, to indicate that these are not exactly the $x$ and $y$ axes, but instead differ by a factor of $\\sin(\\theta))$."
+     ]
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "s = yt.SlicePlot(ds, \"theta\", \"funfield\")\n",
+      "s.set_zlim(\"all\", 0.0, 4.0)\n",
+      "s.show()"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": []
+    },
+    {
+     "cell_type": "markdown",
+     "metadata": {},
+     "source": [
+      "We've seen lots of the `funfield` plots, but we can also look at the Cartesian axes.  This next plot plots the Cartesian $x$, $y$ and $z$ values on a $\\theta$ slice.  Because we're not supplying an argument to the `center` parameter, yt will place it at the center of the $\\theta$ axis, which will be at $\\pi/2$, where it will be aligned with the $x-y$ plane.  The slight change in `sphz` results from the cells themselves migrating, and plotting the center of those cells."
+     ]
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "s = yt.SlicePlot(ds, \"theta\", [\"sphx\", \"sphy\", \"sphz\"])\n",
+      "s.show()"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": []
+    },
+    {
+     "cell_type": "markdown",
+     "metadata": {},
+     "source": [
+      "We can do the same with the $\\phi$ axis."
+     ]
+    },
+    {
+     "cell_type": "code",
+     "collapsed": true,
+     "input": [
+      "s = yt.SlicePlot(ds, \"phi\", [\"sphx\", \"sphy\", \"sphz\"])\n",
+      "s.show()"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": []
+    }
+   ],
+   "metadata": {}
+  }
+ ]
+}
\ No newline at end of file

diff -r 66bbf028d950fca7b523c06712c4f155bfbc15a4 -r 6f0db65e21bcabd8283cadb52e30b6c1ed9a4afc doc/source/examining/index.rst
--- a/doc/source/examining/index.rst
+++ b/doc/source/examining/index.rst
@@ -9,4 +9,5 @@
    loading_data
    generic_array_data
    generic_particle_data
+   spherical_data
    low_level_inspection

diff -r 66bbf028d950fca7b523c06712c4f155bfbc15a4 -r 6f0db65e21bcabd8283cadb52e30b6c1ed9a4afc doc/source/examining/spherical_data.rst
--- /dev/null
+++ b/doc/source/examining/spherical_data.rst
@@ -0,0 +1,6 @@
+.. _loading-spherical-data:
+
+Loading Spherical Data
+======================
+
+.. notebook:: Loading_Spherical_Data.ipynb

diff -r 66bbf028d950fca7b523c06712c4f155bfbc15a4 -r 6f0db65e21bcabd8283cadb52e30b6c1ed9a4afc yt/data_objects/data_containers.py
--- a/yt/data_objects/data_containers.py
+++ b/yt/data_objects/data_containers.py
@@ -804,7 +804,8 @@
         skip += list(set(frb._exclude_fields).difference(set(self._key_fields)))
         self.fields = ensure_list(fields) + \
             [k for k in self.field_data if k not in skip]
-        (bounds, center) = get_window_parameters(axis, center, width, self.ds)
+        (bounds, center, display_center) = \
+            get_window_parameters(axis, center, width, self.ds)
         pw = PWViewerMPL(self, bounds, fields=self.fields, origin=origin,
                          frb_generator=frb, plot_type=plot_type)
         pw._setup_plots()

diff -r 66bbf028d950fca7b523c06712c4f155bfbc15a4 -r 6f0db65e21bcabd8283cadb52e30b6c1ed9a4afc yt/data_objects/static_output.py
--- a/yt/data_objects/static_output.py
+++ b/yt/data_objects/static_output.py
@@ -45,17 +45,12 @@
     YTArray, \
     YTQuantity
 
-from yt.geometry.cartesian_coordinates import \
-    CartesianCoordinateHandler
-from yt.geometry.polar_coordinates import \
-    PolarCoordinateHandler
-from yt.geometry.cylindrical_coordinates import \
-    CylindricalCoordinateHandler
-from yt.geometry.spherical_coordinates import \
-    SphericalCoordinateHandler
-from yt.geometry.geographic_coordinates import \
-    GeographicCoordinateHandler
-from yt.geometry.spec_cube_coordinates import \
+from yt.geometry.coordinates.api import \
+    CartesianCoordinateHandler, \
+    PolarCoordinateHandler, \
+    CylindricalCoordinateHandler, \
+    SphericalCoordinateHandler, \
+    GeographicCoordinateHandler, \
     SpectralCubeCoordinateHandler
 
 # We want to support the movie format in the future.

diff -r 66bbf028d950fca7b523c06712c4f155bfbc15a4 -r 6f0db65e21bcabd8283cadb52e30b6c1ed9a4afc yt/funcs.py
--- a/yt/funcs.py
+++ b/yt/funcs.py
@@ -758,3 +758,9 @@
     if os.path.isfile(_fn):
         mylog.info("Loading plugins from %s", _fn)
         execfile(_fn, yt.__dict__)
+
+def fix_unitary(u):
+    if u == '1':
+        return 'unitary'
+    else:
+        return u
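
fix_unitary exists so that the shorthand unit string '1' in a width tuple is translated to the named 'unitary' unit before being handed to ds.quan (it is used that way in validate_iterable_width later in this changeset); everything else passes through untouched.  A minimal standalone illustration of just that behavior, not yt code:

    def fix_unitary(u):
        return 'unitary' if u == '1' else u

    assert fix_unitary('1') == 'unitary'   # shorthand becomes a real unit name
    assert fix_unitary('kpc') == 'kpc'     # ordinary unit strings are unchanged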

diff -r 66bbf028d950fca7b523c06712c4f155bfbc15a4 -r 6f0db65e21bcabd8283cadb52e30b6c1ed9a4afc yt/geometry/cartesian_coordinates.py
--- a/yt/geometry/cartesian_coordinates.py
+++ /dev/null
@@ -1,122 +0,0 @@
-"""
-Cartesian fields
-
-
-
-
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (c) 2013, yt Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-import numpy as np
-from .coordinate_handler import \
-    CoordinateHandler, \
-    _unknown_coord, \
-    _get_coord_fields
-import yt.visualization._MPL as _MPL
-
-class CartesianCoordinateHandler(CoordinateHandler):
-
-    def __init__(self, ds):
-        super(CartesianCoordinateHandler, self).__init__(ds)
-
-    def setup_fields(self, registry):
-        for axi, ax in enumerate('xyz'):
-            f1, f2 = _get_coord_fields(axi)
-            registry.add_field(("index", "d%s" % ax), function = f1,
-                               display_field = False,
-                               units = "code_length")
-            registry.add_field(("index", "%s" % ax), function = f2,
-                               display_field = False,
-                               units = "code_length")
-        def _cell_volume(field, data):
-            rv  = data["index", "dx"].copy(order='K')
-            rv *= data["index", "dy"]
-            rv *= data["index", "dz"]
-            return rv
-        registry.add_field(("index", "cell_volume"), function=_cell_volume,
-                           display_field=False, units = "code_length**3")
-        registry.check_derived_fields(
-            [("index", "dx"), ("index", "dy"), ("index", "dz"),
-             ("index", "x"), ("index", "y"), ("index", "z"),
-             ("index", "cell_volume")])
-
-    def pixelize(self, dimension, data_source, field, bounds, size,
-                 antialias = True, periodic = True):
-        if dimension < 3:
-            return self._ortho_pixelize(data_source, field, bounds, size,
-                                        antialias, dimension, periodic)
-        else:
-            return self._oblique_pixelize(data_source, field, bounds, size,
-                                          antialias)
-
-    def _ortho_pixelize(self, data_source, field, bounds, size, antialias,
-                        dim, periodic):
-        # We should be using fcoords
-        period = self.period[:2].copy() # dummy here
-        period[0] = self.period[self.x_axis[dim]]
-        period[1] = self.period[self.y_axis[dim]]
-        if hasattr(period, 'in_units'):
-            period = period.in_units("code_length").d
-        buff = _MPL.Pixelize(data_source['px'], data_source['py'],
-                             data_source['pdx'], data_source['pdy'],
-                             data_source[field], size[0], size[1],
-                             bounds, int(antialias),
-                             period, int(periodic)).transpose()
-        return buff
-
-    def _oblique_pixelize(self, data_source, field, bounds, size, antialias):
-        indices = np.argsort(data_source['dx'])[::-1]
-        buff = _MPL.CPixelize(data_source['x'], data_source['y'],
-                              data_source['z'], data_source['px'],
-                              data_source['py'], data_source['pdx'],
-                              data_source['pdy'], data_source['pdz'],
-                              data_source.center, data_source._inv_mat, indices,
-                              data_source[field], size[0], size[1], bounds).transpose()
-        return buff
-
-    def convert_from_cartesian(self, coord):
-        return coord
-
-    def convert_to_cartesian(self, coord):
-        return coord
-
-    def convert_to_cylindrical(self, coord):
-        center = self.ds.domain_center
-        return cartesian_to_cylindrical(coord, center)
-
-    def convert_from_cylindrical(self, coord):
-        center = self.ds.domain_center
-        return cylindrical_to_cartesian(coord, center)
-
-    def convert_to_spherical(self, coord):
-        raise NotImplementedError
-
-    def convert_from_spherical(self, coord):
-        raise NotImplementedError
-
-    # Despite being mutables, we uses these here to be clear about how these
-    # are generated and to ensure that they are not re-generated unnecessarily
-    axis_name = { 0  : 'x',  1  : 'y',  2  : 'z',
-                 'x' : 'x', 'y' : 'y', 'z' : 'z',
-                 'X' : 'x', 'Y' : 'y', 'Z' : 'z'}
-
-    axis_id = { 'x' : 0, 'y' : 1, 'z' : 2,
-                 0  : 0,  1  : 1,  2  : 2}
-
-    x_axis = { 'x' : 1, 'y' : 2, 'z' : 0,
-                0  : 1,  1  : 2,  2  : 0}
-
-    y_axis = { 'x' : 2, 'y' : 0, 'z' : 1,
-                0  : 2,  1  : 0,  2  : 1}
-
-    @property
-    def period(self):
-        return self.ds.domain_width
-

diff -r 66bbf028d950fca7b523c06712c4f155bfbc15a4 -r 6f0db65e21bcabd8283cadb52e30b6c1ed9a4afc yt/geometry/coordinate_handler.py
--- a/yt/geometry/coordinate_handler.py
+++ /dev/null
@@ -1,116 +0,0 @@
-"""
-Coordinate handler base class.
-
-
-
-
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (c) 2013, yt Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-import numpy as np
-import abc
-import weakref
-
-from yt.funcs import *
-from yt.fields.field_info_container import \
-    NullFunc, FieldInfoContainer
-from yt.utilities.io_handler import io_registry
-from yt.utilities.logger import ytLogger as mylog
-from yt.utilities.parallel_tools.parallel_analysis_interface import \
-    ParallelAnalysisInterface
-from yt.utilities.lib.misc_utilities import \
-    pixelize_cylinder
-import yt.visualization._MPL as _MPL
-
-def _unknown_coord(field, data):
-    raise YTCoordinateNotImplemented
-
-def _get_coord_fields(axi, units = "code_length"):
-    def _dds(field, data):
-        rv = data.ds.arr(data.fwidth[...,axi].copy(), units)
-        return data._reshape_vals(rv)
-    def _coords(field, data):
-        rv = data.ds.arr(data.fcoords[...,axi].copy(), units)
-        return data._reshape_vals(rv)
-    return _dds, _coords
-
-class CoordinateHandler(object):
-    
-    def __init__(self, ds):
-        self.ds = weakref.proxy(ds)
-
-    def setup_fields(self):
-        # This should return field definitions for x, y, z, r, theta, phi
-        raise NotImplementedError
-
-    def pixelize(self, dimension, data_source, field, bounds, size, antialias = True):
-        # This should *actually* be a pixelize call, not just returning the
-        # pixelizer
-        raise NotImplementedError
-
-    def distance(self, start, end):
-        p1 = self.convert_to_cartesian(start)
-        p2 = self.convert_to_cartesian(end)
-        return np.sqrt(((p1-p2)**2.0).sum())
-
-    def convert_from_cartesian(self, coord):
-        raise NotImplementedError
-
-    def convert_to_cartesian(self, coord):
-        raise NotImplementedError
-
-    def convert_to_cylindrical(self, coord):
-        raise NotImplementedError
-
-    def convert_from_cylindrical(self, coord):
-        raise NotImplementedError
-
-    def convert_to_spherical(self, coord):
-        raise NotImplementedError
-
-    def convert_from_spherical(self, coord):
-        raise NotImplementedError
-
-    @property
-    def axis_name(self):
-        raise NotImplementedError
-
-    @property
-    def axis_id(self):
-        raise NotImplementedError
-
-    @property
-    def x_axis(self):
-        raise NotImplementedError
-
-    @property
-    def y_axis(self):
-        raise NotImplementedError
-
-    @property
-    def period(self):
-        raise NotImplementedError
-
-def cartesian_to_cylindrical(coord, center = (0,0,0)):
-    c2 = np.zeros_like(coord)
-    c2[...,0] = ((coord[...,0] - center[0])**2.0
-              +  (coord[...,1] - center[1])**2.0)**0.5
-    c2[...,1] = coord[...,2] # rzt
-    c2[...,2] = np.arctan2(coord[...,1] - center[1],
-                           coord[...,0] - center[0])
-    return c2
-
-def cylindrical_to_cartesian(coord, center = (0,0,0)):
-    c2 = np.zeros_like(coord)
-    c2[...,0] = np.cos(coord[...,0]) * coord[...,1] + center[0]
-    c2[...,1] = np.sin(coord[...,0]) * coord[...,1] + center[1]
-    c2[...,2] = coord[...,2]
-    return c2
-

diff -r 66bbf028d950fca7b523c06712c4f155bfbc15a4 -r 6f0db65e21bcabd8283cadb52e30b6c1ed9a4afc yt/geometry/coordinates/api.py
--- /dev/null
+++ b/yt/geometry/coordinates/api.py
@@ -0,0 +1,29 @@
+"""
+API for coordinate handlers
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2014, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+from .coordinate_handler import \
+    CoordinateHandler
+
+from .cartesian_coordinates import \
+    CartesianCoordinateHandler
+from .polar_coordinates import \
+    PolarCoordinateHandler
+from .cylindrical_coordinates import \
+    CylindricalCoordinateHandler
+from .spherical_coordinates import \
+    SphericalCoordinateHandler
+from .geographic_coordinates import \
+    GeographicCoordinateHandler
+from .spec_cube_coordinates import \
+    SpectralCubeCoordinateHandler
+

diff -r 66bbf028d950fca7b523c06712c4f155bfbc15a4 -r 6f0db65e21bcabd8283cadb52e30b6c1ed9a4afc yt/geometry/coordinates/cartesian_coordinates.py
--- /dev/null
+++ b/yt/geometry/coordinates/cartesian_coordinates.py
@@ -0,0 +1,122 @@
+"""
+Cartesian fields
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2013, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+import numpy as np
+from .coordinate_handler import \
+    CoordinateHandler, \
+    _unknown_coord, \
+    _get_coord_fields
+import yt.visualization._MPL as _MPL
+
+class CartesianCoordinateHandler(CoordinateHandler):
+
+    def __init__(self, ds):
+        super(CartesianCoordinateHandler, self).__init__(ds)
+
+    def setup_fields(self, registry):
+        for axi, ax in enumerate('xyz'):
+            f1, f2 = _get_coord_fields(axi)
+            registry.add_field(("index", "d%s" % ax), function = f1,
+                               display_field = False,
+                               units = "code_length")
+            registry.add_field(("index", "%s" % ax), function = f2,
+                               display_field = False,
+                               units = "code_length")
+        def _cell_volume(field, data):
+            rv  = data["index", "dx"].copy(order='K')
+            rv *= data["index", "dy"]
+            rv *= data["index", "dz"]
+            return rv
+        registry.add_field(("index", "cell_volume"), function=_cell_volume,
+                           display_field=False, units = "code_length**3")
+        registry.check_derived_fields(
+            [("index", "dx"), ("index", "dy"), ("index", "dz"),
+             ("index", "x"), ("index", "y"), ("index", "z"),
+             ("index", "cell_volume")])
+
+    def pixelize(self, dimension, data_source, field, bounds, size,
+                 antialias = True, periodic = True):
+        if dimension < 3:
+            return self._ortho_pixelize(data_source, field, bounds, size,
+                                        antialias, dimension, periodic)
+        else:
+            return self._oblique_pixelize(data_source, field, bounds, size,
+                                          antialias)
+
+    def _ortho_pixelize(self, data_source, field, bounds, size, antialias,
+                        dim, periodic):
+        # We should be using fcoords
+        period = self.period[:2].copy() # dummy here
+        period[0] = self.period[self.x_axis[dim]]
+        period[1] = self.period[self.y_axis[dim]]
+        if hasattr(period, 'in_units'):
+            period = period.in_units("code_length").d
+        buff = _MPL.Pixelize(data_source['px'], data_source['py'],
+                             data_source['pdx'], data_source['pdy'],
+                             data_source[field], size[0], size[1],
+                             bounds, int(antialias),
+                             period, int(periodic)).transpose()
+        return buff
+
+    def _oblique_pixelize(self, data_source, field, bounds, size, antialias):
+        indices = np.argsort(data_source['dx'])[::-1]
+        buff = _MPL.CPixelize(data_source['x'], data_source['y'],
+                              data_source['z'], data_source['px'],
+                              data_source['py'], data_source['pdx'],
+                              data_source['pdy'], data_source['pdz'],
+                              data_source.center, data_source._inv_mat, indices,
+                              data_source[field], size[0], size[1], bounds).transpose()
+        return buff
+
+    def convert_from_cartesian(self, coord):
+        return coord
+
+    def convert_to_cartesian(self, coord):
+        return coord
+
+    def convert_to_cylindrical(self, coord):
+        center = self.ds.domain_center
+        return cartesian_to_cylindrical(coord, center)
+
+    def convert_from_cylindrical(self, coord):
+        center = self.ds.domain_center
+        return cylindrical_to_cartesian(coord, center)
+
+    def convert_to_spherical(self, coord):
+        raise NotImplementedError
+
+    def convert_from_spherical(self, coord):
+        raise NotImplementedError
+
+    # Despite being mutable, we use these here to be clear about how these
+    # are generated and to ensure that they are not re-generated unnecessarily
+    axis_name = { 0  : 'x',  1  : 'y',  2  : 'z',
+                 'x' : 'x', 'y' : 'y', 'z' : 'z',
+                 'X' : 'x', 'Y' : 'y', 'Z' : 'z'}
+
+    axis_id = { 'x' : 0, 'y' : 1, 'z' : 2,
+                 0  : 0,  1  : 1,  2  : 2}
+
+    x_axis = { 'x' : 1, 'y' : 2, 'z' : 0,
+                0  : 1,  1  : 2,  2  : 0}
+
+    y_axis = { 'x' : 2, 'y' : 0, 'z' : 1,
+                0  : 2,  1  : 0,  2  : 1}
+
+    @property
+    def period(self):
+        return self.ds.domain_width
+

diff -r 66bbf028d950fca7b523c06712c4f155bfbc15a4 -r 6f0db65e21bcabd8283cadb52e30b6c1ed9a4afc yt/geometry/coordinates/coordinate_handler.py
--- /dev/null
+++ b/yt/geometry/coordinates/coordinate_handler.py
@@ -0,0 +1,212 @@
+"""
+Coordinate handler base class.
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2013, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+import numpy as np
+import abc
+import weakref
+from numbers import Number
+
+from yt.funcs import *
+from yt.fields.field_info_container import \
+    NullFunc, FieldInfoContainer
+from yt.utilities.io_handler import io_registry
+from yt.utilities.logger import ytLogger as mylog
+from yt.utilities.parallel_tools.parallel_analysis_interface import \
+    ParallelAnalysisInterface
+from yt.utilities.lib.misc_utilities import \
+    pixelize_cylinder
+import yt.visualization._MPL as _MPL
+from yt.units.yt_array import \
+    YTArray, YTQuantity
+
+def _unknown_coord(field, data):
+    raise YTCoordinateNotImplemented
+
+def _get_coord_fields(axi, units = "code_length"):
+    def _dds(field, data):
+        rv = data.ds.arr(data.fwidth[...,axi].copy(), units)
+        return data._reshape_vals(rv)
+    def _coords(field, data):
+        rv = data.ds.arr(data.fcoords[...,axi].copy(), units)
+        return data._reshape_vals(rv)
+    return _dds, _coords
+
+def validate_iterable_width(width, ds, unit=None):
+    if isinstance(width[0], tuple) and isinstance(width[1], tuple):
+        validate_width_tuple(width[0])
+        validate_width_tuple(width[1])
+        return (ds.quan(width[0][0], fix_unitary(width[0][1])),
+                ds.quan(width[1][0], fix_unitary(width[1][1])))
+    elif isinstance(width[0], Number) and isinstance(width[1], Number):
+        return (ds.quan(width[0], 'code_length'),
+                ds.quan(width[1], 'code_length'))
+    elif isinstance(width[0], YTQuantity) and isinstance(width[1], YTQuantity):
+        return (ds.quan(width[0]), ds.quan(width[1]))
+    else:
+        validate_width_tuple(width)
+        # If width and unit are both valid width tuples, we
+        # assume width controls x and unit controls y
+        try:
+            validate_width_tuple(unit)
+            return (ds.quan(width[0], fix_unitary(width[1])),
+                    ds.quan(unit[0], fix_unitary(unit[1])))
+        except YTInvalidWidthError:
+            return (ds.quan(width[0], fix_unitary(width[1])),
+                    ds.quan(width[0], fix_unitary(width[1])))
+
+class CoordinateHandler(object):
+    
+    def __init__(self, ds):
+        self.ds = weakref.proxy(ds)
+
+    def setup_fields(self):
+        # This should return field definitions for x, y, z, r, theta, phi
+        raise NotImplementedError
+
+    def pixelize(self, dimension, data_source, field, bounds, size, antialias = True):
+        # This should *actually* be a pixelize call, not just returning the
+        # pixelizer
+        raise NotImplementedError
+
+    def distance(self, start, end):
+        p1 = self.convert_to_cartesian(start)
+        p2 = self.convert_to_cartesian(end)
+        return np.sqrt(((p1-p2)**2.0).sum())
+
+    def convert_from_cartesian(self, coord):
+        raise NotImplementedError
+
+    def convert_to_cartesian(self, coord):
+        raise NotImplementedError
+
+    def convert_to_cylindrical(self, coord):
+        raise NotImplementedError
+
+    def convert_from_cylindrical(self, coord):
+        raise NotImplementedError
+
+    def convert_to_spherical(self, coord):
+        raise NotImplementedError
+
+    def convert_from_spherical(self, coord):
+        raise NotImplementedError
+
+    @property
+    def axis_name(self):
+        raise NotImplementedError
+
+    @property
+    def image_axis_name(self):
+        # Default
+        rv = {}
+        for i in range(3):
+            rv[i] = (self.axis_name[self.x_axis[i]],
+                     self.axis_name[self.y_axis[i]])
+            rv[self.axis_name[i]] = rv[i]
+            rv[self.axis_name[i].upper()] = rv[i]
+        return rv
+
+    @property
+    def axis_id(self):
+        raise NotImplementedError
+
+    @property
+    def x_axis(self):
+        raise NotImplementedError
+
+    @property
+    def y_axis(self):
+        raise NotImplementedError
+
+    @property
+    def period(self):
+        raise NotImplementedError
+
+    def sanitize_width(self, axis, width, depth):
+        if width is None:
+            # Default to code units
+            if not iterable(axis):
+                xax = self.x_axis[axis]
+                yax = self.y_axis[axis]
+                w = self.ds.domain_width[[xax, yax]]
+            else:
+                # axis is actually the normal vector
+                # for an off-axis data object.
+                mi = np.argmin(self.ds.domain_width)
+                w = self.ds.domain_width[[mi,mi]]
+            width = (w[0], w[1])
+        elif iterable(width):
+            width = validate_iterable_width(width, self.ds)
+        elif isinstance(width, YTQuantity):
+            width = (width, width)
+        elif isinstance(width, Number):
+            width = (self.ds.quan(width, 'code_length'),
+                     self.ds.quan(width, 'code_length'))
+        else:
+            raise YTInvalidWidthError(width)
+        if depth is not None:
+            if iterable(depth):
+                validate_width_tuple(depth)
+                depth = (self.ds.quan(depth[0], fix_unitary(depth[1])), )
+            elif isinstance(depth, Number):
+                depth = (self.ds.quan(depth, 'code_length',
+                         registry = self.ds.unit_registry), )
+            elif isinstance(depth, YTQuantity):
+                depth = (depth, )
+            else:
+                raise YTInvalidWidthError(depth)
+            return width + depth
+        return width
+
+    def sanitize_center(self, center, axis):
+        if isinstance(center, basestring):
+            if center.lower() == "m" or center.lower() == "max":
+                v, center = self.ds.find_max(("gas", "density"))
+                center = self.ds.arr(center, 'code_length')
+            elif center.lower() == "c" or center.lower() == "center":
+                center = (self.ds.domain_left_edge + self.ds.domain_right_edge) / 2
+            else:
+                raise RuntimeError('center keyword \"%s\" not recognized' % center)
+        elif isinstance(center, YTArray):
+            return self.ds.arr(center), self.convert_to_cartesian(center)
+        elif iterable(center):
+            if iterable(center[0]) and isinstance(center[1], basestring):
+                center = self.ds.arr(center[0], center[1])
+            else:
+                center = self.ds.arr(center, 'code_length')
+        else:
+            raise RuntimeError("center keyword \"%s\" not recognized" % center)
+        # This has to return both a center and a display_center
+        display_center = self.convert_to_cartesian(center)
+        return center, display_center
+
+
+def cartesian_to_cylindrical(coord, center = (0,0,0)):
+    c2 = np.zeros_like(coord)
+    c2[...,0] = ((coord[...,0] - center[0])**2.0
+              +  (coord[...,1] - center[1])**2.0)**0.5
+    c2[...,1] = coord[...,2] # rzt
+    c2[...,2] = np.arctan2(coord[...,1] - center[1],
+                           coord[...,0] - center[0])
+    return c2
+
+def cylindrical_to_cartesian(coord, center = (0,0,0)):
+    c2 = np.zeros_like(coord)
+    c2[...,0] = np.cos(coord[...,0]) * coord[...,1] + center[0]
+    c2[...,1] = np.sin(coord[...,0]) * coord[...,1] + center[1]
+    c2[...,2] = coord[...,2]
+    return c2
+
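
Alongside sanitize_width and sanitize_center, the base class now carries a default image_axis_name property (the cylindrical and polar handlers in this merge override it).  It maps an axis, looked up by index, by name, or by upper-cased name, to the pair of labels drawn on the image axes.  A standalone sketch of the dict it builds, using the Cartesian handler's tables from this changeset; the snippet itself is illustrative, not yt code:

    axis_name = {0: 'x', 1: 'y', 2: 'z'}
    x_axis = {0: 1, 1: 2, 2: 0}   # image x-axis for a slice along each axis
    y_axis = {0: 2, 1: 0, 2: 1}   # image y-axis for a slice along each axis

    image_axis_name = {}
    for i in range(3):
        image_axis_name[i] = (axis_name[x_axis[i]], axis_name[y_axis[i]])
        image_axis_name[axis_name[i]] = image_axis_name[i]
        image_axis_name[axis_name[i].upper()] = image_axis_name[i]

    print(image_axis_name[2])      # ('x', 'y'): labels for a slice along z
    print(image_axis_name['z'])    # same tuple, keyed by axis name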

diff -r 66bbf028d950fca7b523c06712c4f155bfbc15a4 -r 6f0db65e21bcabd8283cadb52e30b6c1ed9a4afc yt/geometry/coordinates/cylindrical_coordinates.py
--- /dev/null
+++ b/yt/geometry/coordinates/cylindrical_coordinates.py
@@ -0,0 +1,162 @@
+"""
+Cylindrical fields
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2013, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+import numpy as np
+from yt.units.yt_array import YTArray
+from .coordinate_handler import \
+    CoordinateHandler, \
+    _unknown_coord, \
+    _get_coord_fields
+import yt.visualization._MPL as _MPL
+from yt.utilities.lib.misc_utilities import \
+    pixelize_cylinder
+#
+# Cylindrical fields
+#
+
+class CylindricalCoordinateHandler(CoordinateHandler):
+
+    def __init__(self, ds, ordering = 'rzt'):
+        if ordering != 'rzt': raise NotImplementedError
+        super(CylindricalCoordinateHandler, self).__init__(ds)
+
+    def setup_fields(self, registry):
+        # return the fields for r, z, theta
+        registry.add_field(("index", "dx"), function=_unknown_coord)
+        registry.add_field(("index", "dy"), function=_unknown_coord)
+        registry.add_field(("index", "x"), function=_unknown_coord)
+        registry.add_field(("index", "y"), function=_unknown_coord)
+        f1, f2 = _get_coord_fields(0)
+        registry.add_field(("index", "dr"), function = f1,
+                           display_field = False,
+                           units = "code_length")
+        registry.add_field(("index", "r"), function = f2,
+                           display_field = False,
+                           units = "code_length")
+
+        f1, f2 = _get_coord_fields(1)
+        registry.add_field(("index", "dz"), function = f1,
+                           display_field = False,
+                           units = "code_length")
+        registry.add_field(("index", "z"), function = f2,
+                           display_field = False,
+                           units = "code_length")
+
+        f1, f2 = _get_coord_fields(2, "")
+        registry.add_field(("index", "dtheta"), function = f1,
+                           display_field = False,
+                           units = "")
+        registry.add_field(("index", "theta"), function = f2,
+                           display_field = False,
+                           units = "")
+
+        def _CylindricalVolume(field, data):
+            return data["index", "dtheta"] \
+                 * data["index", "r"] \
+                 * data["index", "dr"] \
+                 * data["index", "dz"]
+        registry.add_field(("index", "cell_volume"),
+                 function=_CylindricalVolume,
+                 units = "code_length**3")
+
+
+    def pixelize(self, dimension, data_source, field, bounds, size,
+                 antialias = True, periodic = True):
+        ax_name = self.axis_name[dimension]
+        if ax_name in ('r', 'theta'):
+            return self._ortho_pixelize(data_source, field, bounds, size,
+                                        antialias, dimension, periodic)
+        elif ax_name == "z":
+            return self._cyl_pixelize(data_source, field, bounds, size,
+                                        antialias)
+        else:
+            # Pixelizing along a cylindrical surface is a bit tricky
+            raise NotImplementedError
+
+    def _ortho_pixelize(self, data_source, field, bounds, size, antialias,
+                        dim, periodic):
+        period = self.period[:2].copy() # dummy here
+        period[0] = self.period[self.x_axis[dim]]
+        period[1] = self.period[self.y_axis[dim]]
+        if hasattr(period, 'in_units'):
+            period = period.in_units("code_length").d
+        buff = _MPL.Pixelize(data_source['px'], data_source['py'],
+                             data_source['pdx'], data_source['pdy'],
+                             data_source[field], size[0], size[1],
+                             bounds, int(antialias),
+                             period, int(periodic)).transpose()
+        return buff
+
+    def _cyl_pixelize(self, data_source, field, bounds, size, antialias):
+        buff = pixelize_cylinder(data_source['r'],
+                                 data_source['dr'],
+                                 data_source['theta'],
+                                 data_source['dtheta']/2.0, # half-widths
+                                 size, data_source[field], bounds)
+        return buff
+
+    axis_name = { 0  : 'r',  1  : 'z',  2  : 'theta',
+                 'r' : 'r', 'z' : 'z', 'theta' : 'theta',
+                 'R' : 'r', 'Z' : 'z', 'Theta' : 'theta'}
+
+    axis_id = { 'r' : 0, 'z' : 1, 'theta' : 2,
+                 0  : 0,  1  : 1,  2  : 2}
+
+    x_axis = { 'r' : 1, 'z' : 0, 'theta' : 0,
+                0  : 1,  1  : 0,  2  : 0}
+
+    y_axis = { 'r' : 2, 'z' : 2, 'theta' : 1,
+                0  : 2,  1  : 2,  2  : 1}
+
+    _image_axis_name = None
+
+    @property
+    def image_axis_name(self):    
+        if self._image_axis_name is not None:
+            return self._image_axis_name
+        # This is the x and y axes labels that get displayed.  For
+        # non-Cartesian coordinates, we usually want to override these for
+        # Cartesian coordinates, since we transform them.
+        rv = {0: ('z', 'theta'),
+              1: ('x', 'y'),
+              2: ('r', 'z')}
+        for i in rv.keys():
+            rv[self.axis_name[i]] = rv[i]
+            rv[self.axis_name[i].upper()] = rv[i]
+        self._image_axis_name = rv
+        return rv
+
+    def convert_from_cartesian(self, coord):
+        return cartesian_to_cylindrical(coord)
+
+    def convert_to_cartesian(self, coord):
+        return cylindrical_to_cartesian(coord)
+
+    def convert_to_cylindrical(self, coord):
+        return coord
+
+    def convert_from_cylindrical(self, coord):
+        return coord
+
+    def convert_to_spherical(self, coord):
+        raise NotImplementedError
+
+    def convert_from_spherical(self, coord):
+        raise NotImplementedError
+
+    @property
+    def period(self):
+        return np.array([0.0, 0.0, 2.0*np.pi])
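
As a quick sanity check on the _CylindricalVolume definition above (cell_volume = dtheta * r * dr * dz), summing that element over a uniform grid covering the full cylinder recovers pi * R**2 * H.  The grid resolution and extents below are arbitrary, purely for illustration:

    import numpy as np

    R, H = 1.0, 2.0
    nr, nt, nz = 64, 64, 16
    dr, dtheta, dz = R / nr, 2 * np.pi / nt, H / nz
    r = (np.arange(nr) + 0.5) * dr            # cell-centered radii

    cell_volume = dtheta * r * dr * dz        # volume of one cell at each r
    total = cell_volume.sum() * nt * nz       # cells are identical in theta, z
    print(total, np.pi * R**2 * H)            # both ~6.2832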

diff -r 66bbf028d950fca7b523c06712c4f155bfbc15a4 -r 6f0db65e21bcabd8283cadb52e30b6c1ed9a4afc yt/geometry/coordinates/geographic_coordinates.py
--- /dev/null
+++ b/yt/geometry/coordinates/geographic_coordinates.py
@@ -0,0 +1,194 @@
+"""
+Geographic fields
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2013, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+import numpy as np
+from .coordinate_handler import \
+    CoordinateHandler, \
+    _unknown_coord, \
+    _get_coord_fields
+import yt.visualization._MPL as _MPL
+from yt.utilities.lib.misc_utilities import \
+    pixelize_cylinder, pixelize_aitoff
+
+class GeographicCoordinateHandler(CoordinateHandler):
+
+    def __init__(self, ds, ordering = 'latlonalt'):
+        if ordering != 'latlonalt': raise NotImplementedError
+        super(GeographicCoordinateHandler, self).__init__(ds)
+
+    def setup_fields(self, registry):
+        # return the fields for r, z, theta
+        registry.add_field(("index", "dx"), function=_unknown_coord)
+        registry.add_field(("index", "dy"), function=_unknown_coord)
+        registry.add_field(("index", "dz"), function=_unknown_coord)
+        registry.add_field(("index", "x"), function=_unknown_coord)
+        registry.add_field(("index", "y"), function=_unknown_coord)
+        registry.add_field(("index", "z"), function=_unknown_coord)
+        f1, f2 = _get_coord_fields(0, "")
+        registry.add_field(("index", "dlatitude"), function = f1,
+                           display_field = False,
+                           units = "")
+        registry.add_field(("index", "latitude"), function = f2,
+                           display_field = False,
+                           units = "")
+
+        f1, f2 = _get_coord_fields(1, "")
+        registry.add_field(("index", "dlongitude"), function = f1,
+                           display_field = False,
+                           units = "")
+        registry.add_field(("index", "longitude"), function = f2,
+                           display_field = False,
+                           units = "")
+
+        f1, f2 = _get_coord_fields(2)
+        registry.add_field(("index", "daltitude"), function = f1,
+                           display_field = False,
+                           units = "code_length")
+        registry.add_field(("index", "altitude"), function = f2,
+                           display_field = False,
+                           units = "code_length")
+
+        def _SphericalVolume(field, data):
+            # r**2 sin theta dr dtheta dphi
+            # We can use the transformed coordinates here.
+            vol = data["index", "r"]**2.0
+            vol *= data["index", "dr"]
+            vol *= np.sin(data["index", "theta"])
+            vol *= data["index", "dtheta"]
+            vol *= data["index", "dphi"]
+            return vol
+        registry.add_field(("index", "cell_volume"),
+                 function=_SphericalVolume,
+                 units = "code_length**3")
+
+        # Altitude is the radius from the central zone minus the radius of the
+        # surface.
+        def _altitude_to_radius(field, data):
+            surface_height = data.get_field_parameter("surface_height")
+            if surface_height is None:
+                surface_height = getattr(data.ds, "surface_height", 0.0)
+            return data["altitude"] + surface_height
+        registry.add_field(("index", "r"),
+                 function=_altitude_to_radius,
+                 units = "code_length")
+        registry.alias(("index", "dr"), ("index", "daltitude"))
+
+        def _longitude_to_theta(field, data):
+            # longitude runs from -180 to 180.
+            return (data["longitude"] + 180) * np.pi/180.0
+        registry.add_field(("index", "theta"),
+                 function = _longitude_to_theta,
+                 units = "")
+        def _dlongitude_to_dtheta(field, data):
+            return data["dlongitude"] * np.pi/180.0
+        registry.add_field(("index", "dtheta"),
+                 function = _dlongitude_to_dtheta,
+                 units = "")
+
+        def _latitude_to_phi(field, data):
+            # latitude runs from -90 to 90
+            return (data["latitude"] + 90) * np.pi/180.0
+        registry.add_field(("index", "phi"),
+                 function = _latitude_to_phi,
+                 units = "")
+        def _dlatitude_to_dphi(field, data):
+            return data["dlatitude"] * np.pi/180.0
+        registry.add_field(("index", "dphi"),
+                 function = _dlatitude_to_dphi,
+                 units = "")
+
+    def pixelize(self, dimension, data_source, field, bounds, size,
+                 antialias = True, periodic = True):
+        if dimension in (0, 1):
+            return self._cyl_pixelize(data_source, field, bounds, size,
+                                          antialias, dimension)
+        elif dimension == 2:
+            return self._ortho_pixelize(data_source, field, bounds, size,
+                                        antialias, dimension, periodic)
+        else:
+            raise NotImplementedError
+
+    def _ortho_pixelize(self, data_source, field, bounds, size, antialias,
+                        dim, periodic):
+        buff = pixelize_aitoff(data_source["theta"], data_source["dtheta"]/2.0,
+                               data_source["phi"], data_source["dphi"]/2.0,
+                               size, data_source[field], None,
+                               None).transpose()
+        return buff
+
+    def _cyl_pixelize(self, data_source, field, bounds, size, antialias,
+                      dimension):
+        if dimension == 0:
+            buff = pixelize_cylinder(data_source['r'],
+                                     data_source['dr'] / 2.0,
+                                     data_source['theta'],
+                                     data_source['dtheta'] / 2.0, # half-widths
+                                     size, data_source[field], bounds)
+        elif dimension == 1:
+            buff = pixelize_cylinder(data_source['r'],
+                                     data_source['dr'] / 2.0,
+                                     data_source['phi'],
+                                     data_source['dphi'] / 2.0, # half-widths
+                                     size, data_source[field], bounds)
+        else:
+            raise RuntimeError
+        return buff
+
+
+    def convert_from_cartesian(self, coord):
+        raise NotImplementedError
+
+    def convert_to_cartesian(self, coord):
+        raise NotImplementedError
+
+    def convert_to_cylindrical(self, coord):
+        raise NotImplementedError
+
+    def convert_from_cylindrical(self, coord):
+        raise NotImplementedError
+
+    def convert_to_spherical(self, coord):
+        raise NotImplementedError
+
+    def convert_from_spherical(self, coord):
+        raise NotImplementedError
+
+    # Despite being mutable, we use these here to be clear about how these
+    # are generated and to ensure that they are not re-generated unnecessarily
+    axis_name = { 0  : 'latitude',  1  : 'longitude',  2  : 'altitude',
+                 'latitude' : 'latitude',
+                 'longitude' : 'longitude', 
+                 'altitude' : 'altitude',
+                 'Latitude' : 'latitude',
+                 'Longitude' : 'longitude', 
+                 'Altitude' : 'altitude',
+                 'lat' : 'latitude',
+                 'lon' : 'longitude', 
+                 'alt' : 'altitude' }
+
+    axis_id = { 'latitude' : 0, 'longitude' : 1, 'altitude' : 2,
+                 0  : 0,  1  : 1,  2  : 2}
+
+    x_axis = { 'latitude' : 1, 'longitude' : 0, 'altitude' : 0,
+                0  : 1,  1  : 0,  2  : 0}
+
+    y_axis = { 'latitude' : 2, 'longitude' : 2, 'altitude' : 1,
+                0  : 2,  1  : 2,  2  : 1}
+
+    @property
+    def period(self):
+        return self.ds.domain_width
+
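
The geographic handler maps data onto the spherical machinery by treating altitude (plus an optional surface_height) as r, longitude as theta, and latitude as phi, using the conversions defined above.  A standalone sketch of the two angle conversions on a few sample points; the values are illustrative:

    import numpy as np

    longitude = np.array([-180.0, 0.0, 180.0])   # degrees
    latitude = np.array([-90.0, 0.0, 90.0])      # degrees

    theta = (longitude + 180) * np.pi / 180.0    # -> [0, pi, 2*pi]
    phi = (latitude + 90) * np.pi / 180.0        # -> [0, pi/2, pi]
    print(theta, phi)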

diff -r 66bbf028d950fca7b523c06712c4f155bfbc15a4 -r 6f0db65e21bcabd8283cadb52e30b6c1ed9a4afc yt/geometry/coordinates/polar_coordinates.py
--- /dev/null
+++ b/yt/geometry/coordinates/polar_coordinates.py
@@ -0,0 +1,149 @@
+"""
+Polar fields
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2013, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+import numpy as np
+from .coordinate_handler import \
+    CoordinateHandler, \
+    _unknown_coord, \
+    _get_coord_fields
+from yt.utilities.lib.misc_utilities import \
+    pixelize_cylinder
+
+class PolarCoordinateHandler(CoordinateHandler):
+
+    def __init__(self, ds, ordering = 'rtz'):
+        if ordering != 'rtz': raise NotImplementedError
+        super(PolarCoordinateHandler, self).__init__(ds)
+
+    def setup_fields(self, registry):
+        # return the fields for r, z, theta
+        registry.add_field("dx", function=_unknown_coord)
+        registry.add_field("dy", function=_unknown_coord)
+        registry.add_field("x", function=_unknown_coord)
+        registry.add_field("y", function=_unknown_coord)
+
+        f1, f2 = _get_coord_fields(0)
+        registry.add_field(("index", "dr"), function = f1,
+                           display_field = False,
+                           units = "code_length")
+        registry.add_field(("index", "r"), function = f2,
+                           display_field = False,
+                           units = "code_length")
+
+        f1, f2 = _get_coord_fields(1, "")
+        registry.add_field(("index", "dtheta"), function = f1,
+                           display_field = False,
+                           units = "")
+        registry.add_field(("index", "theta"), function = f2,
+                           display_field = False,
+                           units = "")
+
+        f1, f2 = _get_coord_fields(2) 
+        registry.add_field(("index", "dz"), function = f1,
+                           display_field = False,
+                           units = "code_length")
+        registry.add_field(("index", "z"), function = f2,
+                           display_field = False,
+                           units = "code_length")
+
+
+        def _CylindricalVolume(field, data):
+            return data["dtheta"] * data["r"] * data["dr"] * data["dz"]
+        registry.add_field("CellVolume", function=_CylindricalVolume)
+
+    def pixelize(self, dimension, data_source, field, bounds, size, antialias = True):
+        ax_name = self.axis_name[dimension]
+        if ax_name in ('r', 'theta'):
+            return self._ortho_pixelize(data_source, field, bounds, size,
+                                        antialias)
+        elif ax_name == "z":
+            return self._polar_pixelize(data_source, field, bounds, size,
+                                        antialias)
+        else:
+            # Pixelizing along a cylindrical surface is a bit tricky
+            raise NotImplementedError
+
+
+    def _ortho_pixelize(self, data_source, field, bounds, size, antialias):
+        buff = _MPL.Pixelize(data_source['px'], data_source['py'],
+                             data_source['pdx'], data_source['pdy'],
+                             data_source[field], size[0], size[1],
+                             bounds, int(antialias),
+                             True, self.period).transpose()
+        return buff
+
+    def _polar_pixelize(self, data_source, field, bounds, size, antialias):
+        # Our bounds here will *always* be what the plot window thinks are x0, x1,
+        # y0, y1, but which will actually be rmin, rmax, thetamin, thetamax.
+        buff = pixelize_cylinder(data_source['r'],
+                                 data_source['dr'],
+                                 data_source['theta'],
+                                 data_source['dtheta'] / 2.0, # half-widths
+                                 size, data_source[field], bounds)
+        return buff
+
+    axis_name = { 0  : 'r',  1  : 'theta',  2  : 'z',
+                 'r' : 'r', 'theta' : 'theta', 'z' : 'z',
+                 'R' : 'r', 'Theta' : 'theta', 'Z' : 'z'}
+
+    axis_id = { 'r' : 0, 'theta' : 1, 'z' : 2,
+                 0  : 0,  1  : 1,  2  : 2}
+
+    x_axis = { 'r' : 1, 'theta' : 0, 'z' : 0,
+                0  : 1,  1  : 0,  2  : 0}
+
+    y_axis = { 'r' : 2, 'theta' : 2, 'z' : 1,
+                0  : 2,  1  : 2,  2  : 1}
+
+    _image_axis_name = None
+    @property
+    def image_axis_name(self):    
+        if self._image_axis_name is not None:
+            return self._image_axis_name
+        # This is the x and y axes labels that get displayed.  For
+        # non-Cartesian coordinates, we usually want to override these for
+        # Cartesian coordinates, since we transform them.
+        rv = {0: ('theta', 'z'),
+              1: ('x', 'y'),
+              2: ('r', 'z')}
+        for i in rv.keys():
+            rv[self.axis_name[i]] = rv[i]
+            rv[self.axis_name[i].upper()] = rv[i]
+        self._image_axis_name = rv
+        return rv
+
+    def convert_from_cartesian(self, coord):
+        return cartesian_to_cylindrical(coord)
+
+    def convert_to_cartesian(self, coord):
+        return cylindrical_to_cartesian(coord)
+
+    def convert_to_cylindrical(self, coord):
+        return coord
+
+    def convert_from_cylindrical(self, coord):
+        return coord
+
+    def convert_to_spherical(self, coord):
+        raise NotImplementedError
+
+    def convert_from_spherical(self, coord):
+        raise NotImplementedError
+
+    @property
+    def period(self):
+        return np.array([0.0, 0.0, 2.0*np.pi])
+
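A standalone sketch of the keying scheme the cached image_axis_name
property builds above: one (x label, y label) pair per axis, reachable by
integer id, by the lower-case axis name, and by its upper-cased spelling.
This is plain Python for illustration, not the yt class; the label pairs
are simply copied from the polar handler in this changeset.

    axis_name = {0: 'r', 1: 'theta', 2: 'z'}

    def build_image_axis_name(labels):
        # labels maps axis id -> (x label, y label); mirror each entry
        # under the axis name and its upper-cased spelling.
        rv = dict(labels)
        for i in list(labels):
            rv[axis_name[i]] = rv[i]          # e.g. rv['theta']
            rv[axis_name[i].upper()] = rv[i]  # e.g. rv['THETA']
        return rv

    table = build_image_axis_name({0: ('theta', 'z'),
                                   1: ('x', 'y'),
                                   2: ('r', 'z')})
    assert table[0] == table['r'] == table['R'] == ('theta', 'z')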

diff -r 66bbf028d950fca7b523c06712c4f155bfbc15a4 -r 6f0db65e21bcabd8283cadb52e30b6c1ed9a4afc yt/geometry/coordinates/spec_cube_coordinates.py
--- /dev/null
+++ b/yt/geometry/coordinates/spec_cube_coordinates.py
@@ -0,0 +1,65 @@
+"""
+Spectral cube fields
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2013, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+import numpy as np
+from .cartesian_coordinates import \
+    CartesianCoordinateHandler
+
+class SpectralCubeCoordinateHandler(CartesianCoordinateHandler):
+
+    def __init__(self, ds):
+        super(SpectralCubeCoordinateHandler, self).__init__(ds)
+
+        self.axis_name = {}
+        self.axis_id = {}
+
+        for axis, axis_name in zip([ds.lon_axis, ds.lat_axis, ds.spec_axis],
+                                   ["Image\ x", "Image\ y", ds.spec_name]):
+            lower_ax = "xyz"[axis]
+            upper_ax = lower_ax.upper()
+
+            self.axis_name[axis] = axis_name
+            self.axis_name[lower_ax] = axis_name
+            self.axis_name[upper_ax] = axis_name
+            self.axis_name[axis_name] = axis_name
+
+            self.axis_id[lower_ax] = axis
+            self.axis_id[axis] = axis
+            self.axis_id[axis_name] = axis
+
+        self.default_unit_label = {}
+        self.default_unit_label[ds.lon_axis] = "pixel"
+        self.default_unit_label[ds.lat_axis] = "pixel"
+        self.default_unit_label[ds.spec_axis] = ds.spec_unit
+
+        def _spec_axis(ax, x, y):
+            p = (x,y)[ax]
+            return [self.ds.pixel2spec(pp).v for pp in p]
+
+        self.axis_field = {}
+        self.axis_field[self.ds.spec_axis] = _spec_axis
+
+    def convert_to_cylindrical(self, coord):
+        raise NotImplementedError
+
+    def convert_from_cylindrical(self, coord):
+        raise NotImplementedError
+
+    x_axis = { 'x' : 1, 'y' : 0, 'z' : 0,
+                0  : 1,  1  : 0,  2  : 0}
+
+    y_axis = { 'x' : 2, 'y' : 2, 'z' : 1,
+                0  : 2,  1  : 2,  2  : 1}
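For context, a standalone sketch of the lookup tables the spectral-cube
handler assembles in __init__: axis_name and axis_id are keyed by integer
id, by the 'x'/'X' style letter, and by the display name, and axis_field
holds a callback that turns pixel coordinates along the spectral axis into
spectral values.  pixel2spec below is a made-up linear stand-in for the
dataset's ds.pixel2spec; the axis ids and spec_name are placeholders.

    def pixel2spec(pixel, ref_val=1.4e9, delta=1.0e6):
        # hypothetical linear mapping: value = ref_val + pixel * delta
        return ref_val + pixel * delta

    lon_axis, lat_axis, spec_axis = 0, 1, 2
    spec_name = "Velocity"  # placeholder display name

    axis_name, axis_id = {}, {}
    for axis, name in zip([lon_axis, lat_axis, spec_axis],
                          [r"Image\ x", r"Image\ y", spec_name]):
        letter = "xyz"[axis]
        axis_name.update({axis: name, letter: name,
                          letter.upper(): name, name: name})
        axis_id.update({letter: axis, axis: axis, name: axis})

    def _spec_axis(ax, x, y):
        # ax selects which of the two image coordinates is the spectral one
        p = (x, y)[ax]
        return [pixel2spec(pp) for pp in p]

    axis_field = {spec_axis: _spec_axis}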

diff -r 66bbf028d950fca7b523c06712c4f155bfbc15a4 -r 6f0db65e21bcabd8283cadb52e30b6c1ed9a4afc yt/geometry/coordinates/spherical_coordinates.py
--- /dev/null
+++ b/yt/geometry/coordinates/spherical_coordinates.py
@@ -0,0 +1,221 @@
+"""
+Spherical fields
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2013, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+import numpy as np
+from .coordinate_handler import \
+    CoordinateHandler, \
+    _unknown_coord, \
+    _get_coord_fields
+import yt.visualization._MPL as _MPL
+from yt.utilities.lib.misc_utilities import \
+    pixelize_cylinder, pixelize_aitoff
+
+class SphericalCoordinateHandler(CoordinateHandler):
+
+    def __init__(self, ds, ordering = 'rtp'):
+        if ordering != 'rtp': raise NotImplementedError
+        super(SphericalCoordinateHandler, self).__init__(ds)
+
+    def setup_fields(self, registry):
+        # return the fields for r, theta, phi
+        registry.add_field(("index", "dx"), function=_unknown_coord)
+        registry.add_field(("index", "dy"), function=_unknown_coord)
+        registry.add_field(("index", "dz"), function=_unknown_coord)
+        registry.add_field(("index", "x"), function=_unknown_coord)
+        registry.add_field(("index", "y"), function=_unknown_coord)
+        registry.add_field(("index", "z"), function=_unknown_coord)
+        f1, f2 = _get_coord_fields(0)
+        registry.add_field(("index", "dr"), function = f1,
+                           display_field = False,
+                           units = "code_length")
+        registry.add_field(("index", "r"), function = f2,
+                           display_field = False,
+                           units = "code_length")
+
+        f1, f2 = _get_coord_fields(1, "")
+        registry.add_field(("index", "dtheta"), function = f1,
+                           display_field = False,
+                           units = "")
+        registry.add_field(("index", "theta"), function = f2,
+                           display_field = False,
+                           units = "")
+
+        f1, f2 = _get_coord_fields(2, "")
+        registry.add_field(("index", "dphi"), function = f1,
+                           display_field = False,
+                           units = "")
+        registry.add_field(("index", "phi"), function = f2,
+                           display_field = False,
+                           units = "")
+
+        def _SphericalVolume(field, data):
+            # r**2 sin theta dr dtheta dphi
+            vol = data["index", "r"]**2.0
+            vol *= data["index", "dr"]
+            vol *= np.sin(data["index", "theta"])
+            vol *= data["index", "dtheta"]
+            vol *= data["index", "dphi"]
+            return vol
+        registry.add_field(("index", "cell_volume"),
+                 function=_SphericalVolume,
+                 units = "code_length**3")
+
+    def pixelize(self, dimension, data_source, field, bounds, size,
+                 antialias = True, periodic = True):
+        self.period
+        if dimension == 0:
+            return self._ortho_pixelize(data_source, field, bounds, size,
+                                        antialias, dimension, periodic)
+        elif dimension in (1, 2):
+            return self._cyl_pixelize(data_source, field, bounds, size,
+                                          antialias, dimension)
+        else:
+            raise NotImplementedError
+
+    def _ortho_pixelize(self, data_source, field, bounds, size, antialias,
+                        dim, periodic):
+        # We should be using fcoords
+        period = self.period[:2].copy() # placeholder values; overwritten below
+        period[0] = self.period[self.x_axis[dim]]
+        period[1] = self.period[self.y_axis[dim]]
+        period = period.in_units("code_length").d
+        buff = _MPL.Pixelize(data_source['px'], data_source['py'],
+                             data_source['pdx'], data_source['pdy'],
+                             data_source[field], size[0], size[1],
+                             bounds, int(antialias),
+                             period, int(periodic)).transpose()
+        return buff
+
+    def _cyl_pixelize(self, data_source, field, bounds, size, antialias,
+                      dimension):
+        if dimension == 1:
+            buff = pixelize_cylinder(data_source['r'],
+                                     data_source['dr'] / 2.0,
+                                     data_source['phi'],
+                                     data_source['dphi'] / 2.0, # half-widths
+                                     size, data_source[field], bounds)
+        elif dimension == 2:
+            buff = pixelize_cylinder(data_source['r'],
+                                     data_source['dr'] / 2.0,
+                                     data_source['theta'],
+                                     data_source['dtheta'] / 2.0, # half-widths
+                                     size, data_source[field], bounds)
+            buff = buff.transpose()
+        else:
+            raise RuntimeError
+        return buff
+
+
+    def convert_from_cartesian(self, coord):
+        raise NotImplementedError
+
+    def convert_to_cartesian(self, coord):
+        if isinstance(coord, np.ndarray) and len(coord.shape) > 1:
+            r = coord[:,0]
+            theta = coord[:,1]
+            phi = coord[:,2]
+            nc = np.zeros_like(coord)
+            # r, theta, phi
+            nc[:,0] = np.cos(phi) * np.sin(theta) * r
+            nc[:,1] = np.sin(phi) * np.sin(theta) * r
+            nc[:,2] = np.cos(theta) * r
+        else:
+            r, theta, phi = coord
+            nc = (np.cos(phi) * np.sin(theta) * r,
+                  np.sin(phi) * np.sin(theta) * r,
+                  np.cos(theta) * r)
+        return nc
+
+    def convert_to_cylindrical(self, coord):
+        raise NotImplementedError
+
+    def convert_from_cylindrical(self, coord):
+        raise NotImplementedError
+
+    def convert_to_spherical(self, coord):
+        raise NotImplementedError
+
+    def convert_from_spherical(self, coord):
+        raise NotImplementedError
+
+    # Despite being mutable, we use these here to be clear about how they
+    # are generated and to ensure that they are not re-generated unnecessarily.
+    axis_name = { 0  : 'r',  1  : 'theta',  2  : 'phi',
+                 'r' : 'r', 'theta' : 'theta', 'phi' : 'phi',
+                 'R' : 'r', 'Theta' : 'theta', 'Phi' : 'phi'}
+
+    _image_axis_name = None
+    @property
+    def image_axis_name(self):    
+        if self._image_axis_name is not None:
+            return self._image_axis_name
+        # These are the x and y axis labels that get displayed.  For
+        # non-Cartesian geometries we usually want to override the native
+        # axis names, since the image plane is transformed (e.g. to
+        # Cartesian coordinates) before display.
+        rv = {0: ('theta', 'phi'),
+              1: ('x / \\sin(\\theta)', 'y / \\sin(\\theta)'),
+              2: ('R', 'z')}
+        for i in rv.keys():
+            rv[self.axis_name[i]] = rv[i]
+            rv[self.axis_name[i].upper()] = rv[i]
+        self._image_axis_name = rv
+        return rv
+
+
+    axis_id = { 'r' : 0, 'theta' : 1, 'phi' : 2,
+                 0  : 0,  1  : 1,  2  : 2}
+
+    x_axis = { 'r' : 1, 'theta' : 0, 'phi' : 0,
+                0  : 1,  1  : 0,  2  : 0}
+
+    y_axis = { 'r' : 2, 'theta' : 2, 'phi' : 1,
+                0  : 2,  1  : 2,  2  : 1}
+
+    @property
+    def period(self):
+        return self.ds.domain_width
+
+    def sanitize_center(self, center, axis):
+        center, display_center = super(
+            SphericalCoordinateHandler, self).sanitize_center(center, axis)
+        if axis == 0:
+            display_center = center
+        elif axis == 1:
+            display_center = (0.0 * display_center[0],
+                              0.0 * display_center[1],
+                              0.0 * display_center[2])
+        elif axis == 2:
+            display_center = (self.ds.domain_width[0]/2.0,
+                              0.0 * display_center[1],
+                              0.0 * display_center[2])
+        return center, display_center
+
+    def sanitize_width(self, axis, width, depth):
+        if width is not None:
+            width = super(SphericalCoordinateHandler, self).sanitize_width(
+              axis, width, depth)
+        elif axis == 0:
+            width = [self.ds.domain_width[self.x_axis[0]],
+                     self.ds.domain_width[self.y_axis[0]]]
+        elif axis == 1:
+            # Remember, in spherical coordinates when we cut in theta,
+            # we create a conic section
+            width = [2.0*self.ds.domain_width[0],
+                     2.0*self.ds.domain_width[0]]
+        elif axis == 2:
+            width = [self.ds.domain_width[0],
+                     2.0*self.ds.domain_width[0]]
+        return width
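To make the convert_to_cartesian branch above concrete, here is a
standalone numpy sketch of the spherical-to-Cartesian transform it applies
(theta is the colatitude, phi the azimuth), with a quick check that the
radius is preserved; the sample points are arbitrary.

    import numpy as np

    def spherical_to_cartesian(coord):
        # columns of coord are (r, theta, phi)
        r, theta, phi = coord[:, 0], coord[:, 1], coord[:, 2]
        nc = np.empty_like(coord)
        nc[:, 0] = r * np.sin(theta) * np.cos(phi)   # x
        nc[:, 1] = r * np.sin(theta) * np.sin(phi)   # y
        nc[:, 2] = r * np.cos(theta)                 # z
        return nc

    pts = np.array([[1.0, 0.5 * np.pi, 0.0],
                    [2.0, 0.25 * np.pi, 1.0]])
    xyz = spherical_to_cartesian(pts)
    assert np.allclose(np.sqrt((xyz ** 2).sum(axis=1)), pts[:, 0])

The cell_volume field registered above follows from the corresponding
volume element, dV = r**2 sin(theta) dr dtheta dphi.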

diff -r 66bbf028d950fca7b523c06712c4f155bfbc15a4 -r 6f0db65e21bcabd8283cadb52e30b6c1ed9a4afc yt/geometry/cylindrical_coordinates.py
--- a/yt/geometry/cylindrical_coordinates.py
+++ /dev/null
@@ -1,145 +0,0 @@
-"""
-Cylindrical fields
-
-
-
-
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (c) 2013, yt Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------
-
-import numpy as np
-from yt.units.yt_array import YTArray
-from .coordinate_handler import \
-    CoordinateHandler, \
-    _unknown_coord, \
-    _get_coord_fields
-import yt.visualization._MPL as _MPL
-from yt.utilities.lib.misc_utilities import \
-    pixelize_cylinder
-#
-# Cylindrical fields
-#
-
-class CylindricalCoordinateHandler(CoordinateHandler):
-
-    def __init__(self, ds, ordering = 'rzt'):
-        if ordering != 'rzt': raise NotImplementedError
-        super(CylindricalCoordinateHandler, self).__init__(ds)
-
-    def setup_fields(self, registry):
-        # return the fields for r, z, theta
-        registry.add_field(("index", "dx"), function=_unknown_coord)
-        registry.add_field(("index", "dy"), function=_unknown_coord)
-        registry.add_field(("index", "x"), function=_unknown_coord)
-        registry.add_field(("index", "y"), function=_unknown_coord)
-        f1, f2 = _get_coord_fields(0)
-        registry.add_field(("index", "dr"), function = f1,
-                           display_field = False,
-                           units = "code_length")
-        registry.add_field(("index", "r"), function = f2,
-                           display_field = False,
-                           units = "code_length")
-
-        f1, f2 = _get_coord_fields(1)
-        registry.add_field(("index", "dz"), function = f1,
-                           display_field = False,
-                           units = "code_length")
-        registry.add_field(("index", "z"), function = f2,
-                           display_field = False,
-                           units = "code_length")
-
-        f1, f2 = _get_coord_fields(2, "")
-        registry.add_field(("index", "dtheta"), function = f1,
-                           display_field = False,
-                           units = "")
-        registry.add_field(("index", "theta"), function = f2,
-                           display_field = False,
-                           units = "")
-
-        def _CylindricalVolume(field, data):
-            return data["index", "dtheta"] \
-                 * data["index", "r"] \
-                 * data["index", "dr"] \
-                 * data["index", "dz"]
-        registry.add_field(("index", "cell_volume"),
-                 function=_CylindricalVolume,
-                 units = "code_length**3")
-
-
-    def pixelize(self, dimension, data_source, field, bounds, size,
-                 antialias = True, periodic = True):
-        ax_name = self.axis_name[dimension]
-        if ax_name in ('r', 'theta'):
-            return self._ortho_pixelize(data_source, field, bounds, size,
-                                        antialias, dimension, periodic)
-        elif ax_name == "z":
-            return self._cyl_pixelize(data_source, field, bounds, size,
-                                        antialias)
-        else:
-            # Pixelizing along a cylindrical surface is a bit tricky
-            raise NotImplementedError
-
-    def _ortho_pixelize(self, data_source, field, bounds, size, antialias,
-                        dim, periodic):
-        period = self.period[:2].copy() # dummy here
-        period[0] = self.period[self.x_axis[dim]]
-        period[1] = self.period[self.y_axis[dim]]
-        if hasattr(period, 'in_units'):
-            period = period.in_units("code_length").d
-        buff = _MPL.Pixelize(data_source['px'], data_source['py'],
-                             data_source['pdx'], data_source['pdy'],
-                             data_source[field], size[0], size[1],
-                             bounds, int(antialias),
-                             period, int(periodic)).transpose()
-        return buff
-
-    def _cyl_pixelize(self, data_source, field, bounds, size, antialias):
-        buff = pixelize_cylinder(data_source['r'],
-                                 data_source['dr'],
-                                 data_source['theta'],
-                                 data_source['dtheta']/2.0, # half-widths
-                                 size, data_source[field], bounds)
-        return buff
-
-    axis_name = { 0  : 'r',  1  : 'z',  2  : 'theta',
-                 'r' : 'r', 'z' : 'z', 'theta' : 'theta',
-                 'R' : 'r', 'Z' : 'z', 'Theta' : 'theta'}
-
-    axis_id = { 'r' : 0, 'z' : 1, 'theta' : 2,
-                 0  : 0,  1  : 1,  2  : 2}
-
-    x_axis = { 'r' : 1, 'z' : 0, 'theta' : 0,
-                0  : 1,  1  : 0,  2  : 0}
-
-    y_axis = { 'r' : 2, 'z' : 2, 'theta' : 1,
-                0  : 2,  1  : 2,  2  : 1}
-
-    def convert_from_cartesian(self, coord):
-        return cartesian_to_cylindrical(coord)
-
-    def convert_to_cartesian(self, coord):
-        return cylindrical_to_cartesian(coord)
-
-    def convert_to_cylindrical(self, coord):
-        return coord
-
-    def convert_from_cylindrical(self, coord):
-        return coord
-
-    def convert_to_spherical(self, coord):
-        raise NotImplementedError
-
-    def convert_from_spherical(self, coord):
-        raise NotImplementedError
-
-    @property
-    def period(self):
-        return np.array([0.0, 0.0, 2.0*np.pi])
-

This diff is so big that we needed to truncate the remainder.

Repository URL: https://bitbucket.org/yt_analysis/yt/

--

This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.

