[yt-svn] commit/yt: 5 new changesets

commits-noreply@bitbucket.org
Tue Jun 28 15:09:25 PDT 2016


5 new commits in yt:

https://bitbucket.org/yt_analysis/yt/commits/3e613ecb6e2d/
Changeset:   3e613ecb6e2d
Branch:      yt
User:        MatthewTurk
Date:        2016-06-03 20:39:09+00:00
Summary:     Adding "internal" geographic coordinates that accept depth instead of altitude.
Affected #:  5 files
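
For reference, a minimal sketch of exercising the new geometry, modeled on the
test added below in this changeset (the outer_radius value is illustrative):

    import numpy as np
    from yt.testing import fake_amr_ds

    # Build a small AMR dataset on the new (latitude, longitude, depth)
    # grid and give it an outer radius, as the new test does.
    ds = fake_amr_ds(geometry="internal_geographic")
    ds.outer_radius = ds.quan(5000, "code_length")

    dd = ds.all_data()
    # With the internal handler, radius is recovered as outer_radius - depth.
    assert np.all(dd["index", "r"] ==
                  -1.0 * dd["index", "depth"] + ds.outer_radius)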

diff -r 331c7ef7049d257043699a8b570cf7bd8c39a82f -r 3e613ecb6e2d84009a7622e611fa22189e8a5127 yt/data_objects/static_output.py
--- a/yt/data_objects/static_output.py
+++ b/yt/data_objects/static_output.py
@@ -72,7 +72,8 @@
     CylindricalCoordinateHandler, \
     SphericalCoordinateHandler, \
     GeographicCoordinateHandler, \
-    SpectralCubeCoordinateHandler
+    SpectralCubeCoordinateHandler, \
+    InternalGeographicCoordinateHandler
 
 # We want to support the movie format in the future.
 # When such a thing comes to pass, I'll move all the stuff that is contant up
@@ -491,6 +492,8 @@
             cls = SphericalCoordinateHandler
         elif self.geometry == "geographic":
             cls = GeographicCoordinateHandler
+        elif self.geometry == "internal_geographic":
+            cls = InternalGeographicCoordinateHandler
         elif self.geometry == "spectral_cube":
             cls = SpectralCubeCoordinateHandler
         else:

diff -r 331c7ef7049d257043699a8b570cf7bd8c39a82f -r 3e613ecb6e2d84009a7622e611fa22189e8a5127 yt/geometry/coordinates/api.py
--- a/yt/geometry/coordinates/api.py
+++ b/yt/geometry/coordinates/api.py
@@ -23,7 +23,8 @@
 from .spherical_coordinates import \
     SphericalCoordinateHandler
 from .geographic_coordinates import \
-    GeographicCoordinateHandler
+     GeographicCoordinateHandler, \
+     InternalGeographicCoordinateHandler
 from .spec_cube_coordinates import \
     SpectralCubeCoordinateHandler
 

diff -r 331c7ef7049d257043699a8b570cf7bd8c39a82f -r 3e613ecb6e2d84009a7622e611fa22189e8a5127 yt/geometry/coordinates/geographic_coordinates.py
--- a/yt/geometry/coordinates/geographic_coordinates.py
+++ b/yt/geometry/coordinates/geographic_coordinates.py
@@ -23,13 +23,16 @@
     pixelize_cylinder, pixelize_aitoff
 
 class GeographicCoordinateHandler(CoordinateHandler):
+    radial_axis = "altitude"
 
-    def __init__(self, ds, ordering = ('latitude', 'longitude', 'altitude')):
+    def __init__(self, ds, ordering = None):
+        if not ordering:
+            ordering = ('latitude', 'longitude', self.radial_axis)
         super(GeographicCoordinateHandler, self).__init__(ds, ordering)
         self.image_units = {}
         self.image_units[self.axis_id['latitude']] = (None, None)
         self.image_units[self.axis_id['longitude']] = (None, None)
-        self.image_units[self.axis_id['altitude']] = ('deg', 'deg')
+        self.image_units[self.axis_id[self.radial_axis]] = ('deg', 'deg')
 
     def setup_fields(self, registry):
         # return the fields for r, z, theta
@@ -55,11 +58,11 @@
                            display_field = False,
                            units = "")
 
-        f1, f2 = _get_coord_fields(self.axis_id['altitude'])
-        registry.add_field(("index", "daltitude"), function = f1,
+        f1, f2 = _get_coord_fields(self.axis_id[self.radial_axis])
+        registry.add_field(("index", "d%s" % (self.radial_axis,)), function = f1,
                            display_field = False,
                            units = "code_length")
-        registry.add_field(("index", "altitude"), function = f2,
+        registry.add_field(("index", self.radial_axis), function = f2,
                            display_field = False,
                            units = "code_length")
 
@@ -76,10 +79,10 @@
                  function=_SphericalVolume,
                  units = "code_length**3")
 
-        def _path_altitude(field, data):
-            return data["index", "daltitude"]
-        registry.add_field(("index", "path_element_altitude"),
-                 function = _path_altitude,
+        def _path_radial_axis(field, data):
+            return data["index", "d%s" % self.radial_axis]
+        registry.add_field(("index", "path_element_%s" % self.radial_axis),
+                 function = _path_radial_axis,
                  units = "code_length")
         def _path_latitude(field, data):
             # We use r here explicitly
@@ -97,21 +100,6 @@
                  function = _path_longitude,
                  units = "code_length")
 
-        # Altitude is the radius from the central zone minus the radius of the
-        # surface.
-        def _altitude_to_radius(field, data):
-            surface_height = data.get_field_parameter("surface_height")
-            if surface_height is None:
-                if hasattr(data.ds, "surface_height"):
-                    surface_height = data.ds.surface_height
-                else:
-                    surface_height = data.ds.quan(0.0, "code_length")
-            return data["altitude"] + surface_height
-        registry.add_field(("index", "r"),
-                 function=_altitude_to_radius,
-                 units = "code_length")
-        registry.alias(("index", "dr"), ("index", "daltitude"))
-
         def _latitude_to_theta(field, data):
             # latitude runs from -90 to 90
             return (data["latitude"] + 90) * np.pi/180.0
@@ -136,12 +124,47 @@
                  function = _dlongitude_to_dphi,
                  units = "")
 
+        self._setup_radial_fields(registry)
+
+    def _setup_radial_fields(self, registry):
+        # This stays here because we don't want to risk the field detector not
+        # properly getting the data_source, etc.
+        def _altitude_to_radius(field, data):
+            surface_height = data.get_field_parameter("surface_height")
+            if surface_height is None:
+                if hasattr(data.ds, "surface_height"):
+                    surface_height = data.ds.surface_height
+                else:
+                    surface_height = data.ds.quan(0.0, "code_length")
+            return data["altitude"] + surface_height
+        registry.add_field(("index", "r"),
+                 function=_altitude_to_radius,
+                 units = "code_length")
+        registry.alias(("index", "dr"), ("index", "daltitude"))
+
+    def _retrieve_radial_offset(self, data_source = None):
+        # This returns the factor by which the radial field is multiplied and
+        # the scalar its offset by.  Typically the "factor" will *only* be
+        # either 1.0 or -1.0.  The order will be factor * r + offset.
+        # Altitude is the radius from the central zone minus the radius of the
+        # surface.  Depth to radius is negative value of depth plus the
+        # outermost radius.
+        surface_height = None
+        if data_source is not None:
+            surface_height = data_source.get_field_parameter("surface_height")
+        if surface_height is None:
+            if hasattr(self.ds, "surface_height"):
+                surface_height = self.ds.surface_height
+            else:
+                surface_height = self.ds.quan(0.0, "code_length")
+        return surface_height, 1.0
+
     def pixelize(self, dimension, data_source, field, bounds, size,
                  antialias = True, periodic = True):
         if self.axis_name[dimension] in ('latitude', 'longitude'):
             return self._cyl_pixelize(data_source, field, bounds, size,
                                           antialias, dimension)
-        elif self.axis_name[dimension] == 'altitude':
+        elif self.axis_name[dimension] == self.radial_axis:
             return self._ortho_pixelize(data_source, field, bounds, size,
                                         antialias, dimension, periodic)
         else:
@@ -163,13 +186,8 @@
 
     def _cyl_pixelize(self, data_source, field, bounds, size, antialias,
                       dimension):
-        surface_height = data_source.get_field_parameter("surface_height")
-        if surface_height is None:
-            if hasattr(data_source.ds, "surface_height"):
-                surface_height = data_source.ds.surface_height
-            else:
-                surface_height = data_source.ds.quan(0.0, "code_length")
-        r = data_source['py'] + surface_height
+        offset, factor = self._retrieve_radial_offset(data_source)
+        r = factor * data_source['py'] + offset
         # Because of the axis-ordering, dimensions 0 and 1 both have r as py
         # and the angular coordinate as px.  But we need to figure out how to
         # convert our coordinate back to an actual angle, based on which
@@ -191,20 +209,15 @@
             buff = buff.transpose()
         return buff
 
-
     def convert_from_cartesian(self, coord):
         raise NotImplementedError
 
     def convert_to_cartesian(self, coord):
-        if hasattr(self.ds, "surface_height"):
-            surface_height = self.ds.surface_height
-        else:
-            surface_height = self.ds.quan(0.0, "code_length")
+        offset, factor = self._retrieve_radial_offset()
         if isinstance(coord, np.ndarray) and len(coord.shape) > 1:
-            alt = self.axis_id['altitude']
+            r = factor * coord[:,self.axis_id[self.radial_axis]] + offset
             lon = self.axis_id['longitude']
             lat = self.axis_id['latitude']
-            r = coord[:,alt] + surface_height
             theta = coord[:,lon] * np.pi/180
             phi = coord[:,lat] * np.pi/180
             nc = np.zeros_like(coord)
@@ -216,7 +229,7 @@
             a, b, c = coord
             theta = b * np.pi/180
             phi = a * np.pi/180
-            r = surface_height + c
+            r = factor * c + offset
             nc = (np.cos(phi) * np.sin(theta)*r,
                   np.sin(phi) * np.sin(theta)*r,
                   np.cos(theta) * r)
@@ -247,7 +260,7 @@
                   'y / \\sin(\mathrm{latitude})'),
               self.axis_id['longitude']:
                  ('R', 'z'),
-              self.axis_id['altitude']:
+              self.axis_id[self.radial_axis]:
                  ('longitude', 'latitude')}
         for i in list(rv.keys()):
             rv[self.axis_name[i]] = rv[i]
@@ -271,14 +284,14 @@
         center, display_center = super(
             GeographicCoordinateHandler, self).sanitize_center(center, axis)
         name = self.axis_name[axis]
-        if name == 'altitude':
+        if name == self.radial_axis:
             display_center = center
         elif name == 'latitude':
             display_center = (0.0 * display_center[0],
                               0.0 * display_center[1],
                               0.0 * display_center[2])
         elif name == 'longitude':
-            ri = self.axis_id['altitude']
+            ri = self.axis_id[self.radial_axis]
             c = (self.ds.domain_right_edge[ri] +
                  self.ds.domain_left_edge[ri])/2.0
             display_center = [0.0 * display_center[0], 
@@ -292,17 +305,107 @@
         if width is not None:
             width = super(GeographicCoordinateHandler, self).sanitize_width(
               axis, width, depth)
-        elif name == 'altitude':
-            width = [self.ds.domain_width[self.y_axis['altitude']],
-                     self.ds.domain_width[self.x_axis['altitude']]]
+        elif name == self.radial_axis:
+            rax = self.radial_axis
+            width = [self.ds.domain_width[self.y_axis[rax]],
+                     self.ds.domain_width[self.x_axis[rax]]]
         elif name == 'latitude':
-            ri = self.axis_id['altitude']
+            ri = self.axis_id[self.radial_axis]
             # Remember, in spherical coordinates when we cut in theta,
             # we create a conic section
             width = [2.0*self.ds.domain_width[ri],
                      2.0*self.ds.domain_width[ri]]
         elif name == 'longitude':
-            ri = self.axis_id['altitude']
+            ri = self.axis_id[self.radial_axis]
             width = [self.ds.domain_width[ri],
                      2.0*self.ds.domain_width[ri]]
         return width
+
+class InternalGeographicCoordinateHandler(GeographicCoordinateHandler):
+    radial_axis = "depth"
+    
+    def _setup_radial_fields(self, registry):
+        # Altitude is the radius from the central zone minus the radius of the
+        # surface.
+        def _depth_to_radius(field, data):
+            outer_radius = data.get_field_parameter("outer_radius")
+            if outer_radius is None:
+                if hasattr(data.ds, "outer_radius"):
+                    outer_radius = data.ds.outer_radius
+                else:
+                    # Otherwise, we assume that the depth goes to full depth,
+                    # so we can look at the domain right edge in depth.
+                    rax = self.axis_id[self.radial_axis]
+                    outer_radius = data.ds.domain_right_edge[rax]
+            return -1.0 * data["depth"] + outer_radius
+        registry.add_field(("index", "r"),
+                 function=_depth_to_radius,
+                 units = "code_length")
+        registry.alias(("index", "dr"), ("index", "ddepth"))
+
+    def _retrieve_radial_offset(self, data_source = None):
+        # Depth means switching sign and adding to full radius
+        outer_radius = None
+        if data_source is not None:
+            outer_radius = data_source.get_field_parameter("outer_radius")
+        if outer_radius is None:
+            if hasattr(self.ds, "outer_radius"):
+                outer_radius = self.ds.outer_radius
+            else:
+                # Otherwise, we assume that the depth goes to full depth,
+                # so we can look at the domain right edge in depth.
+                rax = self.axis_id[self.radial_axis]
+                outer_radius = self.ds.domain_right_edge[rax]
+        return outer_radius, -1.0
+
+    _x_pairs = (('latitude', 'longitude'),
+                ('longitude', 'latitude'),
+                ('depth', 'latitude'))
+
+    _y_pairs = (('latitude', 'depth'),
+                ('longitude', 'depth'),
+                ('depth', 'longitude'))
+
+    def sanitize_center(self, center, axis):
+        center, display_center = super(
+            GeographicCoordinateHandler, self).sanitize_center(center, axis)
+        name = self.axis_name[axis]
+        if name == self.radial_axis:
+            display_center = center
+        elif name == 'latitude':
+            display_center = (0.0 * display_center[0],
+                              0.0 * display_center[1],
+                              0.0 * display_center[2])
+        elif name == 'longitude':
+            ri = self.axis_id[self.radial_axis]
+            offset, factor = self._retrieve_radial_offset()
+            outermost = factor * self.ds.domain_left_edge[ri] + offset
+            display_center = [0.0 * display_center[0], 
+                              0.0 * display_center[1],
+                              0.0 * display_center[2]]
+            display_center[self.axis_id['latitude']] = outermost/2.0
+        return center, display_center
+
+    def sanitize_width(self, axis, width, depth):
+        name = self.axis_name[axis]
+        if width is not None:
+            width = super(GeographicCoordinateHandler, self).sanitize_width(
+              axis, width, depth)
+        elif name == self.radial_axis:
+            rax = self.radial_axis
+            width = [self.ds.domain_width[self.y_axis[rax]],
+                     self.ds.domain_width[self.x_axis[rax]]]
+        elif name == 'latitude':
+            ri = self.axis_id[self.radial_axis]
+            # Remember, in spherical coordinates when we cut in theta,
+            # we create a conic section
+            offset, factor = self._retrieve_radial_offset()
+            outermost = factor * self.ds.domain_left_edge[ri] + offset
+            width = [2.0*outermost, 2.0*outermost]
+        elif name == 'longitude':
+            ri = self.axis_id[self.radial_axis]
+            offset, factor = self._retrieve_radial_offset()
+            outermost = factor * self.ds.domain_left_edge[ri] + offset
+            width = [outermost, 2.0*outermost]
+        return width
+
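
As a rough illustration of the (offset, factor) convention used by
_retrieve_radial_offset above (radius = factor * radial coordinate + offset),
with made-up surface height and outer radius values:

    # Geographic handler: altitude is measured up from the surface.
    surface_height = 10.0            # stands in for ds.surface_height
    offset, factor = surface_height, 1.0
    r_from_altitude = factor * 250.0 + offset    # -> 260.0

    # Internal handler: depth is measured down from the outermost radius.
    outer_radius = 5000.0            # stands in for ds.outer_radius
    offset, factor = outer_radius, -1.0
    r_from_depth = factor * 250.0 + offset       # -> 4750.0

    print(r_from_altitude, r_from_depth)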

diff -r 331c7ef7049d257043699a8b570cf7bd8c39a82f -r 3e613ecb6e2d84009a7622e611fa22189e8a5127 yt/geometry/coordinates/tests/test_geographic_coordinates.py
--- a/yt/geometry/coordinates/tests/test_geographic_coordinates.py
+++ b/yt/geometry/coordinates/tests/test_geographic_coordinates.py
@@ -54,3 +54,48 @@
     # We also want to check that our radius is correct
     yield assert_equal, dd["index","r"], \
                         dd["index","altitude"] + ds.surface_height
+
+def test_internal_geographic_coordinates():
+    # We're going to load up a simple AMR grid and check its volume
+    # calculations and path length calculations.
+
+    # Note that we are setting it up to have depth of 1000 maximum, which
+    # means our volume will be that of a shell 1000 wide, starting at r of
+    # outer_radius - 1000.
+    ds = fake_amr_ds(geometry="internal_geographic")
+    ds.outer_radius = ds.quan(5000, "code_length")
+    axes = ["latitude", "longitude", "depth"]
+    for i, axis in enumerate(axes):
+        dd = ds.all_data()
+        fi = ("index", axis)
+        fd = ("index", "d%s" % axis)
+        ma = np.argmax(dd[fi])
+        yield assert_equal, dd[fi][ma] + dd[fd][ma] / 2.0, ds.domain_right_edge[i].d
+        mi = np.argmin(dd[fi])
+        yield assert_equal, dd[fi][mi] - dd[fd][mi] / 2.0, ds.domain_left_edge[i].d
+        yield assert_equal, dd[fd].max(), (ds.domain_width/ds.domain_dimensions)[i].d
+    inner_r = ds.outer_radius - ds.domain_right_edge[2]
+    outer_r = ds.outer_radius
+    yield assert_equal, dd["index","dtheta"], \
+                        dd["index","dlatitude"]*np.pi/180.0
+    yield assert_equal, dd["index","dphi"], \
+                        dd["index","dlongitude"]*np.pi/180.0
+    # Note our terrible agreement here.
+    yield assert_rel_equal, dd["cell_volume"].sum(dtype="float64"), \
+                        (4.0/3.0) * np.pi * (outer_r**3 - inner_r**3), \
+                        3
+    yield assert_equal, dd["index", "path_element_depth"], \
+                        dd["index", "ddepth"]
+    yield assert_equal, dd["index", "path_element_depth"], \
+                        dd["index", "dr"]
+    # Note that latitude corresponds to theta, longitude to phi
+    yield assert_equal, dd["index", "path_element_latitude"], \
+                        dd["index", "r"] * \
+                        dd["index", "dlatitude"] * np.pi/180.0
+    yield assert_equal, dd["index", "path_element_longitude"], \
+                        dd["index", "r"] * \
+                        dd["index", "dlongitude"] * np.pi/180.0 * \
+                        np.sin((dd["index", "latitude"] + 90.0) * np.pi/180.0)
+    # We also want to check that our radius is correct
+    yield assert_equal, dd["index","r"], \
+                        -1.0*dd["index","depth"] + ds.outer_radius

diff -r 331c7ef7049d257043699a8b570cf7bd8c39a82f -r 3e613ecb6e2d84009a7622e611fa22189e8a5127 yt/testing.py
--- a/yt/testing.py
+++ b/yt/testing.py
@@ -225,6 +225,8 @@
     'cylindrical': ( (0.0, 0.0, 0.0), (1.0, 1.0, 2.0*np.pi) ), # rzt
     'polar'      : ( (0.0, 0.0, 0.0), (1.0, 2.0*np.pi, 1.0) ), # rtz
     'geographic' : ( (-90.0, -180.0, 0.0), (90.0, 180.0, 1000.0) ), # latlonalt
+    'internal_geographic' :
+                   ( (-90.0, -180.0, 0.0), (90.0, 180.0, 1000.0) ), # latlondep
 }
 
 def fake_amr_ds(fields = ("Density",), geometry = "cartesian"):


https://bitbucket.org/yt_analysis/yt/commits/e90982542bb5/
Changeset:   e90982542bb5
Branch:      yt
User:        MatthewTurk
Date:        2016-06-13 19:56:51+00:00
Summary:     Fix missing alt definition
Affected #:  1 file

diff -r 3e613ecb6e2d84009a7622e611fa22189e8a5127 -r e90982542bb59d26f0e74a85b57f0583f289aaac yt/geometry/coordinates/geographic_coordinates.py
--- a/yt/geometry/coordinates/geographic_coordinates.py
+++ b/yt/geometry/coordinates/geographic_coordinates.py
@@ -215,16 +215,17 @@
     def convert_to_cartesian(self, coord):
         offset, factor = self._retrieve_radial_offset()
         if isinstance(coord, np.ndarray) and len(coord.shape) > 1:
-            r = factor * coord[:,self.axis_id[self.radial_axis]] + offset
+            rad = self.axis_id[self.radial_axis]
             lon = self.axis_id['longitude']
             lat = self.axis_id['latitude']
+            r = factor * coord[:,rad] + offset
             theta = coord[:,lon] * np.pi/180
             phi = coord[:,lat] * np.pi/180
             nc = np.zeros_like(coord)
             # r, theta, phi
             nc[:,lat] = np.cos(phi) * np.sin(theta)*r
             nc[:,lon] = np.sin(phi) * np.sin(theta)*r
-            nc[:,alt] = np.cos(theta) * r
+            nc[:,rad] = np.cos(theta) * r
         else:
             a, b, c = coord
             theta = b * np.pi/180


https://bitbucket.org/yt_analysis/yt/commits/5fe1482b4df0/
Changeset:   5fe1482b4df0
Branch:      yt
User:        MatthewTurk
Date:        2016-06-15 18:13:21+00:00
Summary:     Merging from upstream
Affected #:  46 files

diff -r e90982542bb59d26f0e74a85b57f0583f289aaac -r 5fe1482b4df00f40f79e3a2faa21a6d4af020ac0 CONTRIBUTING.rst
--- a/CONTRIBUTING.rst
+++ b/CONTRIBUTING.rst
@@ -651,7 +651,7 @@
 .. _multiple-PRs:
 
 Working with Multiple BitBucket Pull Requests
-+++++++++++++++++++++++++++++++++++++++++++++
+---------------------------------------------
 
 Once you become active developing for yt, you may be working on
 various aspects of the code or bugfixes at the same time.  Currently,

diff -r e90982542bb59d26f0e74a85b57f0583f289aaac -r 5fe1482b4df00f40f79e3a2faa21a6d4af020ac0 doc/install_script.sh
--- a/doc/install_script.sh
+++ b/doc/install_script.sh
@@ -704,7 +704,7 @@
 if type -P curl &>/dev/null
 then
     echo "Using curl"
-    export GETFILE="curl -sSO"
+    export GETFILE="curl -sSOL"
 else
     echo "Using wget"
     export GETFILE="wget -nv"

diff -r e90982542bb59d26f0e74a85b57f0583f289aaac -r 5fe1482b4df00f40f79e3a2faa21a6d4af020ac0 doc/source/analyzing/fields.rst
--- a/doc/source/analyzing/fields.rst
+++ b/doc/source/analyzing/fields.rst
@@ -98,22 +98,31 @@
 .. code-block:: python
 
     ad = ds.all_data()
+
+    # just a field name
     density = ad['density']
+
+    # field tuple with no parentheses
     density = ad['gas', 'density']
+
+    # full field tuple
     density = ad[('gas', 'density')]
-    dnesity = ad[ds.fields.gas.density]
+
+    # through the ds.fields object
+    density = ad[ds.fields.gas.density]
 
 The first data access example is the simplest. In that example, the field type
 is inferred from the name of the field. The next two examples use the field type
 explicitly, this might be necessary if there is more than one field type with a
-"density" field defined in the same simulation. The third example is a slightly
-more verbose and is syntactically identical to the second example due to the way
-indexing functions in Python. The final example uses the ``ds.fields` object
-described above. This way of accessing fields lends itself to interactive use,
-especially if you make heavy use of IPython's tab completion features. Any of
-these ways of denoting the ``('gas', 'density')`` field can be used when
-supplying a field name to a yt data object, analysis routines, or plotting and
-visualization function.
+"density" field defined in the same dataset. The third example is slightly more
+verbose but is syntactically identical to the second example due to the way
+indexing works in the Python language.
+
+The final example uses the ``ds.fields`` object described above. This way of
+accessing fields lends itself to interactive use, especially if you make heavy
+use of IPython's tab completion features. Any of these ways of denoting the
+``('gas', 'density')`` field can be used when supplying a field name to a yt
+data object, analysis routines, or plotting and visualization function.
 
 Accessing Fields without a Field Type
 -------------------------------------

diff -r e90982542bb59d26f0e74a85b57f0583f289aaac -r 5fe1482b4df00f40f79e3a2faa21a6d4af020ac0 setup.py
--- a/setup.py
+++ b/setup.py
@@ -83,114 +83,70 @@
     Extension("yt.geometry.grid_visitors",
               ["yt/geometry/grid_visitors.pyx"],
               include_dirs=["yt/utilities/lib"],
-              libraries=std_libs,
-              depends=["yt/utilities/lib/fp_utils.pxd",
-                       "yt/geometry/grid_visitors.pxd"]),
+              libraries=std_libs),
     Extension("yt.geometry.grid_container",
               ["yt/geometry/grid_container.pyx"],
               include_dirs=["yt/utilities/lib/"],
-              libraries=std_libs,
-              depends=["yt/utilities/lib/fp_utils.pxd",
-                       "yt/geometry/grid_container.pxd",
-                       "yt/geometry/grid_visitors.pxd"]),
+              libraries=std_libs),
     Extension("yt.geometry.oct_container",
               ["yt/geometry/oct_container.pyx",
                "yt/utilities/lib/tsearch.c"],
               include_dirs=["yt/utilities/lib"],
-              libraries=std_libs,
-              depends=["yt/utilities/lib/fp_utils.pxd",
-                       "yt/geometry/oct_container.pxd",
-                       "yt/geometry/selection_routines.pxd"]),
+              libraries=std_libs),
     Extension("yt.geometry.oct_visitors",
               ["yt/geometry/oct_visitors.pyx"],
               include_dirs=["yt/utilities/lib/"],
-              libraries=std_libs,
-              depends=["yt/utilities/lib/fp_utils.pxd",
-                       "yt/geometry/oct_container.pxd",
-                       "yt/geometry/selection_routines.pxd"]),
+              libraries=std_libs),
     Extension("yt.geometry.particle_oct_container",
               ["yt/geometry/particle_oct_container.pyx"],
               include_dirs=["yt/utilities/lib/"],
-              libraries=std_libs,
-              depends=["yt/utilities/lib/fp_utils.pxd",
-                       "yt/geometry/oct_container.pxd",
-                       "yt/geometry/selection_routines.pxd"]),
+              libraries=std_libs),
     Extension("yt.geometry.selection_routines",
               ["yt/geometry/selection_routines.pyx"],
               include_dirs=["yt/utilities/lib/"],
-              libraries=std_libs,
-              depends=["yt/utilities/lib/fp_utils.pxd",
-                       "yt/utilities/lib/grid_traversal.pxd",
-                       "yt/geometry/oct_container.pxd",
-                       "yt/geometry/oct_visitors.pxd",
-                       "yt/geometry/grid_container.pxd",
-                       "yt/geometry/grid_visitors.pxd",
-                       "yt/geometry/selection_routines.pxd"]),
+              libraries=std_libs),
     Extension("yt.geometry.particle_deposit",
               ["yt/geometry/particle_deposit.pyx"],
               include_dirs=["yt/utilities/lib/"],
-              libraries=std_libs,
-              depends=["yt/utilities/lib/fp_utils.pxd",
-                       "yt/geometry/oct_container.pxd",
-                       "yt/geometry/selection_routines.pxd",
-                       "yt/geometry/particle_deposit.pxd"]),
+              libraries=std_libs),
     Extension("yt.geometry.particle_smooth",
               ["yt/geometry/particle_smooth.pyx"],
               include_dirs=["yt/utilities/lib/"],
-              libraries=std_libs,
-              depends=["yt/utilities/lib/fp_utils.pxd",
-                       "yt/geometry/oct_container.pxd",
-                       "yt/geometry/selection_routines.pxd",
-                       "yt/geometry/particle_deposit.pxd",
-                       "yt/geometry/particle_smooth.pxd"]),
+              libraries=std_libs),
     Extension("yt.geometry.fake_octree",
               ["yt/geometry/fake_octree.pyx"],
               include_dirs=["yt/utilities/lib/"],
-              libraries=std_libs,
-              depends=["yt/utilities/lib/fp_utils.pxd",
-                       "yt/geometry/oct_container.pxd",
-                       "yt/geometry/selection_routines.pxd"]),
+              libraries=std_libs),
     Extension("yt.utilities.spatial.ckdtree",
               ["yt/utilities/spatial/ckdtree.pyx"],
               include_dirs=["yt/utilities/lib/"],
               libraries=std_libs),
     Extension("yt.utilities.lib.bitarray",
               ["yt/utilities/lib/bitarray.pyx"],
-              libraries=std_libs, depends=["yt/utilities/lib/bitarray.pxd"]),
+              libraries=std_libs),
     Extension("yt.utilities.lib.bounding_volume_hierarchy",
               ["yt/utilities/lib/bounding_volume_hierarchy.pyx"],
               include_dirs=["yt/utilities/lib/"],
               extra_compile_args=omp_args,
               extra_link_args=omp_args,
               libraries=std_libs,
-              depends=["yt/utilities/lib/element_mappings.pxd",
-                       "yt/utilities/lib/mesh_triangulation.h",
-                       "yt/utilities/lib/vec3_ops.pxd",
-                       "yt/utilities/lib/primitives.pxd"]),
+              depends=["yt/utilities/lib/mesh_triangulation.h"]),
     Extension("yt.utilities.lib.contour_finding",
               ["yt/utilities/lib/contour_finding.pyx"],
               include_dirs=["yt/utilities/lib/",
                             "yt/geometry/"],
-              libraries=std_libs,
-              depends=["yt/utilities/lib/fp_utils.pxd",
-                       "yt/utilities/lib/amr_kdtools.pxd",
-                       "yt/utilities/lib/grid_traversal.pxd",
-                       "yt/utilities/lib/contour_finding.pxd",
-                       "yt/geometry/oct_container.pxd"]),
+              libraries=std_libs),
     Extension("yt.utilities.lib.geometry_utils",
               ["yt/utilities/lib/geometry_utils.pyx"],
               extra_compile_args=omp_args,
               extra_link_args=omp_args,
-              libraries=std_libs, depends=["yt/utilities/lib/fp_utils.pxd"]),
+              libraries=std_libs),
     Extension("yt.utilities.lib.marching_cubes",
               ["yt/utilities/lib/marching_cubes.pyx",
                "yt/utilities/lib/fixed_interpolator.c"],
               include_dirs=["yt/utilities/lib/"],
               libraries=std_libs,
-              depends=["yt/utilities/lib/fp_utils.pxd",
-                       "yt/utilities/lib/fixed_interpolator.pxd",
-                       "yt/utilities/lib/fixed_interpolator.h",
-                       ]),
+              depends=["yt/utilities/lib/fixed_interpolator.h"]),
     Extension("yt.utilities.lib.mesh_triangulation",
               ["yt/utilities/lib/mesh_triangulation.pyx"],
               depends=["yt/utilities/lib/mesh_triangulation.h"]),
@@ -198,15 +154,11 @@
               ["yt/utilities/lib/pixelization_routines.pyx",
                "yt/utilities/lib/pixelization_constants.c"],
               include_dirs=["yt/utilities/lib/"],
-              libraries=std_libs, depends=["yt/utilities/lib/fp_utils.pxd",
-                                        "yt/utilities/lib/pixelization_constants.h",
-                                        "yt/utilities/lib/element_mappings.pxd"]),
+              libraries=std_libs,
+              depends=["yt/utilities/lib/pixelization_constants.h"]),
     Extension("yt.utilities.lib.primitives",
               ["yt/utilities/lib/primitives.pyx"],
-              libraries=std_libs, 
-              depends=["yt/utilities/lib/primitives.pxd",
-                       "yt/utilities/lib/vec3_ops.pxd",
-                       "yt/utilities/lib/bounding_volume_hierarchy.pxd"]),
+              libraries=std_libs),
     Extension("yt.utilities.lib.origami",
               ["yt/utilities/lib/origami.pyx",
                "yt/utilities/lib/origami_tags.c"],
@@ -220,15 +172,11 @@
               libraries=std_libs,
               extra_compile_args=omp_args,
               extra_link_args=omp_args,
-              depends=["yt/utilities/lib/fp_utils.pxd",
-                       "yt/utilities/lib/kdtree.h",
-                       "yt/utilities/lib/fixed_interpolator.h",
-                       "yt/utilities/lib/fixed_interpolator.pxd",
-                       "yt/utilities/lib/field_interpolation_tables.pxd",
-                       "yt/utilities/lib/vec3_ops.pxd"]),
+              depends=["yt/utilities/lib/kdtree.h",
+                       "yt/utilities/lib/fixed_interpolator.h"]),
     Extension("yt.utilities.lib.element_mappings",
               ["yt/utilities/lib/element_mappings.pyx"],
-              libraries=std_libs, depends=["yt/utilities/lib/element_mappings.pxd"]),
+              libraries=std_libs),
     Extension("yt.utilities.lib.alt_ray_tracers",
               ["yt/utilities/lib/alt_ray_tracers.pyx"],
               libraries=std_libs),
@@ -244,7 +192,7 @@
     cython_extensions.append(
         Extension("yt.utilities.lib.{}".format(ext_name),
                   ["yt/utilities/lib/{}.pyx".format(ext_name)],
-                  libraries=std_libs, depends=["yt/utilities/lib/fp_utils.pxd"]))
+                  libraries=std_libs))
 
 lib_exts = ["write_array", "ragged_arrays", "line_integral_convolution"]
 for ext_name in lib_exts:
@@ -265,11 +213,7 @@
               include_dirs=["yt/frontends/artio/artio_headers/",
                             "yt/geometry/",
                             "yt/utilities/lib/"],
-              depends=glob.glob("yt/frontends/artio/artio_headers/*.c") +
-              ["yt/utilities/lib/fp_utils.pxd",
-               "yt/geometry/oct_container.pxd",
-               "yt/geometry/selection_routines.pxd",
-               "yt/geometry/particle_deposit.pxd"]),
+              depends=glob.glob("yt/frontends/artio/artio_headers/*.c")),
     Extension("yt.utilities.spatial._distance_wrap",
               glob.glob("yt/utilities/spatial/src/*.c")),
     Extension("yt.visualization._MPL",
@@ -285,31 +229,13 @@
     embree_extensions = [
         Extension("yt.utilities.lib.mesh_construction",
                   ["yt/utilities/lib/mesh_construction.pyx"],
-                  depends=["yt/utilities/lib/mesh_construction.pxd",
-                           "yt/utilities/lib/mesh_triangulation.h",
-                           "yt/utilities/lib/mesh_intersection.pxd",
-                           "yt/utlilites/lib/mesh_samplers.pxd",
-                           "yt/utlilites/lib/mesh_traversal.pxd"]),
+                  depends=["yt/utilities/lib/mesh_triangulation.h"]),
         Extension("yt.utilities.lib.mesh_traversal",
-                  ["yt/utilities/lib/mesh_traversal.pyx"],
-                  depends=["yt/utilities/lib/mesh_traversal.pxd",
-                           "yt/utilities/lib/grid_traversal.pxd",
-                           "yt/utilities/lib/bounding_volume_hierarchy.pxd"]),
+                  ["yt/utilities/lib/mesh_traversal.pyx"]),
         Extension("yt.utilities.lib.mesh_samplers",
-                  ["yt/utilities/lib/mesh_samplers.pyx"],
-                  depends=["yt/utilities/lib/mesh_samplers.pxd",
-                           "yt/utilities/lib/element_mappings.pxd",
-                           "yt/utilities/lib/mesh_construction.pxd",
-                           "yt/utilities/lib/bounding_volume_hierarchy.pxd",
-                           "yt/utilities/lib/primitives.pxd"]),
+                  ["yt/utilities/lib/mesh_samplers.pyx"]),
         Extension("yt.utilities.lib.mesh_intersection",
-                  ["yt/utilities/lib/mesh_intersection.pyx"],
-                  depends=["yt/utilities/lib/mesh_intersection.pxd",
-                           "yt/utilities/lib/mesh_construction.pxd",
-                           "yt/utilities/lib/bounding_volume_hierarchy.pxd",
-                           "yt/utilities/lib/mesh_samplers.pxd",
-                           "yt/utilities/lib/primitives.pxd",
-                           "yt/utilities/lib/vec3_ops.pxd"]),
+                  ["yt/utilities/lib/mesh_intersection.pyx"]),
     ]
 
     embree_prefix = os.path.abspath(read_embree_location())
@@ -385,9 +311,12 @@
         _build_py.run(self)
 
 class build_ext(_build_ext):
-    # subclass setuptools extension builder to avoid importing numpy
+    # subclass setuptools extension builder to avoid importing cython and numpy
     # at top level in setup.py. See http://stackoverflow.com/a/21621689/1382869
     def finalize_options(self):
+        from Cython.Build import cythonize
+        self.distribution.ext_modules[:] = cythonize(
+                self.distribution.ext_modules)
         _build_ext.finalize_options(self)
         # Prevent numpy from thinking it is still in its setup process
         # see http://stackoverflow.com/a/21621493/1382869

diff -r e90982542bb59d26f0e74a85b57f0583f289aaac -r 5fe1482b4df00f40f79e3a2faa21a6d4af020ac0 tests/tests.yaml
--- a/tests/tests.yaml
+++ b/tests/tests.yaml
@@ -26,7 +26,7 @@
   local_gdf_000:
     - yt/frontends/gdf/tests/test_outputs.py
 
-  local_gizmo_000:
+  local_gizmo_001:
     - yt/frontends/gizmo/tests/test_outputs.py
 
   local_halos_000:

diff -r e90982542bb59d26f0e74a85b57f0583f289aaac -r 5fe1482b4df00f40f79e3a2faa21a6d4af020ac0 yt/analysis_modules/level_sets/clump_handling.py
--- a/yt/analysis_modules/level_sets/clump_handling.py
+++ b/yt/analysis_modules/level_sets/clump_handling.py
@@ -57,6 +57,9 @@
         self.min_val = self.data[field].min()
         self.max_val = self.data[field].max()
 
+        if parent is not None:
+            self.data.parent = self.parent.data
+
         # List containing characteristics about clumps that are to be written 
         # out by the write routines.
         if clump_info is None:

diff -r e90982542bb59d26f0e74a85b57f0583f289aaac -r 5fe1482b4df00f40f79e3a2faa21a6d4af020ac0 yt/analysis_modules/level_sets/tests/test_clump_finding.py
--- /dev/null
+++ b/yt/analysis_modules/level_sets/tests/test_clump_finding.py
@@ -0,0 +1,74 @@
+"""
+Clump finder tests
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2016, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+import numpy as np
+
+from yt.analysis_modules.level_sets.api import \
+    Clump, \
+    find_clumps, \
+    get_lowest_clumps
+from yt.frontends.stream.api import \
+    load_uniform_grid
+from yt.testing import \
+    assert_array_equal, \
+    assert_equal
+
+def test_clump_finding():
+    n_c = 8
+    n_p = 1
+    dims = (n_c, n_c, n_c)
+
+    density = np.ones(dims)
+    high_rho = 10.
+    # add a couple disconnected density enhancements
+    density[2, 2, 2] = high_rho
+    density[6, 6, 6] = high_rho
+
+    # put a particle at the center of one of them
+    dx = 1. / n_c
+    px = 2.5 * dx * np.ones(n_p)
+    
+    data = {"density": density,
+            "particle_mass": np.ones(n_p),
+            "particle_position_x": px,
+            "particle_position_y": px,
+            "particle_position_z": px,
+            "number_of_particles": n_p}
+
+    ds = load_uniform_grid(data, dims)
+
+    ad = ds.all_data()
+    master_clump = Clump(ad, ("gas", "density"))
+    master_clump.add_validator("min_cells", 1)
+
+    find_clumps(master_clump, 0.5, 2. * high_rho, 10.)
+
+    # there should be two children
+    assert_equal(len(master_clump.children), 2)
+
+    leaf_clumps = get_lowest_clumps(master_clump)
+    # two leaf clumps
+    assert_equal(len(leaf_clumps), 2)
+
+
+    # check some clump fields
+    assert_equal(master_clump.children[0]["density"][0].size, 1)
+    assert_equal(master_clump.children[0]["density"][0], ad["density"].max())
+    assert_equal(master_clump.children[0]["particle_mass"].size, 1)
+    assert_array_equal(master_clump.children[0]["particle_mass"], ad["particle_mass"])
+    assert_equal(master_clump.children[1]["density"][0].size, 1)
+    assert_equal(master_clump.children[1]["density"][0], ad["density"].max())
+    assert_equal(master_clump.children[1]["particle_mass"].size, 0)

diff -r e90982542bb59d26f0e74a85b57f0583f289aaac -r 5fe1482b4df00f40f79e3a2faa21a6d4af020ac0 yt/analysis_modules/photon_simulator/photon_simulator.py
--- a/yt/analysis_modules/photon_simulator/photon_simulator.py
+++ b/yt/analysis_modules/photon_simulator/photon_simulator.py
@@ -1194,8 +1194,9 @@
         col6 = pyfits.Column(name='FLUX', format='D', array=np.array([flux.value]))
         col7 = pyfits.Column(name='SPECTRUM', format='80A', array=np.array([phfile+"[PHLIST,1]"]))
         col8 = pyfits.Column(name='IMAGE', format='80A', array=np.array([phfile+"[PHLIST,1]"]))
+        col9 = pyfits.Column(name='SRC_NAME', format='80A', array=np.array(["yt_src"]))
 
-        coldefs = pyfits.ColDefs([col1, col2, col3, col4, col5, col6, col7, col8])
+        coldefs = pyfits.ColDefs([col1, col2, col3, col4, col5, col6, col7, col8, col9])
 
         wrhdu = pyfits.BinTableHDU.from_columns(coldefs)
         wrhdu.update_ext_name("SRC_CAT")
@@ -1350,13 +1351,17 @@
             f = pyfits.open(self.parameters["RMF"])
             nchan = int(f["EBOUNDS"].header["DETCHANS"])
             num = 0
-            for i in range(1,len(f["EBOUNDS"].columns)+1):
-                if f["EBOUNDS"].header["TTYPE%d" % i] == "CHANNEL":
+            if "MATRIX" in f:
+                mat_key = "MATRIX"
+            elif "SPECRESP MATRIX" in f:
+                mat_key = "SPECRESP MATRIX"
+            for i in range(1,len(f[mat_key].columns)+1):
+                if f[mat_key].header["TTYPE%d" % i] == "F_CHAN":
                     num = i
                     break
             if num > 0:
                 tlmin = "TLMIN%d" % num
-                cmin = int(f["EBOUNDS"].header[tlmin])
+                cmin = int(f[mat_key].header[tlmin])
             else:
                 mylog.warning("Cannot determine minimum allowed value for channel. " +
                               "Setting to 0, which may be wrong.")

diff -r e90982542bb59d26f0e74a85b57f0583f289aaac -r 5fe1482b4df00f40f79e3a2faa21a6d4af020ac0 yt/analysis_modules/photon_simulator/spectral_models.py
--- a/yt/analysis_modules/photon_simulator/spectral_models.py
+++ b/yt/analysis_modules/photon_simulator/spectral_models.py
@@ -264,7 +264,7 @@
         emid = self.emid.d
         if self.thermal_broad:
             sigma = E0*np.sqrt(2.*kT*erg_per_keV/(self.A[element]*amu_grams))/cl
-            vec = broaden_lines(E0, sigma, amp, emid)*de
+            vec = broaden_lines(E0, sigma, amp, ebins)
         else:
             vec = np.histogram(E0, ebins, weights=amp)[0]
         tmpspec += vec

diff -r e90982542bb59d26f0e74a85b57f0583f289aaac -r 5fe1482b4df00f40f79e3a2faa21a6d4af020ac0 yt/analysis_modules/photon_simulator/utils.pyx
--- a/yt/analysis_modules/photon_simulator/utils.pyx
+++ b/yt/analysis_modules/photon_simulator/utils.pyx
@@ -1,31 +1,30 @@
 import numpy as np
 cimport numpy as np
 cimport cython
-from libc.math cimport exp
-
-cdef double gfac = 1.0/np.sqrt(np.pi)
-
+from libc.math cimport erf
+    
 @cython.cdivision(True)
 @cython.boundscheck(False)
 @cython.wraparound(False)
 def broaden_lines(np.ndarray[np.float64_t, ndim=1] E0,
                   np.ndarray[np.float64_t, ndim=1] sigma,
                   np.ndarray[np.float64_t, ndim=1] amp,
-                  np.ndarray[np.float64_t, ndim=1] E):
+                  np.ndarray[np.float64_t, ndim=1] ebins):
 
-    cdef int i, j, n
-    cdef double x, isigma, iamp
-    cdef np.ndarray[np.float64_t, ndim=1] lines
+    cdef int i, j, n, m
+    cdef double x, isigma
+    cdef np.ndarray[np.float64_t, ndim=1] cdf, vec
 
     n = E0.shape[0]
-    m = E.shape[0]
-    lines = np.zeros(m)
-
+    m = ebins.shape[0]
+    cdf = np.zeros(m)
+    vec = np.zeros(m-1)
+    
     for i in range(n):
         isigma = 1.0/sigma[i]
-        iamp = gfac*amp[i]*isigma
         for j in range(m):
-            x = (E[j]-E0[i])*isigma
-            lines[j] += iamp*exp(-x*x)
-
-    return lines
+            x = (ebins[j]-E0[i])*isigma
+            cdf[j] = 0.5*(1+erf(x))
+        for j in range(m-1):
+            vec[j] = vec[j] + (cdf[j+1] - cdf[j])*amp[i]
+    return vec
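
The rewritten kernel integrates each line over the spectral bins rather than
sampling a Gaussian at bin centers: a line at E0 with width sigma and
amplitude amp contributes amp * (cdf[j+1] - cdf[j]) to bin j, where
cdf[j] = 0.5 * (1 + erf((ebins[j] - E0) / sigma)), so each line's amplitude is
conserved within the binned range. A pure-NumPy sketch of the same idea
(illustrative only; the shipped version is the Cython code above):

    import numpy as np
    from scipy.special import erf

    def broaden_lines_numpy(E0, sigma, amp, ebins):
        # cdf[i, j] mirrors the per-line cumulative distribution used above.
        x = (ebins[None, :] - E0[:, None]) / sigma[:, None]
        cdf = 0.5 * (1.0 + erf(x))
        # Each line deposits amp[i], split across bins by CDF differences.
        return (amp[:, None] * np.diff(cdf, axis=1)).sum(axis=0)

    ebins = np.linspace(0.5, 7.0, 101)
    E0 = np.array([1.0, 6.7])
    sigma = np.array([0.01, 0.05])
    amp = np.array([1.0, 2.0])
    print(broaden_lines_numpy(E0, sigma, amp, ebins).sum())  # ~ amp.sum()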

diff -r e90982542bb59d26f0e74a85b57f0583f289aaac -r 5fe1482b4df00f40f79e3a2faa21a6d4af020ac0 yt/data_objects/selection_data_containers.py
--- a/yt/data_objects/selection_data_containers.py
+++ b/yt/data_objects/selection_data_containers.py
@@ -24,12 +24,13 @@
     iterable, \
     validate_width_tuple, \
     fix_length
+from yt.geometry.selection_routines import \
+    points_in_cells
 from yt.units.yt_array import \
     YTArray
 from yt.utilities.exceptions import \
     YTSphereTooSmall, \
     YTIllDefinedCutRegion, \
-    YTMixedCutRegion, \
     YTEllipsoidOrdering
 from yt.utilities.minimal_representation import \
     MinimalSliceData
@@ -793,8 +794,10 @@
         for field in fields:
             f = self.base_object[field]
             if f.shape != ind.shape:
-                raise YTMixedCutRegion(self.conditionals, field)
-            self.field_data[field] = self.base_object[field][ind]
+                parent = getattr(self, "parent", self.base_object)
+                self.field_data[field] = parent[field][self._part_ind]
+            else:
+                self.field_data[field] = self.base_object[field][ind]
 
     @property
     def blocks(self):
@@ -822,6 +825,22 @@
                 np.logical_and(res, ind, ind)
         return ind
 
+    _particle_mask = None
+    @property
+    def _part_ind(self):
+        if self._particle_mask is None:
+            parent = getattr(self, "parent", self.base_object)
+            units = "code_length"
+            mask = points_in_cells(
+                self["x"].to(units), self["y"].to(units),
+                self["z"].to(units), self["dx"].to(units),
+                self["dy"].to(units), self["dz"].to(units),
+                parent["particle_position_x"].to(units),
+                parent["particle_position_y"].to(units),
+                parent["particle_position_z"].to(units))
+            self._particle_mask = mask
+        return self._particle_mask
+
     @property
     def icoords(self):
         return self.base_object.icoords[self._cond_ind,:]
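
With the _part_ind mask above, particle fields can now be pulled from a
cut_region: particles are kept only if their positions fall inside the cells
selected by the conditionals. A hedged usage sketch (the dataset and the
density threshold are illustrative):

    import yt

    ds = yt.load("IsolatedGalaxy/galaxy0030/galaxy0030")
    ad = ds.all_data()

    # Cell selection as usual; particle fields are masked to the same cells.
    dense = ad.cut_region(['obj["density"] > 1e-28'])
    print(dense["gas", "density"].shape)
    print(dense["all", "particle_mass"].shape)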

diff -r e90982542bb59d26f0e74a85b57f0583f289aaac -r 5fe1482b4df00f40f79e3a2faa21a6d4af020ac0 yt/fields/astro_fields.py
--- a/yt/fields/astro_fields.py
+++ b/yt/fields/astro_fields.py
@@ -136,7 +136,8 @@
     registry.add_field((ftype, "sz_kinetic"),
                        function=_sz_kinetic,
                        units=unit_system["length"]**-1,
-                       validators=[ValidateParameter("axis")])
+                       validators=[
+                           ValidateParameter("axis", {'axis': [0, 1, 2]})])
 
     def _szy(field, data):
         scale = 0.88 / mh * kboltz / (me * clight*clight) * sigma_thompson

diff -r e90982542bb59d26f0e74a85b57f0583f289aaac -r 5fe1482b4df00f40f79e3a2faa21a6d4af020ac0 yt/fields/derived_field.py
--- a/yt/fields/derived_field.py
+++ b/yt/fields/derived_field.py
@@ -23,6 +23,7 @@
     NeedsDataField, \
     NeedsProperty, \
     NeedsParameter, \
+    NeedsParameterValue, \
     FieldUnitsError
 from .field_detector import \
     FieldDetector
@@ -256,14 +257,21 @@
     pass
 
 class ValidateParameter(FieldValidator):
-    def __init__(self, parameters):
+    def __init__(self, parameters, parameter_values=None):
         """
         This validator ensures that the dataset has a given parameter.
+
+        If *parameter_values* is supplied, this will also ensure that the field
+        is available for all permutations of the field parameter.
         """
         FieldValidator.__init__(self)
         self.parameters = ensure_list(parameters)
+        self.parameter_values = parameter_values
     def __call__(self, data):
         doesnt_have = []
+        if self.parameter_values is not None:
+            if isinstance(data, FieldDetector):
+                raise NeedsParameterValue(self.parameter_values)
         for p in self.parameters:
             if not data.has_field_parameter(p):
                 doesnt_have.append(p)
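
When parameter_values is supplied, the field detector (patched below) re-runs
detection once per listed value instead of guessing, as the sz_kinetic field
earlier in this merge now does with {'axis': [0, 1, 2]}. A minimal sketch of
registering a field this way (the field itself is a toy example):

    import yt
    from yt.fields.derived_field import ValidateParameter

    def _toy_axis_field(field, data):
        # Depends on the "axis" field parameter advertised by the validator.
        ax = data.get_field_parameter("axis")
        return (ax + 1) * data["gas", "density"]

    yt.add_field(("gas", "toy_axis_density"),
                 function=_toy_axis_field,
                 units="g/cm**3",
                 validators=[ValidateParameter("axis", {"axis": [0, 1, 2]})])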

diff -r e90982542bb59d26f0e74a85b57f0583f289aaac -r 5fe1482b4df00f40f79e3a2faa21a6d4af020ac0 yt/fields/field_detector.py
--- a/yt/fields/field_detector.py
+++ b/yt/fields/field_detector.py
@@ -17,7 +17,8 @@
 from collections import defaultdict
 from yt.units.yt_array import YTArray
 from .field_exceptions import \
-    NeedsGridType
+    NeedsGridType, \
+    NeedsParameterValue
 
 class FieldDetector(defaultdict):
     Level = 1
@@ -26,7 +27,7 @@
     _id_offset = 0
     domain_id = 0
 
-    def __init__(self, nd = 16, ds = None, flat = False):
+    def __init__(self, nd = 16, ds = None, flat = False, field_parameters=None):
         self.nd = nd
         self.flat = flat
         self._spatial = not flat
@@ -36,6 +37,7 @@
         self.LeftEdge = [0.0, 0.0, 0.0]
         self.RightEdge = [1.0, 1.0, 1.0]
         self.dds = np.ones(3, "float64")
+        self.field_parameters = field_parameters
         class fake_dataset(defaultdict):
             pass
 
@@ -106,6 +108,32 @@
                 for i in nfd.requested_parameters:
                     if i not in self.requested_parameters:
                         self.requested_parameters.append(i)
+            except NeedsParameterValue as npv:
+                # redo field detection with a new FieldDetector, ensuring
+                # all needed field parameter values are set
+                for param in npv.parameter_values:
+                    # temporarily remove any ValidateParameter instances for
+                    # this field to avoid infinitely re-raising
+                    # NeedsParameterValue exceptions
+                    saved_validators = []
+                    for i, validator in enumerate(finfo.validators):
+                        params = getattr(validator, 'parameters', [])
+                        if param in params:
+                            saved_validators.append(validator)
+                            del finfo.validators[i]
+
+                    for pv in npv.parameter_values[param]:
+                        nfd = FieldDetector(self.nd, ds=self.ds,
+                                            field_parameters={param: pv})
+                        vv = finfo(nfd)
+                        for i in nfd.requested:
+                            if i not in self.requested:
+                                self.requested.append(i)
+                        for i in nfd.requested_parameters:
+                            if i not in self.requested_parameters:
+                                self.requested_parameters.append(i)
+
+                    finfo.validators.extend(saved_validators)
             if vv is not None:
                 if not self.flat: self[item] = vv
                 else: self[item] = vv.ravel()
@@ -176,6 +204,8 @@
         }
 
     def get_field_parameter(self, param, default = 0.0):
+        if self.field_parameters and param in self.field_parameters:
+            return self.field_parameters[param]
         self.requested_parameters.append(param)
         if param in ['bulk_velocity', 'center', 'normal']:
             return self.ds.arr(np.random.random(3) * 1e-2, self.fp_units[param])

diff -r e90982542bb59d26f0e74a85b57f0583f289aaac -r 5fe1482b4df00f40f79e3a2faa21a6d4af020ac0 yt/fields/field_exceptions.py
--- a/yt/fields/field_exceptions.py
+++ b/yt/fields/field_exceptions.py
@@ -46,6 +46,10 @@
     def __str__(self):
         return "(%s)" % (self.missing_parameters)
 
+class NeedsParameterValue(ValidationException):
+    def __init__(self, parameter_values):
+        self.parameter_values = parameter_values
+
 class NeedsConfiguration(ValidationException):
     def __init__(self, parameter, value):
         self.parameter = parameter

diff -r e90982542bb59d26f0e74a85b57f0583f289aaac -r 5fe1482b4df00f40f79e3a2faa21a6d4af020ac0 yt/frontends/art/api.py
--- a/yt/frontends/art/api.py
+++ b/yt/frontends/art/api.py
@@ -17,7 +17,8 @@
       ARTDomainFile,\
       ARTDomainSubset,\
       ARTIndex,\
-      ARTDataset
+      ARTDataset, \
+      DarkMatterARTDataset
 
 from .fields import \
       ARTFieldInfo

diff -r e90982542bb59d26f0e74a85b57f0583f289aaac -r 5fe1482b4df00f40f79e3a2faa21a6d4af020ac0 yt/frontends/art/data_structures.py
--- a/yt/frontends/art/data_structures.py
+++ b/yt/frontends/art/data_structures.py
@@ -633,6 +633,10 @@
             return False
         if not f.endswith(suffix):
             return False
+        if "s0" not in f:
+            # ATOMIC.DAT, for instance, passes the other tests, but then dies
+            # during _find_files because it can't be split.
+            return False
         with open(f, 'rb') as fh:
             try:
                 amr_prefix, amr_suffix = filename_pattern['amr']

diff -r e90982542bb59d26f0e74a85b57f0583f289aaac -r 5fe1482b4df00f40f79e3a2faa21a6d4af020ac0 yt/frontends/gadget/tests/test_outputs.py
--- a/yt/frontends/gadget/tests/test_outputs.py
+++ b/yt/frontends/gadget/tests/test_outputs.py
@@ -25,7 +25,6 @@
 
 isothermal_h5 = "IsothermalCollapse/snap_505.hdf5"
 isothermal_bin = "IsothermalCollapse/snap_505"
-g64 = "gizmo_64/output/snap_N64L16_135.hdf5"
 
 # This maps from field names to weight field names to use for projections
 iso_fields = OrderedDict(
@@ -42,11 +41,6 @@
 )
 iso_kwargs = dict(bounding_box=[[-3, 3], [-3, 3], [-3, 3]])
 
-g64_fields = iso_fields.copy()
-g64_fields["deposit", "PartType4_density"] = None
-g64_kwargs = dict(bounding_box=[[-1e5, 1e5], [-1e5, 1e5], [-1e5, 1e5]])
-
-
 @requires_file(isothermal_h5)
 @requires_file(isothermal_bin)
 def test_GadgetDataset():
@@ -62,10 +56,3 @@
     for test in sph_answer(ds, 'snap_505', 2**17, iso_fields):
         test_iso_collapse.__name__ = test.description
         yield test
-
-@requires_ds(g64, big_data=True)
-def test_gizmo_64():
-    ds = data_dir_load(g64, kwargs=g64_kwargs)
-    for test in sph_answer(ds, 'snap_N64L16_135', 524288, g64_fields):
-        test_gizmo_64.__name__ = test.description
-        yield test

diff -r e90982542bb59d26f0e74a85b57f0583f289aaac -r 5fe1482b4df00f40f79e3a2faa21a6d4af020ac0 yt/frontends/gizmo/fields.py
--- a/yt/frontends/gizmo/fields.py
+++ b/yt/frontends/gizmo/fields.py
@@ -99,6 +99,15 @@
               data[ptype, "%s_metallicity" % species]
 
         num_neighbors = 64
+        for species in ['H', 'H_p0', 'H_p1']:
+            for suf in ["_density", "_number_density"]:
+                field = "%s%s" % (species, suf)
+                fn = add_volume_weighted_smoothed_field(
+                    ptype, "particle_position", "particle_mass",
+                    "smoothing_length", "density", field,
+                    self, num_neighbors)
+                self.alias(("gas", field), fn[0])
+
         for species in self.nuclei_names:
             self.add_field(
                 (ptype, "%s_nuclei_mass_density" % species),

diff -r e90982542bb59d26f0e74a85b57f0583f289aaac -r 5fe1482b4df00f40f79e3a2faa21a6d4af020ac0 yt/frontends/gizmo/tests/test_outputs.py
--- a/yt/frontends/gizmo/tests/test_outputs.py
+++ b/yt/frontends/gizmo/tests/test_outputs.py
@@ -34,13 +34,16 @@
         (('gas', 'velocity_magnitude'), None),
         (("deposit", "all_count"), None),
         (("deposit", "all_cic"), None),
+        (("deposit", "PartType0_density"), None),
     ]
 )
 
-@requires_ds(FIRE_m12i)
-def test_GizmoDataset():
-    ds = data_dir_load(FIRE_m12i)
+g64 = "gizmo_64/output/snap_N64L16_135.hdf5"
+
+@requires_ds(g64, big_data=True)
+def test_gizmo_64():
+    ds = data_dir_load(g64)
     assert isinstance(ds, GizmoDataset)
-    for test in sph_answer(ds, 'snapshot_600', 4786950, fields):
-        test_GizmoDataset.__name__ = test.description
+    for test in sph_answer(ds, 'snap_N64L16_135', 524288, fields):
+        test_gizmo_64.__name__ = test.description
         yield test

diff -r e90982542bb59d26f0e74a85b57f0583f289aaac -r 5fe1482b4df00f40f79e3a2faa21a6d4af020ac0 yt/funcs.py
--- a/yt/funcs.py
+++ b/yt/funcs.py
@@ -963,3 +963,24 @@
     except ImportError:
         requests = None
     return requests
+
+@contextlib.contextmanager
+def dummy_context_manager(*args, **kwargs):
+    yield
+
+def matplotlib_style_context(style_name=None, after_reset=True):
+    """Returns a context manager for controlling matplotlib style.
+
+    Arguments are passed to matplotlib.style.context() if specified. Defaults
+    to setting "classic" style, after resetting to the default config parameters.
+
+    On older matplotlib versions (<=1.5.0) where matplotlib.style isn't
+    available, returns a dummy context manager.
+    """
+    if style_name is None:
+        style_name = 'classic'
+    try:
+        import matplotlib.style
+        return matplotlib.style.context(style_name, after_reset=after_reset)
+    except ImportError:
+        return dummy_context_manager()
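
A short usage sketch for the new helper (not part of the commit), assuming this changeset is applied and matplotlib is new enough to ship matplotlib.style and the "classic" sheet; on older versions the dummy context manager makes the same block a no-op:

    import matplotlib.pyplot as plt
    from yt.funcs import matplotlib_style_context

    with matplotlib_style_context():        # defaults to the "classic" style
        fig, ax = plt.subplots()
        ax.plot([0, 1], [0, 1])
        fig.savefig("styled.png")           # saved with the classic rcParams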

diff -r e90982542bb59d26f0e74a85b57f0583f289aaac -r 5fe1482b4df00f40f79e3a2faa21a6d4af020ac0 yt/geometry/coordinates/cartesian_coordinates.py
--- a/yt/geometry/coordinates/cartesian_coordinates.py
+++ b/yt/geometry/coordinates/cartesian_coordinates.py
@@ -29,6 +29,7 @@
 
 
 class CartesianCoordinateHandler(CoordinateHandler):
+    name = "cartesian"
 
     def __init__(self, ds, ordering = ('x','y','z')):
         super(CartesianCoordinateHandler, self).__init__(ds, ordering)

diff -r e90982542bb59d26f0e74a85b57f0583f289aaac -r 5fe1482b4df00f40f79e3a2faa21a6d4af020ac0 yt/geometry/coordinates/coordinate_handler.py
--- a/yt/geometry/coordinates/coordinate_handler.py
+++ b/yt/geometry/coordinates/coordinate_handler.py
@@ -71,6 +71,7 @@
                     ds.quan(width[0], fix_unitary(width[1])))
 
 class CoordinateHandler(object):
+    name = None
     
     def __init__(self, ds, ordering):
         self.ds = weakref.proxy(ds)

diff -r e90982542bb59d26f0e74a85b57f0583f289aaac -r 5fe1482b4df00f40f79e3a2faa21a6d4af020ac0 yt/geometry/coordinates/cylindrical_coordinates.py
--- a/yt/geometry/coordinates/cylindrical_coordinates.py
+++ b/yt/geometry/coordinates/cylindrical_coordinates.py
@@ -29,6 +29,7 @@
 #
 
 class CylindricalCoordinateHandler(CoordinateHandler):
+    name = "cylindrical"
 
     def __init__(self, ds, ordering = ('r', 'z', 'theta')):
         super(CylindricalCoordinateHandler, self).__init__(ds, ordering)

diff -r e90982542bb59d26f0e74a85b57f0583f289aaac -r 5fe1482b4df00f40f79e3a2faa21a6d4af020ac0 yt/geometry/coordinates/geographic_coordinates.py
--- a/yt/geometry/coordinates/geographic_coordinates.py
+++ b/yt/geometry/coordinates/geographic_coordinates.py
@@ -24,6 +24,7 @@
 
 class GeographicCoordinateHandler(CoordinateHandler):
     radial_axis = "altitude"
+    name = "geographic"
 
     def __init__(self, ds, ordering = None):
         if not ordering:
@@ -324,6 +325,7 @@
 
 class InternalGeographicCoordinateHandler(GeographicCoordinateHandler):
     radial_axis = "depth"
+    name = "internal"
     
     def _setup_radial_fields(self, registry):
         # Altitude is the radius from the central zone minus the radius of the

diff -r e90982542bb59d26f0e74a85b57f0583f289aaac -r 5fe1482b4df00f40f79e3a2faa21a6d4af020ac0 yt/geometry/coordinates/polar_coordinates.py
--- a/yt/geometry/coordinates/polar_coordinates.py
+++ b/yt/geometry/coordinates/polar_coordinates.py
@@ -18,6 +18,7 @@
 
 
 class PolarCoordinateHandler(CylindricalCoordinateHandler):
+    name = "polar"
 
     def __init__(self, ds, ordering = ('r', 'theta', 'z')):
         super(PolarCoordinateHandler, self).__init__(ds, ordering)

diff -r e90982542bb59d26f0e74a85b57f0583f289aaac -r 5fe1482b4df00f40f79e3a2faa21a6d4af020ac0 yt/geometry/coordinates/spec_cube_coordinates.py
--- a/yt/geometry/coordinates/spec_cube_coordinates.py
+++ b/yt/geometry/coordinates/spec_cube_coordinates.py
@@ -20,6 +20,7 @@
     _get_coord_fields
 
 class SpectralCubeCoordinateHandler(CartesianCoordinateHandler):
+    name = "spectral_cube"
 
     def __init__(self, ds, ordering = ('x', 'y', 'z')):
         ordering = tuple("xyz"[axis] for axis in

diff -r e90982542bb59d26f0e74a85b57f0583f289aaac -r 5fe1482b4df00f40f79e3a2faa21a6d4af020ac0 yt/geometry/coordinates/spherical_coordinates.py
--- a/yt/geometry/coordinates/spherical_coordinates.py
+++ b/yt/geometry/coordinates/spherical_coordinates.py
@@ -23,6 +23,7 @@
     pixelize_cylinder, pixelize_aitoff
 
 class SphericalCoordinateHandler(CoordinateHandler):
+    name = "spherical"
 
     def __init__(self, ds, ordering = ('r', 'theta', 'phi')):
         super(SphericalCoordinateHandler, self).__init__(ds, ordering)

diff -r e90982542bb59d26f0e74a85b57f0583f289aaac -r 5fe1482b4df00f40f79e3a2faa21a6d4af020ac0 yt/geometry/selection_routines.pyx
--- a/yt/geometry/selection_routines.pyx
+++ b/yt/geometry/selection_routines.pyx
@@ -2046,3 +2046,42 @@
         return ("halo_particles", self.halo_id)
 
 halo_particles_selector = HaloParticlesSelector
+
+@cython.cdivision(True)
+@cython.boundscheck(False)
+@cython.wraparound(False)
+def points_in_cells(
+        np.float64_t[:] cx,
+        np.float64_t[:] cy,
+        np.float64_t[:] cz,
+        np.float64_t[:] dx,
+        np.float64_t[:] dy,
+        np.float64_t[:] dz,
+        np.float64_t[:] px,
+        np.float64_t[:] py,
+        np.float64_t[:] pz):
+    # Take a list of cells and particles and calculate which particles
+    # are enclosed within one of the cells.  This is used for querying
+    # particle fields on clump/contour objects.
+    # We use brute force since the cells are a relatively unordered collection.
+
+    cdef int p, c, n_p, n_c
+
+    n_p = px.size
+    n_c = cx.size
+    mask = np.ones(n_p, dtype="bool")
+
+    for p in range(n_p):
+        for c in range(n_c):
+            if fabs(px[p] - cx[c]) > 0.5 * dx[c]:
+                mask[p] = False
+                continue
+            if fabs(py[p] - cy[c]) > 0.5 * dy[c]:
+                mask[p] = False
+                continue
+            if fabs(pz[p] - cz[c]) > 0.5 * dz[c]:
+                mask[p] = False
+                continue
+            if mask[p]: break
+
+    return mask
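
For readers skimming the Cython, here is a rough NumPy sketch (not part of the commit) of the check the comment describes, i.e. marking each particle that falls inside at least one of the cells; the compiled loop above avoids the large temporary arrays this version builds:

    import numpy as np

    def points_in_cells_numpy(cx, cy, cz, dx, dy, dz, px, py, pz):
        # mask[p] is True when particle p lies within some cell c along all axes
        in_x = np.abs(px[:, None] - cx[None, :]) <= 0.5 * dx[None, :]
        in_y = np.abs(py[:, None] - cy[None, :]) <= 0.5 * dy[None, :]
        in_z = np.abs(pz[:, None] - cz[None, :]) <= 0.5 * dz[None, :]
        return (in_x & in_y & in_z).any(axis=1)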

diff -r e90982542bb59d26f0e74a85b57f0583f289aaac -r 5fe1482b4df00f40f79e3a2faa21a6d4af020ac0 yt/units/tests/test_units.py
--- a/yt/units/tests/test_units.py
+++ b/yt/units/tests/test_units.py
@@ -477,6 +477,9 @@
     test_unit = Unit('cm**-3', base_value=1.0, registry=ds.unit_registry)
     assert_equal(test_unit.latex_repr, '\\frac{1}{\\rm{cm}^{3}}')
 
+    test_unit = Unit('m_geom/l_geom**3')
+    assert_equal(test_unit.latex_repr, '\\frac{1}{M_\\odot^{2}}')
+
 def test_latitude_longitude():
     lat = unit_symbols.lat
     lon = unit_symbols.lon

diff -r e90982542bb59d26f0e74a85b57f0583f289aaac -r 5fe1482b4df00f40f79e3a2faa21a6d4af020ac0 yt/units/tests/test_ytarray.py
--- a/yt/units/tests/test_ytarray.py
+++ b/yt/units/tests/test_ytarray.py
@@ -626,6 +626,29 @@
     new_length = fix_length(length, ds=ds)
     yield assert_equal, YTQuantity(10, 'cm'), new_length
 
+def test_code_unit_combinations():
+    """
+    Test comparing code units coming from different datasets
+    """
+    ds1 = fake_random_ds(64, nprocs=1, length_unit=1)
+    ds2 = fake_random_ds(64, nprocs=1, length_unit=10)
+
+    q1 = ds1.quan(1, 'code_length')
+    q2 = ds2.quan(1, 'code_length')
+
+    assert_equal(10*q1, q2)
+    assert_equal(q1/q2, 0.1)
+    assert_true(q1 < q2)
+    assert_true(q2 > q1)
+    assert_true(not bool(q1 > q2))
+    assert_true(not bool(q2 < q1))
+    assert_true(q1 != q2)
+    assert_true(not bool(q1 == q2))
+
+    assert_equal((q1 + q2).in_cgs().value, 11)
+    assert_equal((q2 + q1).in_cgs().value, 11)
+    assert_equal((q1 - q2).in_cgs().value, -9)
+    assert_equal((q2 - q1).in_cgs().value, 9)
 
 def test_ytarray_pickle():
     ds = fake_random_ds(64, nprocs=1)

diff -r e90982542bb59d26f0e74a85b57f0583f289aaac -r 5fe1482b4df00f40f79e3a2faa21a6d4af020ac0 yt/units/unit_lookup_table.py
--- a/yt/units/unit_lookup_table.py
+++ b/yt/units/unit_lookup_table.py
@@ -23,7 +23,7 @@
     planck_charge_esu, planck_energy_erg, planck_mass_grams, \
     planck_temperature_K, planck_time_s, mass_hydrogen_grams, \
     grams_per_pound, standard_gravity_cm_per_s2, pascal_per_atm, \
-    newton_cgs
+    newton_cgs, cm_per_rearth, cm_per_rjup
 import numpy as np
 
 # Lookup a unit symbol with the symbol string, and provide a tuple with the
@@ -87,6 +87,8 @@
     "msun": (mass_sun_grams, dimensions.mass, 0.0, r"M_\odot"),
     "Rsun": (cm_per_rsun, dimensions.length, 0.0, r"R_\odot"),
     "rsun": (cm_per_rsun, dimensions.length, 0.0, r"R_\odot"),
+    "R_sun": (cm_per_rsun, dimensions.length, 0.0, r"R_\odot"),
+    "r_sun": (cm_per_rsun, dimensions.length, 0.0, r"R_\odot"),
     "Lsun": (luminosity_sun_ergs_per_sec, dimensions.power, 0.0, r"L_\odot"),
     "Tsun": (temp_sun_kelvin, dimensions.temperature, 0.0, r"T_\odot"),
     "Zsun": (metallicity_sun, dimensions.dimensionless, 0.0, r"Z_\odot"),
@@ -151,6 +153,12 @@
     "m_geom": (mass_sun_grams, dimensions.mass, 0.0, r"M_\odot"),
     "l_geom": (newton_cgs*mass_sun_grams/speed_of_light_cm_per_s**2, dimensions.length, 0.0, r"M_\odot"),
     "t_geom": (newton_cgs*mass_sun_grams/speed_of_light_cm_per_s**3, dimensions.time, 0.0, r"M_\odot"),
+
+    # Some Solar System units
+    "R_earth": (cm_per_rearth, dimensions.length, 0.0, r"R_\oplus"),
+    "r_earth": (cm_per_rearth, dimensions.length, 0.0, r"R_\oplus"),
+    "R_jup": (cm_per_rjup, dimensions.length, 0.0, r"R_\mathrm{Jup}"),
+    "r_jup": (cm_per_rjup, dimensions.length, 0.0, r"R_\mathrm{Jup}"),
 }
 
 # This dictionary formatting from magnitude package, credit to Juan Reyero.
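
With these symbols in the default lookup table, Earth and Jupiter radii should work like any other built-in unit. A small sketch (not from the commit):

    from yt.units.yt_array import YTQuantity

    r_e = YTQuantity(1.0, "R_earth")
    r_j = YTQuantity(1.0, "R_jup")
    print(r_e.in_units("km"))        # ~6371 km, the mean (volumetric) radius
    print(r_j.in_units("R_earth"))   # ~10.97 Earth radii per Jupiter radius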

diff -r e90982542bb59d26f0e74a85b57f0583f289aaac -r 5fe1482b4df00f40f79e3a2faa21a6d4af020ac0 yt/units/unit_object.py
--- a/yt/units/unit_object.py
+++ b/yt/units/unit_object.py
@@ -110,8 +110,25 @@
             symbol_table[ex] = registry.lut[str(ex)][3]
         except:
             symbol_table[ex] = r"\rm{" + str(ex).replace('_', '\ ') + "}"
+
+    # invert the symbol table dict to look for keys with identical values
+    invert_symbols = {}
+    for key, value in symbol_table.items():
+        if value not in invert_symbols:
+            invert_symbols[value] = [key]
+        else:
+            invert_symbols[value].append(key)
+
+    # if there are any units with identical latex representations, substitute
+    # units to avoid uncanceled terms in the final latex expression.
+    for val in invert_symbols:
+        symbols = invert_symbols[val]
+        for i in range(1, len(symbols)):
+            expr = expr.subs(symbols[i], symbols[0])
+
     latex_repr = latex(expr, symbol_names=symbol_table, mul_symbol="dot",
                        fold_frac_powers=True, fold_short_frac=True)
+
     if latex_repr == '1':
         return ''
     else:
@@ -258,7 +275,11 @@
     def latex_repr(self):
         if self._latex_repr is not None:
             return self._latex_repr
-        self._latex_repr = get_latex_representation(self.expr, self.registry)
+        if self.expr.is_Atom:
+            expr = self.expr
+        else:
+            expr = self.expr.copy()
+        self._latex_repr = get_latex_representation(expr, self.registry)
         return self._latex_repr
 
     ### Some sympy conventions
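
The substitution matters when two distinct unit symbols share a LaTeX form; for instance "m_geom" and "l_geom" both render as M_\odot, and without collapsing them sympy keeps them as separate symbols, leaving uncanceled M_\odot factors in the output. A minimal sketch mirroring the test added above:

    from yt.units.unit_object import Unit

    u = Unit("m_geom/l_geom**3")
    print(u.latex_repr)   # expected: \frac{1}{M_\odot^{2}}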

diff -r e90982542bb59d26f0e74a85b57f0583f289aaac -r 5fe1482b4df00f40f79e3a2faa21a6d4af020ac0 yt/units/unit_symbols.py
--- a/yt/units/unit_symbols.py
+++ b/yt/units/unit_symbols.py
@@ -116,11 +116,11 @@
 
 Msun = solar_mass = quan(1.0, "Msun")
 msun = quan(1.0, "msun")
-Rsun = solar_radius = quan(1.0, "Rsun")
-rsun = quan(1.0, "rsun")
-Lsun = lsun = solar_luminosity = quan(1.0, "Lsun")
-Tsun = solar_temperature = quan(1.0, "Tsun")
-Zsun = solar_metallicity = quan(1.0, "Zsun")
+Rsun = R_sun = solar_radius = quan(1.0, "Rsun")
+rsun = r_sun = quan(1.0, "rsun")
+Lsun = lsun = l_sun = solar_luminosity = quan(1.0, "Lsun")
+Tsun = T_sun = solar_temperature = quan(1.0, "Tsun")
+Zsun = Z_sun = solar_metallicity = quan(1.0, "Zsun")
 
 #
 # Misc Astronomical units
@@ -129,6 +129,10 @@
 AU = astronomical_unit = quan(1.0, "AU")
 au = quan(1.0, "au")
 ly = light_year = quan(1.0, "ly")
+Rearth = R_earth = earth_radius = quan(1.0, 'R_earth')
+rearth = r_earth = quan(1.0, 'r_earth')
+Rjup = R_jup = jupiter_radius = quan(1.0, 'R_jup')
+rjup = r_jup = quan(1.0, 'r_jup')
 
 #
 # Physical units

diff -r e90982542bb59d26f0e74a85b57f0583f289aaac -r 5fe1482b4df00f40f79e3a2faa21a6d4af020ac0 yt/units/yt_array.py
--- a/yt/units/yt_array.py
+++ b/yt/units/yt_array.py
@@ -167,7 +167,8 @@
     # Check that other is a YTArray.
     if hasattr(other, 'units'):
         if this.units.expr is other.units.expr:
-            return other
+            if this.units.base_value == other.units.base_value:
+                return other
         if not this.units.same_dimensions_as(other.units):
             raise YTUnitOperationError(op_string, this.units, other.units)
         return other.in_units(this.units)

diff -r e90982542bb59d26f0e74a85b57f0583f289aaac -r 5fe1482b4df00f40f79e3a2faa21a6d4af020ac0 yt/utilities/exceptions.py
--- a/yt/utilities/exceptions.py
+++ b/yt/utilities/exceptions.py
@@ -558,3 +558,13 @@
 
     def __str__(self):
         return "Can't determine size specification for %s" % (self.size_spec)
+
+class YTDataTypeUnsupported(YTException):
+    def __init__(self, this, supported):
+        self.supported = supported
+        self.this = this
+
+    def __str__(self):
+        v = "This operation is not supported for data of geometry %s; " % self.this
+        v += "It supports data of geometries %s" % (self.supported,)
+        return v
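
A short sketch (not from the commit) of how the new exception reads when a callback that only supports Cartesian-like geometries is applied to, say, spherical data:

    from yt.utilities.exceptions import YTDataTypeUnsupported

    try:
        raise YTDataTypeUnsupported("spherical", ("cartesian", "spectral_cube"))
    except YTDataTypeUnsupported as err:
        # Prints the geometry that was passed in and the tuple of geometries
        # the operation supports.
        print(err)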

diff -r e90982542bb59d26f0e74a85b57f0583f289aaac -r 5fe1482b4df00f40f79e3a2faa21a6d4af020ac0 yt/utilities/lib/alt_ray_tracers.pyx
--- a/yt/utilities/lib/alt_ray_tracers.pyx
+++ b/yt/utilities/lib/alt_ray_tracers.pyx
@@ -101,7 +101,7 @@
                                           rleft, rright, zleft, zright, \
                                           cleft, cright, thetaleft, thetaright, \
                                           tmleft, tpleft, tmright, tpright, tsect
-    cdef np.ndarray[np.intp_t, ndim=1] inds, tinds, sinds
+    cdef np.ndarray[np.int64_t, ndim=1, cast=True] inds, tinds, sinds
     cdef np.ndarray[np.float64_t, ndim=2] xyz, rztheta, ptemp, b1, b2, dsect
 
     # set up  points

diff -r e90982542bb59d26f0e74a85b57f0583f289aaac -r 5fe1482b4df00f40f79e3a2faa21a6d4af020ac0 yt/utilities/physical_ratios.py
--- a/yt/utilities/physical_ratios.py
+++ b/yt/utilities/physical_ratios.py
@@ -3,10 +3,11 @@
 # Physical Constants and Units Conversion Factors
 #
 # Values for these constants, unless otherwise noted, are drawn from IAU,
-# IUPAC, and NIST data, whichever is newer.
+# IUPAC, NIST, and NASA data, whichever is newer.
 # http://maia.usno.navy.mil/NSFA/IAU2009_consts.html
 # http://goldbook.iupac.org/list_goldbook_phys_constants_defs.html
 # http://physics.nist.gov/cuu/Constants/index.html
+# http://nssdc.gsfc.nasa.gov/planetary/factsheet/jupiterfact.html
 
 # Elementary masses
 mass_electron_grams = 9.10938291e-28
@@ -29,6 +30,8 @@
 mpc_per_pc    = 1e-6
 mpc_per_au    = 4.84813682e-12
 mpc_per_rsun  = 2.253962e-14
+mpc_per_rearth= 2.06470307893e-16
+mpc_per_rjup  = 2.26566120943e-15
 mpc_per_miles = 5.21552871e-20
 mpc_per_km    = 3.24077929e-20
 mpc_per_cm    = 3.24077929e-25
@@ -40,6 +43,8 @@
 m_per_cm      = 1e-2
 ly_per_cm     = 1.05702341e-18
 rsun_per_cm   = 1.4378145e-11
+rearth_per_cm = 1.56961033e-9 # Mean (volumetric) radius
+rjup_per_cm   = 1.43039006737e-10 # Mean (volumetric) radius
 au_per_cm     = 6.68458712e-14
 ang_per_cm    = 1.0e8
 
@@ -49,6 +54,8 @@
 pc_per_mpc    = 1.0 / mpc_per_pc
 au_per_mpc    = 1.0 / mpc_per_au
 rsun_per_mpc  = 1.0 / mpc_per_rsun
+rearth_per_mpc= 1.0 / mpc_per_rearth
+rjup_per_mpc  = 1.0 / mpc_per_rjup
 miles_per_mpc = 1.0 / mpc_per_miles
 km_per_mpc    = 1.0 / mpc_per_km
 cm_per_mpc    = 1.0 / mpc_per_cm
@@ -59,6 +66,8 @@
 cm_per_pc     = 1.0 / pc_per_cm
 cm_per_ly     = 1.0 / ly_per_cm
 cm_per_rsun   = 1.0 / rsun_per_cm
+cm_per_rearth = 1.0 / rearth_per_cm
+cm_per_rjup   = 1.0 / rjup_per_cm
 cm_per_au     = 1.0 / au_per_cm
 cm_per_ang    = 1.0 / ang_per_cm
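
The new ratios follow the file's existing pattern of storing one direction and deriving the reciprocal. A quick sanity check (not from the commit) of the numbers being added:

    rearth_per_cm = 1.56961033e-9      # Earth radii per cm (mean volumetric radius)
    rjup_per_cm   = 1.43039006737e-10  # Jupiter radii per cm (mean volumetric radius)

    cm_per_rearth = 1.0 / rearth_per_cm
    cm_per_rjup   = 1.0 / rjup_per_cm
    print(cm_per_rearth)   # ~6.371e8 cm,  i.e. 6371 km
    print(cm_per_rjup)     # ~6.9911e9 cm, i.e. 69911 km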
 

diff -r e90982542bb59d26f0e74a85b57f0583f289aaac -r 5fe1482b4df00f40f79e3a2faa21a6d4af020ac0 yt/visualization/base_plot_types.py
--- a/yt/visualization/base_plot_types.py
+++ b/yt/visualization/base_plot_types.py
@@ -19,7 +19,8 @@
     get_image_suffix, \
     mylog, \
     iterable, \
-    get_brewer_cmap
+    get_brewer_cmap, \
+    matplotlib_style_context
 import numpy as np
 
 
@@ -70,6 +71,9 @@
             axes.set_position(axrect)
             self.axes = axes
         self.canvas = FigureCanvasAgg(self.figure)
+        for which in ['major', 'minor']:
+            for axis in 'xy':
+                self.axes.tick_params(which=which, axis=axis, direction='in')
 
     def save(self, name, mpl_kwargs=None, canvas=None):
         """Choose backend and save image to disk"""
@@ -97,9 +101,23 @@
             mylog.warning("Unknown suffix %s, defaulting to Agg", suffix)
             canvas = self.canvas
 
-        canvas.print_figure(name, **mpl_kwargs)
+        with matplotlib_style_context():
+            canvas.print_figure(name, **mpl_kwargs)
         return name
 
+    def _get_labels(self):
+        ax = self.axes
+        labels = ax.xaxis.get_ticklabels() + ax.yaxis.get_ticklabels()
+        labels += [ax.title, ax.xaxis.label, ax.yaxis.label,
+                   ax.xaxis.get_offset_text(), ax.yaxis.get_offset_text()]
+        return labels
+
+    def _set_font_properties(self, font_properties, font_color):
+        for label in self._get_labels():
+            label.set_fontproperties(font_properties)
+            if font_color is not None:
+                label.set_color(self.font_color)
+
 
 class ImagePlotMPL(PlotMPL):
     """A base class for yt plots made using imshow
@@ -144,12 +162,15 @@
             self.cb.set_ticks(yticks)
         else:
             self.cb = self.figure.colorbar(self.image, self.cax)
+        for which in ['major', 'minor']:
+            self.cax.tick_params(which=which, axis='y', direction='in')
 
     def _repr_png_(self):
         from ._mpl_imports import FigureCanvasAgg
         canvas = FigureCanvasAgg(self.figure)
         f = BytesIO()
-        canvas.print_figure(f)
+        with matplotlib_style_context():
+            canvas.print_figure(f)
         f.seek(0)
         return f.read()
 
@@ -251,6 +272,13 @@
         self.cax.set_position(caxrect)
         self.figure.set_size_inches(*size)
 
+    def _get_labels(self):
+        labels = super(ImagePlotMPL, self)._get_labels()
+        cbax = self.cb.ax
+        labels += cbax.yaxis.get_ticklabels()
+        labels += [cbax.yaxis.label, cbax.yaxis.get_offset_text()]
+        return labels
+
     def hide_axes(self):
         """
         Hide the axes for a plot including ticks and labels
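
The tick changes above turn major and minor ticks inward on both axes and on the colorbar, while figure output now happens inside the style context. Outside of yt, the equivalent plain-matplotlib calls look roughly like this (a sketch, not part of the commit):

    import matplotlib
    matplotlib.use("Agg")              # headless backend, matching yt's default
    import matplotlib.pyplot as plt

    fig, ax = plt.subplots()
    ax.imshow([[0, 1], [2, 3]])
    for which in ("major", "minor"):
        for axis in ("x", "y"):
            ax.tick_params(which=which, axis=axis, direction="in")
    fig.savefig("ticks_in.png")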

diff -r e90982542bb59d26f0e74a85b57f0583f289aaac -r 5fe1482b4df00f40f79e3a2faa21a6d4af020ac0 yt/visualization/plot_container.py
--- a/yt/visualization/plot_container.py
+++ b/yt/visualization/plot_container.py
@@ -422,17 +422,8 @@
 
     def _set_font_properties(self):
         for f in self.plots:
-            ax = self.plots[f].axes
-            cbax = self.plots[f].cb.ax
-            labels = ax.xaxis.get_ticklabels() + ax.yaxis.get_ticklabels()
-            labels += cbax.yaxis.get_ticklabels()
-            labels += [ax.title, ax.xaxis.label, ax.yaxis.label,
-                       cbax.yaxis.label, cbax.yaxis.get_offset_text(),
-                       ax.xaxis.get_offset_text(), ax.yaxis.get_offset_text()]
-            for label in labels:
-                label.set_fontproperties(self._font_properties)
-                if self._font_color is not None:
-                    label.set_color(self._font_color)
+            self.plots[f]._set_font_properties(
+                self._font_properties, self._font_color)
 
     @invalidate_plot
     @invalidate_figure

diff -r e90982542bb59d26f0e74a85b57f0583f289aaac -r 5fe1482b4df00f40f79e3a2faa21a6d4af020ac0 yt/visualization/plot_modifications.py
--- a/yt/visualization/plot_modifications.py
+++ b/yt/visualization/plot_modifications.py
@@ -21,6 +21,7 @@
 import numpy as np
 
 from distutils.version import LooseVersion
+from functools import wraps
 
 from yt.funcs import \
     mylog, iterable
@@ -37,21 +38,49 @@
     line_integral_convolution_2d
 from yt.geometry.unstructured_mesh_handler import UnstructuredIndex
 from yt.utilities.lib.mesh_triangulation import triangulate_indices
+from yt.utilities.exceptions import \
+    YTDataTypeUnsupported
 
 from . import _MPL
 
 callback_registry = {}
 
+def _verify_geometry(func):
+    @wraps(func)
+    def _check_geometry(self, plot):
+        geom = plot.data.ds.coordinates.name
+        supp = self._supported_geometries
+        cs = getattr(self, "coord_system", None)
+        if supp is None or geom in supp:
+            return func(self, plot)
+        if cs in ("axis", "figure") and "force" not in supp:
+            return func(self, plot)
+        raise YTDataTypeUnsupported(geom, supp)
+    return _check_geometry
+
 class RegisteredCallback(type):
     def __init__(cls, name, b, d):
         type.__init__(cls, name, b, d)
         callback_registry[name] = cls
+        cls.__call__ = _verify_geometry(cls.__call__)
 
 @add_metaclass(RegisteredCallback)
 class PlotCallback(object):
+    # _supported_geometries is set by subclasses of PlotCallback to a tuple of
+    # strings corresponding to the names of the geometries that a callback
+    # supports.  By default it is None, which means it supports everything.
+    # Note that if there's a coord_system parameter that is set to "axis" or
+    # "figure" this is disregarded.  If "force" is included in the tuple, it
+    # will *not* check whether or not the coord_system is in axis or figure,
+    # and will only look at the geometries.
+    _supported_geometries = None
+
     def __init__(self, *args, **kwargs):
         pass
 
+    def __call__(self, plot):
+        raise NotImplementedError
+
     def project_coords(self, plot, coord):
         """
         Convert coordinates from simulation data coordinates to projected
@@ -248,6 +277,7 @@
     Cutting Planes).
     """
     _type_name = "velocity"
+    _supported_geometries = ("cartesian", "spectral_cube")
     def __init__(self, factor=16, scale=None, scale_units=None, normalize=False):
         PlotCallback.__init__(self)
         self.factor = factor
@@ -293,6 +323,7 @@
     clearly seen for fields with substantial variation in field strength.
     """
     _type_name = "magnetic_field"
+    _supported_geometries = ("cartesian", "spectral_cube")
     def __init__(self, factor=16, scale=None, scale_units=None, normalize=False):
         PlotCallback.__init__(self)
         self.factor = factor
@@ -326,6 +357,7 @@
     (see matplotlib.axes.Axes.quiver for more info)
     """
     _type_name = "quiver"
+    _supported_geometries = ("cartesian", "spectral_cube")
     def __init__(self, field_x, field_y, factor=16, scale=None,
                  scale_units=None, normalize=False, bv_x=0, bv_y=0):
         PlotCallback.__init__(self)
@@ -405,6 +437,7 @@
     queried.
     """
     _type_name = "contour"
+    _supported_geometries = ("cartesian", "spectral_cube")
     def __init__(self, field, ncont=5, factor=4, clim=None,
                  plot_args=None, label=False, take_log=None,
                  label_args=None, text_args=None, data_source=None):
@@ -538,6 +571,7 @@
     can change the linewidth of the displayed grids.
     """
     _type_name = "grids"
+    _supported_geometries = ("cartesian", "spectral_cube")
 
     def __init__(self, alpha=0.7, min_pix=1, min_pix_ids=20, draw_ids=False,
                  periodic=True, min_level=None, max_level=None,
@@ -652,6 +686,7 @@
     *field_color* is a field to be used to colormap the streamlines.
     """
     _type_name = "streamlines"
+    _supported_geometries = ("cartesian", "spectral_cube")
     def __init__(self, field_x, field_y, factor=16,
                  density=1, field_color=None, plot_args=None):
         PlotCallback.__init__(self)
@@ -758,6 +793,7 @@
 
     """
     _type_name = "line"
+    _supported_geometries = ("cartesian", "spectral_cube")
     def __init__(self, p1, p2, data_coords=False, coord_system="data",
                  plot_args=None):
         PlotCallback.__init__(self)
@@ -798,6 +834,7 @@
 
     """
     _type_name = "image_line"
+    _supported_geometries = ("cartesian", "spectral_cube")
     def __init__(self, p1, p2, data_coords=False, coord_system='axis',
                  plot_args=None):
         super(ImageLineCallback, self).__init__(p1, p2, data_coords,
@@ -817,6 +854,7 @@
     *field_y*, skipping every *factor* datapoint in the discretization.
     """
     _type_name = "cquiver"
+    _supported_geometries = ("cartesian", "spectral_cube")
     def __init__(self, field_x, field_y, factor):
         PlotCallback.__init__(self)
         self.field_x = field_x
@@ -862,6 +900,7 @@
     Take a list of *clumps* and plot them as a set of contours.
     """
     _type_name = "clumps"
+    _supported_geometries = ("cartesian", "spectral_cube")
     def __init__(self, clumps, plot_args=None):
         self.clumps = clumps
         if plot_args is None: plot_args = {}
@@ -990,6 +1029,7 @@
 
     """
     _type_name = "arrow"
+    _supported_geometries = ("cartesian", "spectral_cube")
     def __init__(self, pos, code_size=None, length=0.03, width=0.0001,
                  head_width=0.01, head_length=0.01,
                  starting_pos=None, coord_system='data', plot_args=None):
@@ -1105,6 +1145,7 @@
 
     """
     _type_name = "marker"
+    _supported_geometries = ("cartesian", "spectral_cube")
     def __init__(self, pos, marker='x', coord_system="data", plot_args=None):
         def_plot_args = {'color':'w', 's':50}
         self.pos = pos
@@ -1178,6 +1219,7 @@
 
     """
     _type_name = "sphere"
+    _supported_geometries = ("cartesian", "spectral_cube")
     def __init__(self, center, radius, circle_args=None,
                  text=None, coord_system='data', text_args=None):
         def_text_args = {'color':'white'}
@@ -1293,6 +1335,7 @@
     >>> s.save()
     """
     _type_name = "text"
+    _supported_geometries = ("cartesian", "spectral_cube")
     def __init__(self, pos, text, data_coords=False, coord_system='data',
                  text_args=None, inset_box_args=None):
         def_text_args = {'color':'white'}
@@ -1337,6 +1380,7 @@
 
     """
     _type_name = "point"
+    _supported_geometries = ("cartesian", "spectral_cube")
     def __init__(self, pos, text, data_coords=False, coord_system='data',
                  text_args=None, inset_box_args=None):
         super(PointAnnotateCallback, self).__init__(pos, text, data_coords,
@@ -1379,6 +1423,7 @@
     _type_name = 'halos'
     region = None
     _descriptor = None
+    _supported_geometries = ("cartesian", "spectral_cube")
 
     def __init__(self, halo_catalog, circle_args=None, circle_kwargs=None,
                  width=None, annotate_field=None, text_args=None,
@@ -1486,6 +1531,7 @@
     _type_name = "particles"
     region = None
     _descriptor = None
+    _supported_geometries = ("cartesian", "spectral_cube")
     def __init__(self, width, p_size=1.0, col='k', marker='o', stride=1.0,
                  ptype='all', minimum_mass=None, alpha=1.0):
         PlotCallback.__init__(self)
@@ -1626,6 +1672,7 @@
 
     """
     _type_name = "mesh_lines"
+    _supported_geometries = ("cartesian", "spectral_cube")
 
     def __init__(self, plot_args=None):
         super(MeshLinesCallback, self).__init__()
@@ -1690,6 +1737,7 @@
     of the geometry represented by the triangles.
     """
     _type_name = "triangle_facets"
+    _supported_geometries = ("cartesian", "spectral_cube")
 
     def __init__(self, triangle_vertices, plot_args=None):
         super(TriangleFacetsCallback, self).__init__()
@@ -1814,6 +1862,7 @@
     >>> s.annotate_timestamp()
     """
     _type_name = "timestamp"
+    _supported_geometries = ("cartesian", "spectral_cube")
     def __init__(self, x_pos=None, y_pos=None, corner='lower_left', time=True,
                  redshift=False, time_format="t = {time:.1f} {units}",
                  time_unit=None, redshift_format="z = {redshift:.2f}",
@@ -1999,6 +2048,7 @@
     >>> s.annotate_scale()
     """
     _type_name = "scale"
+    _supported_geometries = ("cartesian", "spectral_cube", "force")
     def __init__(self, corner='lower_right', coeff=None, unit=None, pos=None,
                  max_frac=0.16, min_frac=0.015, coord_system='axis',
                  text_args=None, size_bar_args=None, draw_inset_box=False,
@@ -2176,6 +2226,7 @@
 
     """
     _type_name = "ray"
+    _supported_geometries = ("cartesian", "spectral_cube", "force")
     def __init__(self, ray, arrow=False, plot_args=None):
         PlotCallback.__init__(self)
         def_plot_args = {'color':'white', 'linewidth':2}
@@ -2322,6 +2373,7 @@
                                              lim=(0.5,0.65))
     """
     _type_name = "line_integral_convolution"
+    _supported_geometries = ("cartesian", "spectral_cube")
     def __init__(self, field_x, field_y, texture=None, kernellen=50.,
                  lim=(0.5,0.6), cmap='binary', alpha=0.8, const_alpha=False):
         PlotCallback.__init__(self)
@@ -2417,6 +2469,7 @@
     >>> s.save()
     """
     _type_name = "cell_edges"
+    _supported_geometries = ("cartesian", "spectral_cube")
     def __init__(self, line_width=1.0, alpha = 1.0, color=(0.0, 0.0, 0.0)):
         PlotCallback.__init__(self)
         self.line_width = line_width
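
For callback authors, the pattern added above means a subclass only declares a _supported_geometries tuple; RegisteredCallback wraps __call__ with _verify_geometry, so plotting data from a geometry outside that tuple raises YTDataTypeUnsupported before the callback body runs. A hypothetical sketch (this callback is illustrative and not part of the commit; attribute names such as plot._axes are assumptions):

    from yt.visualization.plot_modifications import PlotCallback

    class CrosshairCallback(PlotCallback):
        """Hypothetical callback drawing a crosshair at a data-space position."""
        _type_name = "crosshair"
        _supported_geometries = ("cartesian", "spectral_cube")

        def __init__(self, pos=(0.5, 0.5)):
            PlotCallback.__init__(self)
            self.pos = pos

        def __call__(self, plot):
            # Only reached when plot.data.ds.coordinates.name is in the tuple.
            plot._axes.axvline(self.pos[0], color="w", lw=0.5)
            plot._axes.axhline(self.pos[1], color="w", lw=0.5)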

diff -r e90982542bb59d26f0e74a85b57f0583f289aaac -r 5fe1482b4df00f40f79e3a2faa21a6d4af020ac0 yt/visualization/plot_window.py
--- a/yt/visualization/plot_window.py
+++ b/yt/visualization/plot_window.py
@@ -64,7 +64,8 @@
     YTUnitNotRecognized, \
     YTCannotParseUnitDisplayName, \
     YTUnitConversionError, \
-    YTPlotCallbackError
+    YTPlotCallbackError, \
+    YTDataTypeUnsupported
 
 # Some magic for dealing with pyparsing being included or not
 # included in matplotlib (not in gentoo, yes in everything else)
@@ -984,6 +985,8 @@
                 callback = CallbackMaker(*args[1:], **kwargs)
                 try:
                     callback(cbw)
+                except YTDataTypeUnsupported as e:
+                    six.reraise(YTDataTypeUnsupported, e)
                 except Exception as e:
                     six.reraise(YTPlotCallbackError,
                                 YTPlotCallbackError(callback._type_name, e),

diff -r e90982542bb59d26f0e74a85b57f0583f289aaac -r 5fe1482b4df00f40f79e3a2faa21a6d4af020ac0 yt/visualization/profile_plotter.py
--- a/yt/visualization/profile_plotter.py
+++ b/yt/visualization/profile_plotter.py
@@ -26,7 +26,8 @@
 from io import BytesIO
 
 
-from .base_plot_types import ImagePlotMPL
+from .base_plot_types import \
+    PlotMPL, ImagePlotMPL
 from .plot_container import \
     ImagePlotContainer, \
     log_transform, linear_transform, get_log_minorticks, \
@@ -41,7 +42,8 @@
 from yt.funcs import \
     ensure_list, \
     get_image_suffix, \
-    get_ipython_api_version
+    get_ipython_api_version, \
+    matplotlib_style_context
 
 def get_canvas(name):
     from . import _mpl_imports as mpl
@@ -60,25 +62,33 @@
         canvas_cls = mpl.FigureCanvasAgg
     return canvas_cls
 
+class PlotContainer(OrderedDict):
+    def __missing__(self, key):
+        plot = PlotMPL((10, 8), [0.1, 0.1, 0.8, 0.8], None, None)
+        self[key] = plot
+        return self[key]
+
 class FigureContainer(OrderedDict):
-    def __init__(self):
+    def __init__(self, plots):
+        self.plots = plots
         super(FigureContainer, self).__init__()
 
     def __missing__(self, key):
-        from matplotlib.figure import Figure
-        figure = Figure((10, 8))
-        self[key] = figure
+        self[key] = self.plots[key].figure
         return self[key]
 
+    def __iter__(self):
+        return iter(self.plots)
+
+
 class AxesContainer(OrderedDict):
-    def __init__(self, fig_container):
-        self.fig_container = fig_container
+    def __init__(self, plots):
+        self.plots = plots
         self.ylim = {}
         super(AxesContainer, self).__init__()
 
     def __missing__(self, key):
-        figure = self.fig_container[key]
-        self[key] = figure.add_subplot(111)
+        self[key] = self.plots[key].axes
         return self[key]
 
     def __setitem__(self, key, value):
@@ -227,7 +237,7 @@
         ProfilePlot._initialize_instance(self, profiles, label, plot_spec, y_log)
         
     @validate_plot
-    def save(self, name=None, suffix=None):
+    def save(self, name=None, suffix=None, mpl_kwargs=None):
         r"""
         Saves a 1d profile plot.
 
@@ -238,14 +248,16 @@
         suffix : string
             Specify the image type by its suffix. If not specified, the output
             type will be inferred from the filename. Defaults to PNG.
+        mpl_kwargs : dict
+            A dict of keyword arguments to be passed to matplotlib.
         """
         if not self._plot_valid:
             self._setup_plots()
-        unique = set(self.figures.values())
-        if len(unique) < len(self.figures):
+        unique = set(self.plots.values())
+        if len(unique) < len(self.plots):
             iters = izip(range(len(unique)), sorted(unique))
         else:
-            iters = iteritems(self.figures)
+            iters = iteritems(self.plots)
         if not suffix:
             suffix = "png"
         suffix = ".%s" % suffix
@@ -261,24 +273,23 @@
             if sfx != '':
                 suffix = sfx
                 prefix = name[:name.rfind(suffix)]
-                fullname = True 
+                fullname = True
             else:
                 prefix = name
         xfn = self.profiles[0].x_field
         if isinstance(xfn, tuple):
             xfn = xfn[1]
-        canvas_cls = get_canvas(name)
         fns = []
-        for uid, fig in iters:
+        for uid, plot in iters:
             if isinstance(uid, tuple):
                 uid = uid[1]
-            canvas = canvas_cls(fig)
             if fullname:
                 fns.append("%s%s" % (prefix, suffix))
             else:
                 fns.append("%s_1d-Profile_%s_%s%s" % (prefix, xfn, uid, suffix))
             mylog.info("Saving %s", fns[-1])
-            canvas.print_figure(fns[-1])
+            with matplotlib_style_context():
+                plot.save(fns[-1], mpl_kwargs=mpl_kwargs)
         return fns
 
     @validate_plot
@@ -325,7 +336,8 @@
         for uid, fig in iters:
             canvas = mpl.FigureCanvasAgg(fig)
             f = BytesIO()
-            canvas.print_figure(f)
+            with matplotlib_style_context():
+                canvas.print_figure(f)
             f.seek(0)
             img = base64.b64encode(f.read()).decode()
             ret += r'<img style="max-width:100%%;max-height:100%%;" ' \
@@ -352,10 +364,14 @@
             axes.set_ylim(*self.axes.ylim[fname])
             if any(self.label):
                 axes.legend(loc="best")
+        self._set_font_properties()
         self._plot_valid = True
 
     @classmethod
     def _initialize_instance(cls, obj, profiles, labels, plot_specs, y_log):
+        from matplotlib.font_manager import FontProperties
+        obj._font_properties = FontProperties(family='stixgeneral', size=18)
+        obj._font_color = None
         obj.profiles = ensure_list(profiles)
         obj.x_log = None
         obj.y_log = {}
@@ -368,8 +384,9 @@
         if plot_specs is None:
             plot_specs = [dict() for p in obj.profiles]
         obj.plot_spec = plot_specs
-        obj.figures = FigureContainer()
-        obj.axes = AxesContainer(obj.figures)
+        obj.plots = PlotContainer()
+        obj.figures = FigureContainer(obj.plots)
+        obj.axes = AxesContainer(obj.plots)
         obj._setup_plots()
         return obj
 
@@ -597,6 +614,11 @@
                 break
         return self
 
+    def _set_font_properties(self):
+        for f in self.plots:
+            self.plots[f]._set_font_properties(
+                self._font_properties, self._font_color)
+
     def _get_field_log(self, field_y, profile):
         yfi = profile.field_info[field_y]
         if self.x_log is None:
@@ -1048,6 +1070,63 @@
         return names
 
     @invalidate_plot
+    def set_font(self, font_dict=None):
+        """set the font and font properties
+
+        Parameters
+        ----------
+        font_dict : dict
+        A dict of keyword parameters to be passed to
+        :py:class:`matplotlib.font_manager.FontProperties`.
+
+        Possible keys include
+        * family - The font family. Can be serif, sans-serif, cursive, 'fantasy' or
+          'monospace'.
+        * style - The font style. Either normal, italic or oblique.
+        * color - A valid color string like 'r', 'g', 'red', 'cobalt', and
+          'orange'.
+        * variant: Either normal or small-caps.
+        * size: Either a relative value of xx-small, x-small, small, medium,
+          large, x-large, xx-large or an absolute font size, e.g. 12
+        * stretch: A numeric value in the range 0-1000 or one of
+          ultra-condensed, extra-condensed, condensed, semi-condensed, normal,
+          semi-expanded, expanded, extra-expanded or ultra-expanded
+        * weight: A numeric value in the range 0-1000 or one of ultralight,
+          light, normal, regular, book, medium, roman, semibold, demibold, demi,
+          bold, heavy, extra bold, or black
+
+        See the matplotlib font manager API documentation for more details.
+        http://matplotlib.org/api/font_manager_api.html
+
+        Notes
+        -----
+        Mathtext axis labels will only obey the `size` and `color` keyword.
+
+        Examples
+        --------
+        This sets the font to be 24-pt, blue, sans-serif, italic, and
+        bold-face.
+
+        >>> prof = ProfilePlot(ds.all_data(), 'density', 'temperature')
+        >>> prof.set_font({'family':'sans-serif', 'style':'italic',
+                          'weight':'bold', 'size':24, 'color':'blue'})
+
+        """
+        from matplotlib.font_manager import FontProperties
+
+        if font_dict is None:
+            font_dict = {}
+        if 'color' in font_dict:
+            self._font_color = font_dict.pop('color')
+        # Set default values if the user does not explicitly set them.
+        # this prevents reverting to the matplotlib defaults.
+        font_dict.setdefault('family', 'stixgeneral')
+        font_dict.setdefault('size', 18)
+        self._font_properties = \
+            FontProperties(**font_dict)
+        return self
+
+    @invalidate_plot
     def set_title(self, field, title):
         """Set a title for the plot.
 

This diff is so big that we needed to truncate the remainder.
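
The PlotContainer/FigureContainer/AxesContainer rework above leans on dict.__missing__ so that figures and axes are created lazily, the first time a profile's field key is looked up. A stripped-down sketch of that pattern (plain Python, not yt code):

    from collections import OrderedDict

    class LazyContainer(OrderedDict):
        """Creates and caches a value the first time a key is looked up."""
        def __init__(self, factory):
            self.factory = factory
            super(LazyContainer, self).__init__()

        def __missing__(self, key):
            self[key] = self.factory(key)
            return self[key]

    figures = LazyContainer(lambda key: "figure for %s" % (key,))
    print(figures["density"])   # built on first access
    print(len(figures))         # 1, the entry is now cached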

https://bitbucket.org/yt_analysis/yt/commits/6a55dc3b5b48/
Changeset:   6a55dc3b5b48
Branch:      yt
User:        MatthewTurk
Date:        2016-06-28 20:44:46+00:00
Summary:     Fixing name
Affected #:  1 file

diff -r 5fe1482b4df00f40f79e3a2faa21a6d4af020ac0 -r 6a55dc3b5b48105798cb8450c2247ca6f6aaea21 yt/geometry/coordinates/geographic_coordinates.py
--- a/yt/geometry/coordinates/geographic_coordinates.py
+++ b/yt/geometry/coordinates/geographic_coordinates.py
@@ -325,7 +325,7 @@
 
 class InternalGeographicCoordinateHandler(GeographicCoordinateHandler):
     radial_axis = "depth"
-    name = "internal"
+    name = "internal_geographic"
     
     def _setup_radial_fields(self, registry):
         # Altitude is the radius from the central zone minus the radius of the


https://bitbucket.org/yt_analysis/yt/commits/1d8f9705433e/
Changeset:   1d8f9705433e
Branch:      yt
User:        ngoldbaum
Date:        2016-06-28 22:09:12+00:00
Summary:     Merged in MatthewTurk/yt (pull request #2212)

Add internal geographic coordinates
Affected #:  5 files

diff -r 3ca8a739e5296422ea7954ce8d2717908be25dd6 -r 1d8f9705433e9ec690848d8698db85e8903335c9 yt/data_objects/static_output.py
--- a/yt/data_objects/static_output.py
+++ b/yt/data_objects/static_output.py
@@ -72,7 +72,8 @@
     CylindricalCoordinateHandler, \
     SphericalCoordinateHandler, \
     GeographicCoordinateHandler, \
-    SpectralCubeCoordinateHandler
+    SpectralCubeCoordinateHandler, \
+    InternalGeographicCoordinateHandler
 
 # We want to support the movie format in the future.
 # When such a thing comes to pass, I'll move all the stuff that is contant up
@@ -491,6 +492,8 @@
             cls = SphericalCoordinateHandler
         elif self.geometry == "geographic":
             cls = GeographicCoordinateHandler
+        elif self.geometry == "internal_geographic":
+            cls = InternalGeographicCoordinateHandler
         elif self.geometry == "spectral_cube":
             cls = SpectralCubeCoordinateHandler
         else:

diff -r 3ca8a739e5296422ea7954ce8d2717908be25dd6 -r 1d8f9705433e9ec690848d8698db85e8903335c9 yt/geometry/coordinates/api.py
--- a/yt/geometry/coordinates/api.py
+++ b/yt/geometry/coordinates/api.py
@@ -23,7 +23,8 @@
 from .spherical_coordinates import \
     SphericalCoordinateHandler
 from .geographic_coordinates import \
-    GeographicCoordinateHandler
+     GeographicCoordinateHandler, \
+     InternalGeographicCoordinateHandler
 from .spec_cube_coordinates import \
     SpectralCubeCoordinateHandler
 

diff -r 3ca8a739e5296422ea7954ce8d2717908be25dd6 -r 1d8f9705433e9ec690848d8698db85e8903335c9 yt/geometry/coordinates/geographic_coordinates.py
--- a/yt/geometry/coordinates/geographic_coordinates.py
+++ b/yt/geometry/coordinates/geographic_coordinates.py
@@ -23,14 +23,17 @@
     pixelize_cylinder, pixelize_aitoff
 
 class GeographicCoordinateHandler(CoordinateHandler):
+    radial_axis = "altitude"
     name = "geographic"
 
-    def __init__(self, ds, ordering = ('latitude', 'longitude', 'altitude')):
+    def __init__(self, ds, ordering = None):
+        if not ordering:
+            ordering = ('latitude', 'longitude', self.radial_axis)
         super(GeographicCoordinateHandler, self).__init__(ds, ordering)
         self.image_units = {}
         self.image_units[self.axis_id['latitude']] = (None, None)
         self.image_units[self.axis_id['longitude']] = (None, None)
-        self.image_units[self.axis_id['altitude']] = ('deg', 'deg')
+        self.image_units[self.axis_id[self.radial_axis]] = ('deg', 'deg')
 
     def setup_fields(self, registry):
         # return the fields for r, z, theta
@@ -56,11 +59,11 @@
                            display_field = False,
                            units = "")
 
-        f1, f2 = _get_coord_fields(self.axis_id['altitude'])
-        registry.add_field(("index", "daltitude"), function = f1,
+        f1, f2 = _get_coord_fields(self.axis_id[self.radial_axis])
+        registry.add_field(("index", "d%s" % (self.radial_axis,)), function = f1,
                            display_field = False,
                            units = "code_length")
-        registry.add_field(("index", "altitude"), function = f2,
+        registry.add_field(("index", self.radial_axis), function = f2,
                            display_field = False,
                            units = "code_length")
 
@@ -77,10 +80,10 @@
                  function=_SphericalVolume,
                  units = "code_length**3")
 
-        def _path_altitude(field, data):
-            return data["index", "daltitude"]
-        registry.add_field(("index", "path_element_altitude"),
-                 function = _path_altitude,
+        def _path_radial_axis(field, data):
+            return data["index", "d%s" % self.radial_axis]
+        registry.add_field(("index", "path_element_%s" % self.radial_axis),
+                 function = _path_radial_axis,
                  units = "code_length")
         def _path_latitude(field, data):
             # We use r here explicitly
@@ -98,21 +101,6 @@
                  function = _path_longitude,
                  units = "code_length")
 
-        # Altitude is the radius from the central zone minus the radius of the
-        # surface.
-        def _altitude_to_radius(field, data):
-            surface_height = data.get_field_parameter("surface_height")
-            if surface_height is None:
-                if hasattr(data.ds, "surface_height"):
-                    surface_height = data.ds.surface_height
-                else:
-                    surface_height = data.ds.quan(0.0, "code_length")
-            return data["altitude"] + surface_height
-        registry.add_field(("index", "r"),
-                 function=_altitude_to_radius,
-                 units = "code_length")
-        registry.alias(("index", "dr"), ("index", "daltitude"))
-
         def _latitude_to_theta(field, data):
             # latitude runs from -90 to 90
             return (data["latitude"] + 90) * np.pi/180.0
@@ -137,12 +125,47 @@
                  function = _dlongitude_to_dphi,
                  units = "")
 
+        self._setup_radial_fields(registry)
+
+    def _setup_radial_fields(self, registry):
+        # This stays here because we don't want to risk the field detector not
+        # properly getting the data_source, etc.
+        def _altitude_to_radius(field, data):
+            surface_height = data.get_field_parameter("surface_height")
+            if surface_height is None:
+                if hasattr(data.ds, "surface_height"):
+                    surface_height = data.ds.surface_height
+                else:
+                    surface_height = data.ds.quan(0.0, "code_length")
+            return data["altitude"] + surface_height
+        registry.add_field(("index", "r"),
+                 function=_altitude_to_radius,
+                 units = "code_length")
+        registry.alias(("index", "dr"), ("index", "daltitude"))
+
+    def _retrieve_radial_offset(self, data_source = None):
+        # This returns the factor by which the radial field is multiplied and
+        # the scalar its offset by.  Typically the "factor" will *only* be
+        # either 1.0 or -1.0.  The order will be factor * r + offset.
+        # Altitude is the radius from the central zone minus the radius of the
+        # surface.  Depth to radius is negative value of depth plus the
+        # outermost radius.
+        surface_height = None
+        if data_source is not None:
+            surface_height = data_source.get_field_parameter("surface_height")
+        if surface_height is None:
+            if hasattr(self.ds, "surface_height"):
+                surface_height = self.ds.surface_height
+            else:
+                surface_height = self.ds.quan(0.0, "code_length")
+        return surface_height, 1.0
+
     def pixelize(self, dimension, data_source, field, bounds, size,
                  antialias = True, periodic = True):
         if self.axis_name[dimension] in ('latitude', 'longitude'):
             return self._cyl_pixelize(data_source, field, bounds, size,
                                           antialias, dimension)
-        elif self.axis_name[dimension] == 'altitude':
+        elif self.axis_name[dimension] == self.radial_axis:
             return self._ortho_pixelize(data_source, field, bounds, size,
                                         antialias, dimension, periodic)
         else:
@@ -164,13 +187,8 @@
 
     def _cyl_pixelize(self, data_source, field, bounds, size, antialias,
                       dimension):
-        surface_height = data_source.get_field_parameter("surface_height")
-        if surface_height is None:
-            if hasattr(data_source.ds, "surface_height"):
-                surface_height = data_source.ds.surface_height
-            else:
-                surface_height = data_source.ds.quan(0.0, "code_length")
-        r = data_source['py'] + surface_height
+        offset, factor = self._retrieve_radial_offset(data_source)
+        r = factor * data_source['py'] + offset
         # Because of the axis-ordering, dimensions 0 and 1 both have r as py
         # and the angular coordinate as px.  But we need to figure out how to
         # convert our coordinate back to an actual angle, based on which
@@ -192,32 +210,28 @@
             buff = buff.transpose()
         return buff
 
-
     def convert_from_cartesian(self, coord):
         raise NotImplementedError
 
     def convert_to_cartesian(self, coord):
-        if hasattr(self.ds, "surface_height"):
-            surface_height = self.ds.surface_height
-        else:
-            surface_height = self.ds.quan(0.0, "code_length")
+        offset, factor = self._retrieve_radial_offset()
         if isinstance(coord, np.ndarray) and len(coord.shape) > 1:
-            alt = self.axis_id['altitude']
+            rad = self.axis_id[self.radial_axis]
             lon = self.axis_id['longitude']
             lat = self.axis_id['latitude']
-            r = coord[:,alt] + surface_height
+            r = factor * coord[:,rad] + offset
             theta = coord[:,lon] * np.pi/180
             phi = coord[:,lat] * np.pi/180
             nc = np.zeros_like(coord)
             # r, theta, phi
             nc[:,lat] = np.cos(phi) * np.sin(theta)*r
             nc[:,lon] = np.sin(phi) * np.sin(theta)*r
-            nc[:,alt] = np.cos(theta) * r
+            nc[:,rad] = np.cos(theta) * r
         else:
             a, b, c = coord
             theta = b * np.pi/180
             phi = a * np.pi/180
-            r = surface_height + c
+            r = factor * c + offset
             nc = (np.cos(phi) * np.sin(theta)*r,
                   np.sin(phi) * np.sin(theta)*r,
                   np.cos(theta) * r)
@@ -248,7 +262,7 @@
                   'y / \\sin(\mathrm{latitude})'),
               self.axis_id['longitude']:
                  ('R', 'z'),
-              self.axis_id['altitude']:
+              self.axis_id[self.radial_axis]:
                  ('longitude', 'latitude')}
         for i in list(rv.keys()):
             rv[self.axis_name[i]] = rv[i]
@@ -272,14 +286,14 @@
         center, display_center = super(
             GeographicCoordinateHandler, self).sanitize_center(center, axis)
         name = self.axis_name[axis]
-        if name == 'altitude':
+        if name == self.radial_axis:
             display_center = center
         elif name == 'latitude':
             display_center = (0.0 * display_center[0],
                               0.0 * display_center[1],
                               0.0 * display_center[2])
         elif name == 'longitude':
-            ri = self.axis_id['altitude']
+            ri = self.axis_id[self.radial_axis]
             c = (self.ds.domain_right_edge[ri] +
                  self.ds.domain_left_edge[ri])/2.0
             display_center = [0.0 * display_center[0], 
@@ -293,17 +307,108 @@
         if width is not None:
             width = super(GeographicCoordinateHandler, self).sanitize_width(
               axis, width, depth)
-        elif name == 'altitude':
-            width = [self.ds.domain_width[self.y_axis['altitude']],
-                     self.ds.domain_width[self.x_axis['altitude']]]
+        elif name == self.radial_axis:
+            rax = self.radial_axis
+            width = [self.ds.domain_width[self.y_axis[rax]],
+                     self.ds.domain_width[self.x_axis[rax]]]
         elif name == 'latitude':
-            ri = self.axis_id['altitude']
+            ri = self.axis_id[self.radial_axis]
             # Remember, in spherical coordinates when we cut in theta,
             # we create a conic section
             width = [2.0*self.ds.domain_width[ri],
                      2.0*self.ds.domain_width[ri]]
         elif name == 'longitude':
-            ri = self.axis_id['altitude']
+            ri = self.axis_id[self.radial_axis]
             width = [self.ds.domain_width[ri],
                      2.0*self.ds.domain_width[ri]]
         return width
+
+class InternalGeographicCoordinateHandler(GeographicCoordinateHandler):
+    radial_axis = "depth"
+    name = "internal_geographic"
+    
+    def _setup_radial_fields(self, registry):
+        # Altitude is the radius from the central zone minus the radius of the
+        # surface.
+        def _depth_to_radius(field, data):
+            outer_radius = data.get_field_parameter("outer_radius")
+            if outer_radius is None:
+                if hasattr(data.ds, "outer_radius"):
+                    outer_radius = data.ds.outer_radius
+                else:
+                    # Otherwise, we assume that the depth goes to full depth,
+                    # so we can look at the domain right edge in depth.
+                    rax = self.axis_id[self.radial_axis]
+                    outer_radius = data.ds.domain_right_edge[rax]
+            return -1.0 * data["depth"] + outer_radius
+        registry.add_field(("index", "r"),
+                 function=_depth_to_radius,
+                 units = "code_length")
+        registry.alias(("index", "dr"), ("index", "ddepth"))
+
+    def _retrieve_radial_offset(self, data_source = None):
+        # Depth means switching sign and adding to full radius
+        outer_radius = None
+        if data_source is not None:
+            outer_radius = data_source.get_field_parameter("outer_radius")
+        if outer_radius is None:
+            if hasattr(self.ds, "outer_radius"):
+                outer_radius = self.ds.outer_radius
+            else:
+                # Otherwise, we assume that the depth goes to full depth,
+                # so we can look at the domain right edge in depth.
+                rax = self.axis_id[self.radial_axis]
+                outer_radius = self.ds.domain_right_edge[rax]
+        return outer_radius, -1.0
+
+    _x_pairs = (('latitude', 'longitude'),
+                ('longitude', 'latitude'),
+                ('depth', 'latitude'))
+
+    _y_pairs = (('latitude', 'depth'),
+                ('longitude', 'depth'),
+                ('depth', 'longitude'))
+
+    def sanitize_center(self, center, axis):
+        center, display_center = super(
+            GeographicCoordinateHandler, self).sanitize_center(center, axis)
+        name = self.axis_name[axis]
+        if name == self.radial_axis:
+            display_center = center
+        elif name == 'latitude':
+            display_center = (0.0 * display_center[0],
+                              0.0 * display_center[1],
+                              0.0 * display_center[2])
+        elif name == 'longitude':
+            ri = self.axis_id[self.radial_axis]
+            offset, factor = self._retrieve_radial_offset()
+            outermost = factor * self.ds.domain_left_edge[ri] + offset
+            display_center = [0.0 * display_center[0], 
+                              0.0 * display_center[1],
+                              0.0 * display_center[2]]
+            display_center[self.axis_id['latitude']] = outermost/2.0
+        return center, display_center
+
+    def sanitize_width(self, axis, width, depth):
+        name = self.axis_name[axis]
+        if width is not None:
+            width = super(GeographicCoordinateHandler, self).sanitize_width(
+              axis, width, depth)
+        elif name == self.radial_axis:
+            rax = self.radial_axis
+            width = [self.ds.domain_width[self.y_axis[rax]],
+                     self.ds.domain_width[self.x_axis[rax]]]
+        elif name == 'latitude':
+            ri = self.axis_id[self.radial_axis]
+            # Remember, in spherical coordinates when we cut in theta,
+            # we create a conic section
+            offset, factor = self._retrieve_radial_offset()
+            outermost = factor * self.ds.domain_left_edge[ri] + offset
+            width = [2.0*outermost, 2.0*outermost]
+        elif name == 'longitude':
+            ri = self.axis_id[self.radial_axis]
+            offset, factor = self._retrieve_radial_offset()
+            outermost = factor * self.ds.domain_left_edge[ri] + offset
+            width = [outermost, 2.0*outermost]
+        return width
+
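
To make the offset/factor convention concrete: for internal geographic data the radius is recovered as r = -1.0 * depth + outer_radius, while plain geographic data uses r = altitude + surface_height. A small sketch (not from the commit) using the same numbers as the test below, an outer_radius of 5000 and depths spanning 0 to 1000 code_length:

    import numpy as np

    outer_radius = 5000.0                    # code_length, as in the test below
    depth = np.array([0.0, 250.0, 1000.0])   # zero at the surface, growing inward
    offset, factor = outer_radius, -1.0      # what _retrieve_radial_offset returns
    r = factor * depth + offset
    print(r)                                 # [5000. 4750. 4000.]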

diff -r 3ca8a739e5296422ea7954ce8d2717908be25dd6 -r 1d8f9705433e9ec690848d8698db85e8903335c9 yt/geometry/coordinates/tests/test_geographic_coordinates.py
--- a/yt/geometry/coordinates/tests/test_geographic_coordinates.py
+++ b/yt/geometry/coordinates/tests/test_geographic_coordinates.py
@@ -54,3 +54,48 @@
     # We also want to check that our radius is correct
     yield assert_equal, dd["index","r"], \
                         dd["index","altitude"] + ds.surface_height
+
+def test_internal_geographic_coordinates():
+    # We're going to load up a simple AMR grid and check its volume
+    # calculations and path length calculations.
+
+    # Note that we are setting it up to have depth of 1000 maximum, which
+    # means our volume will be that of a shell 1000 wide, starting at r of
+    # outer_radius - 1000.
+    ds = fake_amr_ds(geometry="internal_geographic")
+    ds.outer_radius = ds.quan(5000, "code_length")
+    axes = ["latitude", "longitude", "depth"]
+    for i, axis in enumerate(axes):
+        dd = ds.all_data()
+        fi = ("index", axis)
+        fd = ("index", "d%s" % axis)
+        ma = np.argmax(dd[fi])
+        yield assert_equal, dd[fi][ma] + dd[fd][ma] / 2.0, ds.domain_right_edge[i].d
+        mi = np.argmin(dd[fi])
+        yield assert_equal, dd[fi][mi] - dd[fd][mi] / 2.0, ds.domain_left_edge[i].d
+        yield assert_equal, dd[fd].max(), (ds.domain_width/ds.domain_dimensions)[i].d
+    inner_r = ds.outer_radius - ds.domain_right_edge[2]
+    outer_r = ds.outer_radius
+    yield assert_equal, dd["index","dtheta"], \
+                        dd["index","dlatitude"]*np.pi/180.0
+    yield assert_equal, dd["index","dphi"], \
+                        dd["index","dlongitude"]*np.pi/180.0
+    # Note our terrible agreement here.
+    yield assert_rel_equal, dd["cell_volume"].sum(dtype="float64"), \
+                        (4.0/3.0) * np.pi * (outer_r**3 - inner_r**3), \
+                        3
+    yield assert_equal, dd["index", "path_element_depth"], \
+                        dd["index", "ddepth"]
+    yield assert_equal, dd["index", "path_element_depth"], \
+                        dd["index", "dr"]
+    # Note that latitude corresponds to theta, longitude to phi
+    yield assert_equal, dd["index", "path_element_latitude"], \
+                        dd["index", "r"] * \
+                        dd["index", "dlatitude"] * np.pi/180.0
+    yield assert_equal, dd["index", "path_element_longitude"], \
+                        dd["index", "r"] * \
+                        dd["index", "dlongitude"] * np.pi/180.0 * \
+                        np.sin((dd["index", "latitude"] + 90.0) * np.pi/180.0)
+    # We also want to check that our radius is correct
+    yield assert_equal, dd["index","r"], \
+                        -1.0*dd["index","depth"] + ds.outer_radius

diff -r 3ca8a739e5296422ea7954ce8d2717908be25dd6 -r 1d8f9705433e9ec690848d8698db85e8903335c9 yt/testing.py
--- a/yt/testing.py
+++ b/yt/testing.py
@@ -225,6 +225,8 @@
     'cylindrical': ( (0.0, 0.0, 0.0), (1.0, 1.0, 2.0*np.pi) ), # rzt
     'polar'      : ( (0.0, 0.0, 0.0), (1.0, 2.0*np.pi, 1.0) ), # rtz
     'geographic' : ( (-90.0, -180.0, 0.0), (90.0, 180.0, 1000.0) ), # latlonalt
+    'internal_geographic' :
+                   ( (-90.0, -180.0, 0.0), (90.0, 180.0, 1000.0) ), # latlondep
 }
 
 def fake_amr_ds(fields = ("Density",), geometry = "cartesian"):

Repository URL: https://bitbucket.org/yt_analysis/yt/

--

This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.


