[yt-svn] commit/yt: 5 new changesets

commits-noreply at bitbucket.org
Wed Jun 29 11:28:54 PDT 2016


5 new commits in yt:

https://bitbucket.org/yt_analysis/yt/commits/0dc4fdb1a44f/
Changeset:   0dc4fdb1a44f
Branch:      yt
User:        ethlau
Date:        2016-05-27 21:34:23+00:00
Summary:     update light_ray.py for domain width >1
Affected #:  1 file

diff -r b8a09cd382dd34f386ce3634e7f78df3f5d9401d -r 0dc4fdb1a44f6934e574da2dd4df2fc20be6ddfd yt/analysis_modules/cosmological_observation/light_ray/light_ray.py
--- a/yt/analysis_modules/cosmological_observation/light_ray/light_ray.py
+++ b/yt/analysis_modules/cosmological_observation/light_ray/light_ray.py
@@ -200,7 +200,7 @@
                                 np.cos(theta)])
             self.light_ray_solution[0]['traversal_box_fraction'] = \
               vector_length(self.light_ray_solution[0]['start'],
-                            self.light_ray_solution[0]['end'])
+                            self.light_ray_solution[0]['end'])/ ( self.ds.domain_width[0].value )
 
         # the normal way (random start positions and trajectories for each dataset)
         else:

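For context: vector_length returns the segment length in code units, so for
domains wider than 1 code unit it must be divided by the domain width to give
a fraction of the box. A minimal standalone sketch of that arithmetic
(illustrative only, not the yt source):

    import numpy as np

    def traversal_box_fraction(start, end, domain_width):
        # Distance between the ray endpoints, in the same (code) units as
        # domain_width; dividing converts it to a fraction of the box.
        distance = np.linalg.norm(np.asarray(end) - np.asarray(start))
        return distance / domain_width

    # e.g. a ray spanning half of a box 100 code units wide:
    # traversal_box_fraction([0., 0., 0.], [50., 0., 0.], 100.0) -> 0.5
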

https://bitbucket.org/yt_analysis/yt/commits/144e5495bd04/
Changeset:   144e5495bd04
Branch:      yt
User:        ethlau
Date:        2016-06-18 06:43:10+00:00
Summary:     updated light_ray.py to make unit conversion clearer
Affected #:  1 file

diff -r 0dc4fdb1a44f6934e574da2dd4df2fc20be6ddfd -r 144e5495bd04ec81f2caf580648b464b4c6107a1 yt/analysis_modules/cosmological_observation/light_ray/light_ray.py
--- a/yt/analysis_modules/cosmological_observation/light_ray/light_ray.py
+++ b/yt/analysis_modules/cosmological_observation/light_ray/light_ray.py
@@ -200,7 +200,7 @@
                                 np.cos(theta)])
             self.light_ray_solution[0]['traversal_box_fraction'] = \
               vector_length(self.light_ray_solution[0]['start'],
-                            self.light_ray_solution[0]['end'])/ ( self.ds.domain_width[0].value )
+                            self.light_ray_solution[0]['end'])
 
         # the normal way (random start positions and trajectories for each dataset)
         else:
@@ -289,13 +289,15 @@
         seed : optional, int
             Seed for the random number generator.
             Default: None.
-        start_position : optional, list of floats
+        start_position : optional, iterable of floats or YTArray.
             Used only if creating a light ray from a single dataset.
             The coordinates of the starting position of the ray.
+            If specified without units, it is assumed to be in code units.
             Default: None.
-        end_position : optional, list of floats
+        end_position : optional, iterable of floats or YTArray.
             Used only if creating a light ray from a single dataset.
             The coordinates of the ending position of the ray.
+            If specified without units, it is assumed to be in code units.
             Default: None.
         trajectory : optional, list of floats
             Used only if creating a light ray from a single dataset.
@@ -365,6 +367,19 @@
         ...                       use_peculiar_velocity=True)
 
         """
+
+        if start_position and hasattr(start_position, 'units'):
+            start_position = start_position.to('unitary')
+        elif start_position:
+            start_position = self.ds.arr(
+                start_position, 'code_length').to('unitary')
+
+        if end_position and hasattr(end_position, 'units'):
+            end_position = end_position.to('unitary')
+        elif end_position:
+            end_position = self.ds.arr(
+                end_position, 'code_length').to('unitary')
+
         if get_los_velocity is not None:
             use_peculiar_velocity = get_los_velocity
             mylog.warn("'get_los_velocity' kwarg is deprecated. Use 'use_peculiar_velocity' instead.")
@@ -413,8 +428,8 @@
                 setup_function(ds)
 
             if start_position is not None:
-                my_segment["start"] = ds.arr(my_segment["start"], "code_length")
-                my_segment["end"] = ds.arr(my_segment["end"], "code_length")
+                my_segment["start"] = ds.arr(my_segment["start"], "unitary")
+                my_segment["end"] = ds.arr(my_segment["end"], "unitary")
             else:
                 my_segment["start"] = ds.domain_width * my_segment["start"] + \
                   ds.domain_left_edge
@@ -442,6 +457,10 @@
                        (my_segment['redshift'], my_segment['start'],
                         my_segment['end']))
 
+            # Convert segment units from unitary to code length for sub_ray
+            my_segment['start'] = my_segment['start'].to('code_length')
+            my_segment['end'] = my_segment['end'].to('code_length')
+
             # Break periodic ray into non-periodic segments.
             sub_segments = periodic_ray(my_segment['start'], my_segment['end'],
                                         left=ds.domain_left_edge,
@@ -462,6 +481,7 @@
                 sub_data['dl'].extend(sub_ray['dts'][asort] *
                                       vector_length(sub_ray.start_point,
                                                     sub_ray.end_point))
+
                 for field in data_fields:
                     sub_data[field].extend(sub_ray[field][asort])
 

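The unit-handling pattern this changeset introduces, in isolation: positions
carrying units (YTArray) are converted directly, bare iterables are assumed to
be in code units per the updated docstring, and everything is expressed in
unitary units so the segment bookkeeping is independent of the domain width.
A hedged sketch, assuming a loaded dataset ds:

    def normalize_position(ds, position):
        if position is None:
            return None
        if hasattr(position, 'units'):  # e.g. a YTArray with explicit units
            return position.to('unitary')
        # Plain lists/tuples are interpreted as code_length.
        return ds.arr(position, 'code_length').to('unitary')
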

https://bitbucket.org/yt_analysis/yt/commits/dfe97166551a/
Changeset:   dfe97166551a
Branch:      yt
User:        ethlau
Date:        2016-06-22 18:50:25+00:00
Summary:     fixed condition statement to accept array values of start_position and end_position
Affected #:  1 file

diff -r 144e5495bd04ec81f2caf580648b464b4c6107a1 -r dfe97166551a2c49ed561e450e4d2e7ce4ac68f2 yt/analysis_modules/cosmological_observation/light_ray/light_ray.py
--- a/yt/analysis_modules/cosmological_observation/light_ray/light_ray.py
+++ b/yt/analysis_modules/cosmological_observation/light_ray/light_ray.py
@@ -368,15 +368,15 @@
 
         """
 
-        if start_position and hasattr(start_position, 'units'):
+        if start_position is not None and hasattr(start_position, 'units'):
             start_position = start_position.to('unitary')
-        elif start_position:
+        elif start_position is not None :
             start_position = self.ds.arr(
                 start_position, 'code_length').to('unitary')
 
-        if end_position and hasattr(end_position, 'units'):
+        if end_position is not None and hasattr(end_position, 'units'):
             end_position = end_position.to('unitary')
-        elif end_position:
+        elif end_position is not None :
             end_position = self.ds.arr(
                 end_position, 'code_length').to('unitary')
 

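The motivation for this fix: NumPy arrays (and YTArrays) with more than one
element cannot be evaluated in a boolean context, so `if start_position:`
raises instead of testing for presence. A quick illustration:

    import numpy as np

    pos = np.array([0.1, 0.2, 0.3])
    # bool(pos) raises:
    #   ValueError: The truth value of an array with more than one element
    #   is ambiguous. Use a.any() or a.all()
    # An explicit identity test is always safe:
    if pos is not None:
        print("got a position:", pos)
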

https://bitbucket.org/yt_analysis/yt/commits/7f54e2a05373/
Changeset:   7f54e2a05373
Branch:      yt
User:        ethlau
Date:        2016-06-23 05:34:42+00:00
Summary:     merge recent changes in main branch
Affected #:  77 files

diff -r dfe97166551a2c49ed561e450e4d2e7ce4ac68f2 -r 7f54e2a053739fab6a8ab250514ca4bb36910a7c .hgignore
--- a/.hgignore
+++ b/.hgignore
@@ -45,9 +45,11 @@
 yt/utilities/lib/mesh_intersection.cpp
 yt/utilities/lib/mesh_samplers.cpp
 yt/utilities/lib/mesh_traversal.cpp
+yt/utilities/lib/mesh_triangulation.c
 yt/utilities/lib/mesh_utilities.c
 yt/utilities/lib/misc_utilities.c
 yt/utilities/lib/particle_mesh_operations.c
+yt/utilities/lib/primitives.c
 yt/utilities/lib/origami.c
 yt/utilities/lib/particle_mesh_operations.c
 yt/utilities/lib/pixelization_routines.c

diff -r dfe97166551a2c49ed561e450e4d2e7ce4ac68f2 -r 7f54e2a053739fab6a8ab250514ca4bb36910a7c CONTRIBUTING.rst
--- a/CONTRIBUTING.rst
+++ b/CONTRIBUTING.rst
@@ -651,7 +651,7 @@
 .. _multiple-PRs:
 
 Working with Multiple BitBucket Pull Requests
-+++++++++++++++++++++++++++++++++++++++++++++
+---------------------------------------------
 
 Once you become active developing for yt, you may be working on
 various aspects of the code or bugfixes at the same time.  Currently,

diff -r dfe97166551a2c49ed561e450e4d2e7ce4ac68f2 -r 7f54e2a053739fab6a8ab250514ca4bb36910a7c doc/install_script.sh
--- a/doc/install_script.sh
+++ b/doc/install_script.sh
@@ -704,7 +704,7 @@
 if type -P curl &>/dev/null
 then
     echo "Using curl"
-    export GETFILE="curl -sSO"
+    export GETFILE="curl -sSOL"
 else
     echo "Using wget"
     export GETFILE="wget -nv"

diff -r dfe97166551a2c49ed561e450e4d2e7ce4ac68f2 -r 7f54e2a053739fab6a8ab250514ca4bb36910a7c doc/source/_static/yt_icon.png
Binary file doc/source/_static/yt_icon.png has changed

diff -r dfe97166551a2c49ed561e450e4d2e7ce4ac68f2 -r 7f54e2a053739fab6a8ab250514ca4bb36910a7c doc/source/analyzing/fields.rst
--- a/doc/source/analyzing/fields.rst
+++ b/doc/source/analyzing/fields.rst
@@ -98,22 +98,31 @@
 .. code-block:: python
 
     ad = ds.all_data()
+
+    # just a field name
     density = ad['density']
+
+    # field tuple with no parentheses
     density = ad['gas', 'density']
+
+    # full field tuple
     density = ad[('gas', 'density')]
-    dnesity = ad[ds.fields.gas.density]
+
+    # through the ds.fields object
+    density = ad[ds.fields.gas.density]
 
 The first data access example is the simplest. In that example, the field type
 is inferred from the name of the field. The next two examples use the field type
 explicitly, this might be necessary if there is more than one field type with a
-"density" field defined in the same simulation. The third example is a slightly
-more verbose and is syntactically identical to the second example due to the way
-indexing functions in Python. The final example uses the ``ds.fields` object
-described above. This way of accessing fields lends itself to interactive use,
-especially if you make heavy use of IPython's tab completion features. Any of
-these ways of denoting the ``('gas', 'density')`` field can be used when
-supplying a field name to a yt data object, analysis routines, or plotting and
-visualization function.
+"density" field defined in the same dataset. The third example is slightly more
+verbose but is syntactically identical to the second example due to the way
+indexing works in the Python language.
+
+The final example uses the ``ds.fields`` object described above. This way of
+accessing fields lends itself to interactive use, especially if you make heavy
+use of IPython's tab completion features. Any of these ways of denoting the
+``('gas', 'density')`` field can be used when supplying a field name to a yt
+data object, analysis routines, or plotting and visualization function.
 
 Accessing Fields without a Field Type
 -------------------------------------

diff -r dfe97166551a2c49ed561e450e4d2e7ce4ac68f2 -r 7f54e2a053739fab6a8ab250514ca4bb36910a7c doc/source/cookbook/various_lens.py
--- a/doc/source/cookbook/various_lens.py
+++ b/doc/source/cookbook/various_lens.py
@@ -85,6 +85,7 @@
 cam = sc.add_camera(ds, lens_type='spherical')
 # Set the size ratio of the final projection to be 2:1, since spherical lens
 # will generate the final image with length of 2*pi and height of pi.
+# Recommended resolution for YouTube 360-degree videos is [3840, 2160]
 cam.resolution = [500, 250]
 # Standing at (x=0.4, y=0.5, z=0.5), we look in all the radial directions
 # from this point in spherical coordinate.
@@ -99,9 +100,11 @@
 
 # Stereo-spherical lens
 cam = sc.add_camera(ds, lens_type='stereo-spherical')
-# Set the size ratio of the final projection to be 4:1, since spherical-perspective lens
-# will generate the final image with both left-eye and right-eye ones jointed together.
-cam.resolution = [1000, 250]
+# Set the size ratio of the final projection to be 1:1, since spherical-perspective lens
+# will generate the final image with both left-eye and right-eye ones jointed together,
+# with left-eye image on top and right-eye image on bottom.
+# Recommended resolution for YouTube virtual reality videos is [3840, 2160]
+cam.resolution = [500, 500]
 cam.position = ds.arr([0.4, 0.5, 0.5], 'code_length')
 cam.switch_orientation(normal_vector=normal_vector,
                        north_vector=north_vector)

diff -r dfe97166551a2c49ed561e450e4d2e7ce4ac68f2 -r 7f54e2a053739fab6a8ab250514ca4bb36910a7c doc/source/visualizing/volume_rendering.rst
--- a/doc/source/visualizing/volume_rendering.rst
+++ b/doc/source/visualizing/volume_rendering.rst
@@ -31,8 +31,8 @@
 as grid or continent lines, and then to render a production-quality
 visualization.  By changing the "lens" used, a single camera path can output
 images suitable for planetarium domes, immersive and head tracking systems
-(such as the Oculus Rift or recent "spherical" movie viewers such as the
-mobile YouTube app), as well as standard screens.
+(such as the Oculus Rift or recent 360-degree/virtual reality movie viewers
+such as the mobile YouTube app), as well as standard screens.
 
 .. image:: _images/scene_diagram.svg
    :width: 50%
@@ -327,13 +327,19 @@
 
 The :class:`~yt.visualization.volume_rendering.lens.SphericalLens` produces
 a cylindrical-spherical projection.  Movies rendered in this way can be
-displayed in head-tracking devices (e.g. Oculus Rift) or in YouTube 360 view
-(for more information see `the YouTube help
-<https://support.google.com/youtube/answer/6178631?hl=en>`, but it's a
-simple matter of running a script on an encoded movie file.)
+displayed as YouTube 360-degree videos (for more information see
+`the YouTube help: Upload 360-degree videos
+<https://support.google.com/youtube/answer/6178631?hl=en>`_).
 :class:`~yt.visualization.volume_rendering.lens.StereoSphericalLens`
 is identical to :class:`~yt.visualization.volume_rendering.lens.SphericalLens`
-but it produces two images from nearby camera positions for use in 3D viewing.
+but it produces two images from nearby camera positions for virtual reality
+movies, which can be displayed in head-tracking devices (e.g. Oculus Rift)
+or in mobile YouTube app with Google Cardboard (for more information
+see `the YouTube help: Upload virtual reality videos
+<https://support.google.com/youtube/answer/6316263?hl=en>`_).
+`This virtual reality video
+<https://youtu.be/ZYWY53X7UQE>`_ on YouTube is an example produced with
+:class:`~yt.visualization.volume_rendering.lens.StereoSphericalLens`.
 
 .. _annotated-vr-example:
 

diff -r dfe97166551a2c49ed561e450e4d2e7ce4ac68f2 -r 7f54e2a053739fab6a8ab250514ca4bb36910a7c setup.cfg
--- a/setup.cfg
+++ b/setup.cfg
@@ -14,6 +14,6 @@
 #      unused import errors
 #      autogenerated __config__.py files
 #      vendored libraries
-exclude = */api.py,*/__init__.py,*/__config__.py,yt/visualization/_mpl_imports.py,yt/utilities/lodgeit.py,yt/utilities/lru_cache.py,yt/utilities/poster/*,yt/extern/*,yt/mods.py
+exclude = doc,benchmarks,*/api.py,*/__init__.py,*/__config__.py,yt/visualization/_mpl_imports.py,yt/utilities/lodgeit.py,yt/utilities/lru_cache.py,yt/utilities/poster/*,yt/extern/*,yt/mods.py
 max-line-length=999
 ignore = E111,E121,E122,E123,E124,E125,E126,E127,E128,E129,E131,E201,E202,E211,E221,E222,E227,E228,E241,E301,E203,E225,E226,E231,E251,E261,E262,E265,E266,E302,E303,E402,E502,E701,E703,E731,W291,W292,W293,W391,W503
\ No newline at end of file

diff -r dfe97166551a2c49ed561e450e4d2e7ce4ac68f2 -r 7f54e2a053739fab6a8ab250514ca4bb36910a7c setup.py
--- a/setup.py
+++ b/setup.py
@@ -83,114 +83,70 @@
     Extension("yt.geometry.grid_visitors",
               ["yt/geometry/grid_visitors.pyx"],
               include_dirs=["yt/utilities/lib"],
-              libraries=std_libs,
-              depends=["yt/utilities/lib/fp_utils.pxd",
-                       "yt/geometry/grid_visitors.pxd"]),
+              libraries=std_libs),
     Extension("yt.geometry.grid_container",
               ["yt/geometry/grid_container.pyx"],
               include_dirs=["yt/utilities/lib/"],
-              libraries=std_libs,
-              depends=["yt/utilities/lib/fp_utils.pxd",
-                       "yt/geometry/grid_container.pxd",
-                       "yt/geometry/grid_visitors.pxd"]),
+              libraries=std_libs),
     Extension("yt.geometry.oct_container",
               ["yt/geometry/oct_container.pyx",
                "yt/utilities/lib/tsearch.c"],
               include_dirs=["yt/utilities/lib"],
-              libraries=std_libs,
-              depends=["yt/utilities/lib/fp_utils.pxd",
-                       "yt/geometry/oct_container.pxd",
-                       "yt/geometry/selection_routines.pxd"]),
+              libraries=std_libs),
     Extension("yt.geometry.oct_visitors",
               ["yt/geometry/oct_visitors.pyx"],
               include_dirs=["yt/utilities/lib/"],
-              libraries=std_libs,
-              depends=["yt/utilities/lib/fp_utils.pxd",
-                       "yt/geometry/oct_container.pxd",
-                       "yt/geometry/selection_routines.pxd"]),
+              libraries=std_libs),
     Extension("yt.geometry.particle_oct_container",
               ["yt/geometry/particle_oct_container.pyx"],
               include_dirs=["yt/utilities/lib/"],
-              libraries=std_libs,
-              depends=["yt/utilities/lib/fp_utils.pxd",
-                       "yt/geometry/oct_container.pxd",
-                       "yt/geometry/selection_routines.pxd"]),
+              libraries=std_libs),
     Extension("yt.geometry.selection_routines",
               ["yt/geometry/selection_routines.pyx"],
               include_dirs=["yt/utilities/lib/"],
-              libraries=std_libs,
-              depends=["yt/utilities/lib/fp_utils.pxd",
-                       "yt/utilities/lib/grid_traversal.pxd",
-                       "yt/geometry/oct_container.pxd",
-                       "yt/geometry/oct_visitors.pxd",
-                       "yt/geometry/grid_container.pxd",
-                       "yt/geometry/grid_visitors.pxd",
-                       "yt/geometry/selection_routines.pxd"]),
+              libraries=std_libs),
     Extension("yt.geometry.particle_deposit",
               ["yt/geometry/particle_deposit.pyx"],
               include_dirs=["yt/utilities/lib/"],
-              libraries=std_libs,
-              depends=["yt/utilities/lib/fp_utils.pxd",
-                       "yt/geometry/oct_container.pxd",
-                       "yt/geometry/selection_routines.pxd",
-                       "yt/geometry/particle_deposit.pxd"]),
+              libraries=std_libs),
     Extension("yt.geometry.particle_smooth",
               ["yt/geometry/particle_smooth.pyx"],
               include_dirs=["yt/utilities/lib/"],
-              libraries=std_libs,
-              depends=["yt/utilities/lib/fp_utils.pxd",
-                       "yt/geometry/oct_container.pxd",
-                       "yt/geometry/selection_routines.pxd",
-                       "yt/geometry/particle_deposit.pxd",
-                       "yt/geometry/particle_smooth.pxd"]),
+              libraries=std_libs),
     Extension("yt.geometry.fake_octree",
               ["yt/geometry/fake_octree.pyx"],
               include_dirs=["yt/utilities/lib/"],
-              libraries=std_libs,
-              depends=["yt/utilities/lib/fp_utils.pxd",
-                       "yt/geometry/oct_container.pxd",
-                       "yt/geometry/selection_routines.pxd"]),
+              libraries=std_libs),
     Extension("yt.utilities.spatial.ckdtree",
               ["yt/utilities/spatial/ckdtree.pyx"],
               include_dirs=["yt/utilities/lib/"],
               libraries=std_libs),
     Extension("yt.utilities.lib.bitarray",
               ["yt/utilities/lib/bitarray.pyx"],
-              libraries=std_libs, depends=["yt/utilities/lib/bitarray.pxd"]),
+              libraries=std_libs),
     Extension("yt.utilities.lib.bounding_volume_hierarchy",
               ["yt/utilities/lib/bounding_volume_hierarchy.pyx"],
               include_dirs=["yt/utilities/lib/"],
               extra_compile_args=omp_args,
               extra_link_args=omp_args,
               libraries=std_libs,
-              depends=["yt/utilities/lib/element_mappings.pxd",
-                       "yt/utilities/lib/mesh_triangulation.h",
-                       "yt/utilities/lib/vec3_ops.pxd",
-                       "yt/utilities/lib/primitives.pxd"]),
+              depends=["yt/utilities/lib/mesh_triangulation.h"]),
     Extension("yt.utilities.lib.contour_finding",
               ["yt/utilities/lib/contour_finding.pyx"],
               include_dirs=["yt/utilities/lib/",
                             "yt/geometry/"],
-              libraries=std_libs,
-              depends=["yt/utilities/lib/fp_utils.pxd",
-                       "yt/utilities/lib/amr_kdtools.pxd",
-                       "yt/utilities/lib/grid_traversal.pxd",
-                       "yt/utilities/lib/contour_finding.pxd",
-                       "yt/geometry/oct_container.pxd"]),
+              libraries=std_libs),
     Extension("yt.utilities.lib.geometry_utils",
               ["yt/utilities/lib/geometry_utils.pyx"],
               extra_compile_args=omp_args,
               extra_link_args=omp_args,
-              libraries=std_libs, depends=["yt/utilities/lib/fp_utils.pxd"]),
+              libraries=std_libs),
     Extension("yt.utilities.lib.marching_cubes",
               ["yt/utilities/lib/marching_cubes.pyx",
                "yt/utilities/lib/fixed_interpolator.c"],
               include_dirs=["yt/utilities/lib/"],
               libraries=std_libs,
-              depends=["yt/utilities/lib/fp_utils.pxd",
-                       "yt/utilities/lib/fixed_interpolator.pxd",
-                       "yt/utilities/lib/fixed_interpolator.h",
-                       ]),
+              depends=["yt/utilities/lib/fixed_interpolator.h"]),
     Extension("yt.utilities.lib.mesh_triangulation",
               ["yt/utilities/lib/mesh_triangulation.pyx"],
               depends=["yt/utilities/lib/mesh_triangulation.h"]),
@@ -198,15 +154,11 @@
               ["yt/utilities/lib/pixelization_routines.pyx",
                "yt/utilities/lib/pixelization_constants.c"],
               include_dirs=["yt/utilities/lib/"],
-              libraries=std_libs, depends=["yt/utilities/lib/fp_utils.pxd",
-                                        "yt/utilities/lib/pixelization_constants.h",
-                                        "yt/utilities/lib/element_mappings.pxd"]),
+              libraries=std_libs,
+              depends=["yt/utilities/lib/pixelization_constants.h"]),
     Extension("yt.utilities.lib.primitives",
               ["yt/utilities/lib/primitives.pyx"],
-              libraries=std_libs, 
-              depends=["yt/utilities/lib/primitives.pxd",
-                       "yt/utilities/lib/vec3_ops.pxd",
-                       "yt/utilities/lib/bounding_volume_hierarchy.pxd"]),
+              libraries=std_libs),
     Extension("yt.utilities.lib.origami",
               ["yt/utilities/lib/origami.pyx",
                "yt/utilities/lib/origami_tags.c"],
@@ -220,15 +172,11 @@
               libraries=std_libs,
               extra_compile_args=omp_args,
               extra_link_args=omp_args,
-              depends=["yt/utilities/lib/fp_utils.pxd",
-                       "yt/utilities/lib/kdtree.h",
-                       "yt/utilities/lib/fixed_interpolator.h",
-                       "yt/utilities/lib/fixed_interpolator.pxd",
-                       "yt/utilities/lib/field_interpolation_tables.pxd",
-                       "yt/utilities/lib/vec3_ops.pxd"]),
+              depends=["yt/utilities/lib/kdtree.h",
+                       "yt/utilities/lib/fixed_interpolator.h"]),
     Extension("yt.utilities.lib.element_mappings",
               ["yt/utilities/lib/element_mappings.pyx"],
-              libraries=std_libs, depends=["yt/utilities/lib/element_mappings.pxd"]),
+              libraries=std_libs),
     Extension("yt.utilities.lib.alt_ray_tracers",
               ["yt/utilities/lib/alt_ray_tracers.pyx"],
               libraries=std_libs),
@@ -244,7 +192,7 @@
     cython_extensions.append(
         Extension("yt.utilities.lib.{}".format(ext_name),
                   ["yt/utilities/lib/{}.pyx".format(ext_name)],
-                  libraries=std_libs, depends=["yt/utilities/lib/fp_utils.pxd"]))
+                  libraries=std_libs))
 
 lib_exts = ["write_array", "ragged_arrays", "line_integral_convolution"]
 for ext_name in lib_exts:
@@ -265,19 +213,9 @@
               include_dirs=["yt/frontends/artio/artio_headers/",
                             "yt/geometry/",
                             "yt/utilities/lib/"],
-              depends=glob.glob("yt/frontends/artio/artio_headers/*.c") +
-              ["yt/utilities/lib/fp_utils.pxd",
-               "yt/geometry/oct_container.pxd",
-               "yt/geometry/selection_routines.pxd",
-               "yt/geometry/particle_deposit.pxd"]),
+              depends=glob.glob("yt/frontends/artio/artio_headers/*.c")),
     Extension("yt.utilities.spatial._distance_wrap",
               glob.glob("yt/utilities/spatial/src/*.c")),
-    Extension("yt.visualization._MPL",
-              ["yt/visualization/_MPL.c"],
-              libraries=std_libs),
-    Extension("yt.utilities.data_point_utilities",
-              ["yt/utilities/data_point_utilities.c"],
-              libraries=std_libs),
 ]
 
 # EMBREE
@@ -285,31 +223,13 @@
     embree_extensions = [
         Extension("yt.utilities.lib.mesh_construction",
                   ["yt/utilities/lib/mesh_construction.pyx"],
-                  depends=["yt/utilities/lib/mesh_construction.pxd",
-                           "yt/utilities/lib/mesh_triangulation.h",
-                           "yt/utilities/lib/mesh_intersection.pxd",
-                           "yt/utlilites/lib/mesh_samplers.pxd",
-                           "yt/utlilites/lib/mesh_traversal.pxd"]),
+                  depends=["yt/utilities/lib/mesh_triangulation.h"]),
         Extension("yt.utilities.lib.mesh_traversal",
-                  ["yt/utilities/lib/mesh_traversal.pyx"],
-                  depends=["yt/utilities/lib/mesh_traversal.pxd",
-                           "yt/utilities/lib/grid_traversal.pxd",
-                           "yt/utilities/lib/bounding_volume_hierarchy.pxd"]),
+                  ["yt/utilities/lib/mesh_traversal.pyx"]),
         Extension("yt.utilities.lib.mesh_samplers",
-                  ["yt/utilities/lib/mesh_samplers.pyx"],
-                  depends=["yt/utilities/lib/mesh_samplers.pxd",
-                           "yt/utilities/lib/element_mappings.pxd",
-                           "yt/utilities/lib/mesh_construction.pxd",
-                           "yt/utilities/lib/bounding_volume_hierarchy.pxd",
-                           "yt/utilities/lib/primitives.pxd"]),
+                  ["yt/utilities/lib/mesh_samplers.pyx"]),
         Extension("yt.utilities.lib.mesh_intersection",
-                  ["yt/utilities/lib/mesh_intersection.pyx"],
-                  depends=["yt/utilities/lib/mesh_intersection.pxd",
-                           "yt/utilities/lib/mesh_construction.pxd",
-                           "yt/utilities/lib/bounding_volume_hierarchy.pxd",
-                           "yt/utilities/lib/mesh_samplers.pxd",
-                           "yt/utilities/lib/primitives.pxd",
-                           "yt/utilities/lib/vec3_ops.pxd"]),
+                  ["yt/utilities/lib/mesh_intersection.pyx"]),
     ]
 
     embree_prefix = os.path.abspath(read_embree_location())
@@ -385,9 +305,12 @@
         _build_py.run(self)
 
 class build_ext(_build_ext):
-    # subclass setuptools extension builder to avoid importing numpy
+    # subclass setuptools extension builder to avoid importing cython and numpy
     # at top level in setup.py. See http://stackoverflow.com/a/21621689/1382869
     def finalize_options(self):
+        from Cython.Build import cythonize
+        self.distribution.ext_modules[:] = cythonize(
+                self.distribution.ext_modules)
         _build_ext.finalize_options(self)
         # Prevent numpy from thinking it is still in its setup process
         # see http://stackoverflow.com/a/21621493/1382869
@@ -437,6 +360,7 @@
     ]
     },
     packages=find_packages(),
+    package_data = {'':['*.pxd']},
     setup_requires=[
         'numpy',
         'cython>=0.22',
@@ -457,7 +381,7 @@
     zip_safe=False,
     scripts=["scripts/iyt"],
     data_files=MAPSERVER_FILES + [(SHADERS_DIR, SHADERS_FILES)],
-    ext_modules=cython_extensions + extensions
+    ext_modules=cython_extensions + extensions,
 )
 
 # This info about 'ckdtree' should be incorporated somehow...

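The build_ext change above defers the Cython (and numpy) imports until build
time; a condensed sketch of the pattern, assuming setuptools and Cython are
installed:

    from setuptools import setup, Extension
    from setuptools.command.build_ext import build_ext as _build_ext

    class build_ext(_build_ext):
        # Importing setup.py now needs neither cython nor numpy; both are
        # pulled in only when the extensions are actually built.
        def finalize_options(self):
            from Cython.Build import cythonize
            self.distribution.ext_modules[:] = cythonize(
                self.distribution.ext_modules)
            _build_ext.finalize_options(self)

    # setup(..., ext_modules=[Extension("pkg.mod", ["pkg/mod.pyx"])],
    #       cmdclass={'build_ext': build_ext})
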
diff -r dfe97166551a2c49ed561e450e4d2e7ce4ac68f2 -r 7f54e2a053739fab6a8ab250514ca4bb36910a7c tests/tests.yaml
--- a/tests/tests.yaml
+++ b/tests/tests.yaml
@@ -14,7 +14,7 @@
   local_fits_000:
     - yt/frontends/fits/tests/test_outputs.py
 
-  local_flash_001:
+  local_flash_002:
     - yt/frontends/flash/tests/test_outputs.py
 
   local_gadget_000:
@@ -26,7 +26,7 @@
   local_gdf_000:
     - yt/frontends/gdf/tests/test_outputs.py
 
-  local_gizmo_000:
+  local_gizmo_001:
     - yt/frontends/gizmo/tests/test_outputs.py
 
   local_halos_000:
@@ -39,7 +39,7 @@
   local_owls_000:
     - yt/frontends/owls/tests/test_outputs.py
   
-  local_pw_000:
+  local_pw_001:
     - yt/visualization/tests/test_plotwindow.py:test_attributes
     - yt/visualization/tests/test_plotwindow.py:test_attributes_wt
     - yt/visualization/tests/test_profile_plots.py:test_phase_plot_attributes
@@ -47,7 +47,7 @@
     - yt/visualization/tests/test_particle_plot.py:test_particle_projection_filter
     - yt/visualization/tests/test_particle_plot.py:test_particle_phase_answers
   
-  local_tipsy_000:
+  local_tipsy_001:
     - yt/frontends/tipsy/tests/test_outputs.py
   
   local_varia_001:

diff -r dfe97166551a2c49ed561e450e4d2e7ce4ac68f2 -r 7f54e2a053739fab6a8ab250514ca4bb36910a7c yt/analysis_modules/level_sets/clump_handling.py
--- a/yt/analysis_modules/level_sets/clump_handling.py
+++ b/yt/analysis_modules/level_sets/clump_handling.py
@@ -57,6 +57,9 @@
         self.min_val = self.data[field].min()
         self.max_val = self.data[field].max()
 
+        if parent is not None:
+            self.data.parent = self.parent.data
+
         # List containing characteristics about clumps that are to be written 
         # out by the write routines.
         if clump_info is None:

diff -r dfe97166551a2c49ed561e450e4d2e7ce4ac68f2 -r 7f54e2a053739fab6a8ab250514ca4bb36910a7c yt/analysis_modules/level_sets/tests/test_clump_finding.py
--- /dev/null
+++ b/yt/analysis_modules/level_sets/tests/test_clump_finding.py
@@ -0,0 +1,74 @@
+"""
+Clump finder tests
+
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2016, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+import numpy as np
+
+from yt.analysis_modules.level_sets.api import \
+    Clump, \
+    find_clumps, \
+    get_lowest_clumps
+from yt.frontends.stream.api import \
+    load_uniform_grid
+from yt.testing import \
+    assert_array_equal, \
+    assert_equal
+
+def test_clump_finding():
+    n_c = 8
+    n_p = 1
+    dims = (n_c, n_c, n_c)
+
+    density = np.ones(dims)
+    high_rho = 10.
+    # add a couple disconnected density enhancements
+    density[2, 2, 2] = high_rho
+    density[6, 6, 6] = high_rho
+
+    # put a particle at the center of one of them
+    dx = 1. / n_c
+    px = 2.5 * dx * np.ones(n_p)
+    
+    data = {"density": density,
+            "particle_mass": np.ones(n_p),
+            "particle_position_x": px,
+            "particle_position_y": px,
+            "particle_position_z": px,
+            "number_of_particles": n_p}
+
+    ds = load_uniform_grid(data, dims)
+
+    ad = ds.all_data()
+    master_clump = Clump(ad, ("gas", "density"))
+    master_clump.add_validator("min_cells", 1)
+
+    find_clumps(master_clump, 0.5, 2. * high_rho, 10.)
+
+    # there should be two children
+    assert_equal(len(master_clump.children), 2)
+
+    leaf_clumps = get_lowest_clumps(master_clump)
+    # two leaf clumps
+    assert_equal(len(leaf_clumps), 2)
+
+
+    # check some clump fields
+    assert_equal(master_clump.children[0]["density"][0].size, 1)
+    assert_equal(master_clump.children[0]["density"][0], ad["density"].max())
+    assert_equal(master_clump.children[0]["particle_mass"].size, 1)
+    assert_array_equal(master_clump.children[0]["particle_mass"], ad["particle_mass"])
+    assert_equal(master_clump.children[1]["density"][0].size, 1)
+    assert_equal(master_clump.children[1]["density"][0], ad["density"].max())
+    assert_equal(master_clump.children[1]["particle_mass"].size, 0)

diff -r dfe97166551a2c49ed561e450e4d2e7ce4ac68f2 -r 7f54e2a053739fab6a8ab250514ca4bb36910a7c yt/analysis_modules/photon_simulator/photon_simulator.py
--- a/yt/analysis_modules/photon_simulator/photon_simulator.py
+++ b/yt/analysis_modules/photon_simulator/photon_simulator.py
@@ -1194,8 +1194,9 @@
         col6 = pyfits.Column(name='FLUX', format='D', array=np.array([flux.value]))
         col7 = pyfits.Column(name='SPECTRUM', format='80A', array=np.array([phfile+"[PHLIST,1]"]))
         col8 = pyfits.Column(name='IMAGE', format='80A', array=np.array([phfile+"[PHLIST,1]"]))
+        col9 = pyfits.Column(name='SRC_NAME', format='80A', array=np.array(["yt_src"]))
 
-        coldefs = pyfits.ColDefs([col1, col2, col3, col4, col5, col6, col7, col8])
+        coldefs = pyfits.ColDefs([col1, col2, col3, col4, col5, col6, col7, col8, col9])
 
         wrhdu = pyfits.BinTableHDU.from_columns(coldefs)
         wrhdu.update_ext_name("SRC_CAT")
@@ -1350,13 +1351,17 @@
             f = pyfits.open(self.parameters["RMF"])
             nchan = int(f["EBOUNDS"].header["DETCHANS"])
             num = 0
-            for i in range(1,len(f["EBOUNDS"].columns)+1):
-                if f["EBOUNDS"].header["TTYPE%d" % i] == "CHANNEL":
+            if "MATRIX" in f:
+                mat_key = "MATRIX"
+            elif "SPECRESP MATRIX" in f:
+                mat_key = "SPECRESP MATRIX"
+            for i in range(1,len(f[mat_key].columns)+1):
+                if f[mat_key].header["TTYPE%d" % i] == "F_CHAN":
                     num = i
                     break
             if num > 0:
                 tlmin = "TLMIN%d" % num
-                cmin = int(f["EBOUNDS"].header[tlmin])
+                cmin = int(f[mat_key].header[tlmin])
             else:
                 mylog.warning("Cannot determine minimum allowed value for channel. " +
                               "Setting to 0, which may be wrong.")

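The same RMF lookup in isolation, written against astropy.io.fits (the
successor to the pyfits import used here); "my_rmf.fits" is a placeholder
path:

    from astropy.io import fits

    with fits.open("my_rmf.fits") as f:
        # The response matrix lives in either a MATRIX or a SPECRESP MATRIX
        # extension, depending on the instrument that produced the file.
        mat_key = "MATRIX" if "MATRIX" in f else "SPECRESP MATRIX"
        hdr = f[mat_key].header
        cmin = 0
        for i in range(1, len(f[mat_key].columns) + 1):
            if hdr["TTYPE%d" % i] == "F_CHAN":
                # TLMINn of the F_CHAN column is the first legal channel.
                cmin = int(hdr["TLMIN%d" % i])
                break
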
diff -r dfe97166551a2c49ed561e450e4d2e7ce4ac68f2 -r 7f54e2a053739fab6a8ab250514ca4bb36910a7c yt/analysis_modules/photon_simulator/spectral_models.py
--- a/yt/analysis_modules/photon_simulator/spectral_models.py
+++ b/yt/analysis_modules/photon_simulator/spectral_models.py
@@ -264,7 +264,7 @@
         emid = self.emid.d
         if self.thermal_broad:
             sigma = E0*np.sqrt(2.*kT*erg_per_keV/(self.A[element]*amu_grams))/cl
-            vec = broaden_lines(E0, sigma, amp, emid)*de
+            vec = broaden_lines(E0, sigma, amp, ebins)
         else:
             vec = np.histogram(E0, ebins, weights=amp)[0]
         tmpspec += vec

diff -r dfe97166551a2c49ed561e450e4d2e7ce4ac68f2 -r 7f54e2a053739fab6a8ab250514ca4bb36910a7c yt/analysis_modules/photon_simulator/utils.pyx
--- a/yt/analysis_modules/photon_simulator/utils.pyx
+++ b/yt/analysis_modules/photon_simulator/utils.pyx
@@ -1,31 +1,30 @@
 import numpy as np
 cimport numpy as np
 cimport cython
-from libc.math cimport exp
-
-cdef double gfac = 1.0/np.sqrt(np.pi)
-
+from libc.math cimport erf
+    
 @cython.cdivision(True)
 @cython.boundscheck(False)
 @cython.wraparound(False)
 def broaden_lines(np.ndarray[np.float64_t, ndim=1] E0,
                   np.ndarray[np.float64_t, ndim=1] sigma,
                   np.ndarray[np.float64_t, ndim=1] amp,
-                  np.ndarray[np.float64_t, ndim=1] E):
+                  np.ndarray[np.float64_t, ndim=1] ebins):
 
-    cdef int i, j, n
-    cdef double x, isigma, iamp
-    cdef np.ndarray[np.float64_t, ndim=1] lines
+    cdef int i, j, n, m
+    cdef double x, isigma
+    cdef np.ndarray[np.float64_t, ndim=1] cdf, vec
 
     n = E0.shape[0]
-    m = E.shape[0]
-    lines = np.zeros(m)
-
+    m = ebins.shape[0]
+    cdf = np.zeros(m)
+    vec = np.zeros(m-1)
+    
     for i in range(n):
         isigma = 1.0/sigma[i]
-        iamp = gfac*amp[i]*isigma
         for j in range(m):
-            x = (E[j]-E0[i])*isigma
-            lines[j] += iamp*exp(-x*x)
-
-    return lines
+            x = (ebins[j]-E0[i])*isigma
+            cdf[j] = 0.5*(1+erf(x))
+        for j in range(m-1):
+            vec[j] = vec[j] + (cdf[j+1] - cdf[j])*amp[i]
+    return vec

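The rewritten routine integrates each Gaussian line's CDF across the bin
edges, so a line much narrower than a bin still deposits its full amplitude
instead of being undersampled at the bin centers. A vectorized NumPy/SciPy
equivalent of the loop above (a sketch, not the compiled yt routine):

    import numpy as np
    from scipy.special import erf

    def broaden_lines(E0, sigma, amp, ebins):
        # Evaluate each line's cumulative distribution at every bin edge,
        # then difference adjacent edges to get the flux in each bin.
        x = (ebins[None, :] - E0[:, None]) / sigma[:, None]
        cdf = 0.5 * (1.0 + erf(x))
        return (amp[:, None] * np.diff(cdf, axis=1)).sum(axis=0)
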
diff -r dfe97166551a2c49ed561e450e4d2e7ce4ac68f2 -r 7f54e2a053739fab6a8ab250514ca4bb36910a7c yt/data_objects/selection_data_containers.py
--- a/yt/data_objects/selection_data_containers.py
+++ b/yt/data_objects/selection_data_containers.py
@@ -24,12 +24,13 @@
     iterable, \
     validate_width_tuple, \
     fix_length
+from yt.geometry.selection_routines import \
+    points_in_cells
 from yt.units.yt_array import \
     YTArray
 from yt.utilities.exceptions import \
     YTSphereTooSmall, \
     YTIllDefinedCutRegion, \
-    YTMixedCutRegion, \
     YTEllipsoidOrdering
 from yt.utilities.minimal_representation import \
     MinimalSliceData
@@ -793,8 +794,10 @@
         for field in fields:
             f = self.base_object[field]
             if f.shape != ind.shape:
-                raise YTMixedCutRegion(self.conditionals, field)
-            self.field_data[field] = self.base_object[field][ind]
+                parent = getattr(self, "parent", self.base_object)
+                self.field_data[field] = parent[field][self._part_ind]
+            else:
+                self.field_data[field] = self.base_object[field][ind]
 
     @property
     def blocks(self):
@@ -822,6 +825,22 @@
                 np.logical_and(res, ind, ind)
         return ind
 
+    _particle_mask = None
+    @property
+    def _part_ind(self):
+        if self._particle_mask is None:
+            parent = getattr(self, "parent", self.base_object)
+            units = "code_length"
+            mask = points_in_cells(
+                self["x"].to(units), self["y"].to(units),
+                self["z"].to(units), self["dx"].to(units),
+                self["dy"].to(units), self["dz"].to(units),
+                parent["particle_position_x"].to(units),
+                parent["particle_position_y"].to(units),
+                parent["particle_position_z"].to(units))
+            self._particle_mask = mask
+        return self._particle_mask
+
     @property
     def icoords(self):
         return self.base_object.icoords[self._cond_ind,:]

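The effect of the cut_region change, as a usage sketch (IsolatedGalaxy is a
standard yt sample dataset; any dataset with both gas and particle fields
behaves the same way):

    import yt

    ds = yt.load("IsolatedGalaxy/galaxy0030/galaxy0030")
    ad = ds.all_data()
    cr = ad.cut_region(["obj['density'] > 1e-27"])
    # Previously a particle field here raised YTMixedCutRegion; now
    # points_in_cells masks the parent's particles down to those whose
    # positions fall inside the selected cells.
    print(cr["particle_mass"].size)
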
diff -r dfe97166551a2c49ed561e450e4d2e7ce4ac68f2 -r 7f54e2a053739fab6a8ab250514ca4bb36910a7c yt/data_objects/tests/test_covering_grid.py
--- a/yt/data_objects/tests/test_covering_grid.py
+++ b/yt/data_objects/tests/test_covering_grid.py
@@ -1,7 +1,10 @@
 import numpy as np
 
+from yt import \
+    load
 from yt.frontends.stream.data_structures import load_particles
 from yt.testing import \
+    requires_file, \
     fake_random_ds, \
     assert_equal, \
     assert_almost_equal
@@ -120,3 +123,11 @@
             ag = ds.arbitrary_grid([0.0, 0.0, 0.0], [1.0, 1.0, 1.0],
                     2**ref_level * ds.domain_dimensions)
             yield assert_almost_equal, cg["density"], ag["density"]
+
+output_00080 = "output_00080/info_00080.txt"
+@requires_file(output_00080)
+def test_octree_cg():
+    ds = load(output_00080)
+    cgrid = ds.covering_grid(0, left_edge=ds.domain_left_edge, dims=ds.domain_dimensions)
+    density_field = cgrid["density"]
+    assert_equal((density_field == 0.0).sum(), 0)

diff -r dfe97166551a2c49ed561e450e4d2e7ce4ac68f2 -r 7f54e2a053739fab6a8ab250514ca4bb36910a7c yt/fields/astro_fields.py
--- a/yt/fields/astro_fields.py
+++ b/yt/fields/astro_fields.py
@@ -136,7 +136,8 @@
     registry.add_field((ftype, "sz_kinetic"),
                        function=_sz_kinetic,
                        units=unit_system["length"]**-1,
-                       validators=[ValidateParameter("axis")])
+                       validators=[
+                           ValidateParameter("axis", {'axis': [0, 1, 2]})])
 
     def _szy(field, data):
         scale = 0.88 / mh * kboltz / (me * clight*clight) * sigma_thompson

diff -r dfe97166551a2c49ed561e450e4d2e7ce4ac68f2 -r 7f54e2a053739fab6a8ab250514ca4bb36910a7c yt/fields/derived_field.py
--- a/yt/fields/derived_field.py
+++ b/yt/fields/derived_field.py
@@ -23,6 +23,7 @@
     NeedsDataField, \
     NeedsProperty, \
     NeedsParameter, \
+    NeedsParameterValue, \
     FieldUnitsError
 from .field_detector import \
     FieldDetector
@@ -256,14 +257,21 @@
     pass
 
 class ValidateParameter(FieldValidator):
-    def __init__(self, parameters):
+    def __init__(self, parameters, parameter_values=None):
         """
         This validator ensures that the dataset has a given parameter.
+
+        If *parameter_values* is supplied, this will also ensure that the field
+        is available for all permutations of the field parameter.
         """
         FieldValidator.__init__(self)
         self.parameters = ensure_list(parameters)
+        self.parameter_values = parameter_values
     def __call__(self, data):
         doesnt_have = []
+        if self.parameter_values is not None:
+            if isinstance(data, FieldDetector):
+                raise NeedsParameterValue(self.parameter_values)
         for p in self.parameters:
             if not data.has_field_parameter(p):
                 doesnt_have.append(p)

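How the extended validator is meant to be used, following the astro_fields.py
hunk earlier in this changeset (the field definition below is illustrative,
not part of the commit):

    from yt.fields.derived_field import ValidateParameter

    def _axis_dependent(field, data):
        # Reads a field parameter at evaluation time; its value is unknown
        # until a caller sets it on the data object.
        axis = data.get_field_parameter("axis")
        return data["gas", "velocity_%s" % "xyz"[axis]]

    # Listing the legal values lets field detection probe the definition
    # once per permutation of "axis" instead of failing outright:
    # registry.add_field(("gas", "axis_dependent"), function=_axis_dependent,
    #                    units="cm/s",
    #                    validators=[ValidateParameter("axis",
    #                                                  {"axis": [0, 1, 2]})])
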
diff -r dfe97166551a2c49ed561e450e4d2e7ce4ac68f2 -r 7f54e2a053739fab6a8ab250514ca4bb36910a7c yt/fields/field_detector.py
--- a/yt/fields/field_detector.py
+++ b/yt/fields/field_detector.py
@@ -17,7 +17,8 @@
 from collections import defaultdict
 from yt.units.yt_array import YTArray
 from .field_exceptions import \
-    NeedsGridType
+    NeedsGridType, \
+    NeedsParameterValue
 
 class FieldDetector(defaultdict):
     Level = 1
@@ -26,7 +27,7 @@
     _id_offset = 0
     domain_id = 0
 
-    def __init__(self, nd = 16, ds = None, flat = False):
+    def __init__(self, nd = 16, ds = None, flat = False, field_parameters=None):
         self.nd = nd
         self.flat = flat
         self._spatial = not flat
@@ -36,6 +37,7 @@
         self.LeftEdge = [0.0, 0.0, 0.0]
         self.RightEdge = [1.0, 1.0, 1.0]
         self.dds = np.ones(3, "float64")
+        self.field_parameters = field_parameters
         class fake_dataset(defaultdict):
             pass
 
@@ -106,6 +108,32 @@
                 for i in nfd.requested_parameters:
                     if i not in self.requested_parameters:
                         self.requested_parameters.append(i)
+            except NeedsParameterValue as npv:
+                # redo field detection with a new FieldDetector, ensuring
+                # all needed field parameter values are set
+                for param in npv.parameter_values:
+                    # temporarily remove any ValidateParameter instances for
+                    # this field to avoid infinitely re-raising
+                    # NeedsParameterValue exceptions
+                    saved_validators = []
+                    for i, validator in enumerate(finfo.validators):
+                        params = getattr(validator, 'parameters', [])
+                        if param in params:
+                            saved_validators.append(validator)
+                            del finfo.validators[i]
+
+                    for pv in npv.parameter_values[param]:
+                        nfd = FieldDetector(self.nd, ds=self.ds,
+                                            field_parameters={param: pv})
+                        vv = finfo(nfd)
+                        for i in nfd.requested:
+                            if i not in self.requested:
+                                self.requested.append(i)
+                        for i in nfd.requested_parameters:
+                            if i not in self.requested_parameters:
+                                self.requested_parameters.append(i)
+
+                    finfo.validators.extend(saved_validators)
             if vv is not None:
                 if not self.flat: self[item] = vv
                 else: self[item] = vv.ravel()
@@ -176,6 +204,8 @@
         }
 
     def get_field_parameter(self, param, default = 0.0):
+        if self.field_parameters and param in self.field_parameters:
+            return self.field_parameters[param]
         self.requested_parameters.append(param)
         if param in ['bulk_velocity', 'center', 'normal']:
             return self.ds.arr(np.random.random(3) * 1e-2, self.fp_units[param])

diff -r dfe97166551a2c49ed561e450e4d2e7ce4ac68f2 -r 7f54e2a053739fab6a8ab250514ca4bb36910a7c yt/fields/field_exceptions.py
--- a/yt/fields/field_exceptions.py
+++ b/yt/fields/field_exceptions.py
@@ -46,6 +46,10 @@
     def __str__(self):
         return "(%s)" % (self.missing_parameters)
 
+class NeedsParameterValue(ValidationException):
+    def __init__(self, parameter_values):
+        self.parameter_values = parameter_values
+
 class NeedsConfiguration(ValidationException):
     def __init__(self, parameter, value):
         self.parameter = parameter

diff -r dfe97166551a2c49ed561e450e4d2e7ce4ac68f2 -r 7f54e2a053739fab6a8ab250514ca4bb36910a7c yt/fields/species_fields.py
--- a/yt/fields/species_fields.py
+++ b/yt/fields/species_fields.py
@@ -90,6 +90,10 @@
                        particle_type = particle_type,
                        units = unit_system["number_density"])
 
+    return [(ftype, "%s_number_density" % species),
+            (ftype, "%s_density" % species),
+            (ftype, "%s_mass" % species)]
+
 def add_species_field_by_fraction(registry, ftype, species, 
                                   particle_type = False):
     """
@@ -114,6 +118,10 @@
                        particle_type = particle_type,
                        units = unit_system["number_density"])
 
+    return [(ftype, "%s_number_density" % species),
+            (ftype, "%s_density" % species),
+            (ftype, "%s_mass" % species)]
+
 def add_species_aliases(registry, ftype, alias_species, species):
     """
     This takes a field registry, a fluid type, and two species names.  

diff -r dfe97166551a2c49ed561e450e4d2e7ce4ac68f2 -r 7f54e2a053739fab6a8ab250514ca4bb36910a7c yt/frontends/art/api.py
--- a/yt/frontends/art/api.py
+++ b/yt/frontends/art/api.py
@@ -17,7 +17,8 @@
       ARTDomainFile,\
       ARTDomainSubset,\
       ARTIndex,\
-      ARTDataset
+      ARTDataset, \
+      DarkMatterARTDataset
 
 from .fields import \
       ARTFieldInfo

diff -r dfe97166551a2c49ed561e450e4d2e7ce4ac68f2 -r 7f54e2a053739fab6a8ab250514ca4bb36910a7c yt/frontends/art/data_structures.py
--- a/yt/frontends/art/data_structures.py
+++ b/yt/frontends/art/data_structures.py
@@ -633,6 +633,10 @@
             return False
         if not f.endswith(suffix):
             return False
+        if "s0" not in f:
+            # ATOMIC.DAT, for instance, passes the other tests, but then dies
+            # during _find_files because it can't be split.
+            return False
         with open(f, 'rb') as fh:
             try:
                 amr_prefix, amr_suffix = filename_pattern['amr']

diff -r dfe97166551a2c49ed561e450e4d2e7ce4ac68f2 -r 7f54e2a053739fab6a8ab250514ca4bb36910a7c yt/frontends/eagle/tests/test_outputs.py
--- a/yt/frontends/eagle/tests/test_outputs.py
+++ b/yt/frontends/eagle/tests/test_outputs.py
@@ -24,3 +24,10 @@
 @requires_file(s28)
 def test_EagleDataset():
     assert isinstance(data_dir_load(s28), EagleDataset)
+
+s399 = "snipshot_399_z000p000/snip_399_z000p000.0.hdf5"
+@requires_file(s399)
+def test_Snipshot():
+    ds = data_dir_load(s399)
+    ds.index
+    assert isinstance(ds, EagleDataset)

diff -r dfe97166551a2c49ed561e450e4d2e7ce4ac68f2 -r 7f54e2a053739fab6a8ab250514ca4bb36910a7c yt/frontends/gadget/tests/test_outputs.py
--- a/yt/frontends/gadget/tests/test_outputs.py
+++ b/yt/frontends/gadget/tests/test_outputs.py
@@ -25,7 +25,6 @@
 
 isothermal_h5 = "IsothermalCollapse/snap_505.hdf5"
 isothermal_bin = "IsothermalCollapse/snap_505"
-g64 = "gizmo_64/output/snap_N64L16_135.hdf5"
 
 # This maps from field names to weight field names to use for projections
 iso_fields = OrderedDict(
@@ -42,11 +41,6 @@
 )
 iso_kwargs = dict(bounding_box=[[-3, 3], [-3, 3], [-3, 3]])
 
-g64_fields = iso_fields.copy()
-g64_fields["deposit", "PartType4_density"] = None
-g64_kwargs = dict(bounding_box=[[-1e5, 1e5], [-1e5, 1e5], [-1e5, 1e5]])
-
-
 @requires_file(isothermal_h5)
 @requires_file(isothermal_bin)
 def test_GadgetDataset():
@@ -62,10 +56,3 @@
     for test in sph_answer(ds, 'snap_505', 2**17, iso_fields):
         test_iso_collapse.__name__ = test.description
         yield test
-
-@requires_ds(g64, big_data=True)
-def test_gizmo_64():
-    ds = data_dir_load(g64, kwargs=g64_kwargs)
-    for test in sph_answer(ds, 'snap_N64L16_135', 524288, g64_fields):
-        test_gizmo_64.__name__ = test.description
-        yield test

diff -r dfe97166551a2c49ed561e450e4d2e7ce4ac68f2 -r 7f54e2a053739fab6a8ab250514ca4bb36910a7c yt/frontends/gdf/api.py
--- a/yt/frontends/gdf/api.py
+++ b/yt/frontends/gdf/api.py
@@ -23,3 +23,5 @@
 
 from .io import \
       IOHandlerGDFHDF5
+
+from . import tests

diff -r dfe97166551a2c49ed561e450e4d2e7ce4ac68f2 -r 7f54e2a053739fab6a8ab250514ca4bb36910a7c yt/frontends/gizmo/fields.py
--- a/yt/frontends/gizmo/fields.py
+++ b/yt/frontends/gizmo/fields.py
@@ -99,6 +99,15 @@
               data[ptype, "%s_metallicity" % species]
 
         num_neighbors = 64
+        for species in ['H', 'H_p0', 'H_p1']:
+            for suf in ["_density", "_number_density"]:
+                field = "%s%s" % (species, suf)
+                fn = add_volume_weighted_smoothed_field(
+                    ptype, "particle_position", "particle_mass",
+                    "smoothing_length", "density", field,
+                    self, num_neighbors)
+                self.alias(("gas", field), fn[0])
+
         for species in self.nuclei_names:
             self.add_field(
                 (ptype, "%s_nuclei_mass_density" % species),

diff -r dfe97166551a2c49ed561e450e4d2e7ce4ac68f2 -r 7f54e2a053739fab6a8ab250514ca4bb36910a7c yt/frontends/gizmo/tests/test_outputs.py
--- a/yt/frontends/gizmo/tests/test_outputs.py
+++ b/yt/frontends/gizmo/tests/test_outputs.py
@@ -34,13 +34,16 @@
         (('gas', 'velocity_magnitude'), None),
         (("deposit", "all_count"), None),
         (("deposit", "all_cic"), None),
+        (("deposit", "PartType0_density"), None),
     ]
 )
 
-@requires_ds(FIRE_m12i)
-def test_GizmoDataset():
-    ds = data_dir_load(FIRE_m12i)
+g64 = "gizmo_64/output/snap_N64L16_135.hdf5"
+
+@requires_ds(g64, big_data=True)
+def test_gizmo_64():
+    ds = data_dir_load(g64)
     assert isinstance(ds, GizmoDataset)
-    for test in sph_answer(ds, 'snapshot_600', 4786950, fields):
-        test_GizmoDataset.__name__ = test.description
+    for test in sph_answer(ds, 'snap_N64L16_135', 524288, fields):
+        test_gizmo_64.__name__ = test.description
         yield test

diff -r dfe97166551a2c49ed561e450e4d2e7ce4ac68f2 -r 7f54e2a053739fab6a8ab250514ca4bb36910a7c yt/frontends/owls/fields.py
--- a/yt/frontends/owls/fields.py
+++ b/yt/frontends/owls/fields.py
@@ -76,8 +76,6 @@
 
         smoothed_suffixes = ("_number_density", "_density", "_mass")
 
-
-
         # we add particle element fields for stars and gas
         #-----------------------------------------------------
         if ptype in self._add_elements:
@@ -144,6 +142,9 @@
                     symbol = ion[0:1].capitalize()
                     roman = int(ion[1:])
 
+                if (ptype, symbol + "_fraction") not in self.field_aliases:
+                    continue
+
                 pstr = "_p" + str(roman-1)
                 yt_ion = symbol + pstr
 
@@ -166,6 +167,9 @@
                     symbol = ion[0:1].capitalize()
                     roman = int(ion[1:])
 
+                if (ptype, symbol + "_fraction") not in self.field_aliases:
+                    continue
+
                 pstr = "_p" + str(roman-1)
                 yt_ion = symbol + pstr
 
@@ -201,6 +205,9 @@
                 symbol = ion[0:1].capitalize()
                 roman = int(ion[1:])
 
+            if (ptype, symbol + "_fraction") not in self.field_aliases:
+                continue
+
             pstr = "_p" + str(roman-1)
             yt_ion = symbol + pstr
             ftype = ptype

diff -r dfe97166551a2c49ed561e450e4d2e7ce4ac68f2 -r 7f54e2a053739fab6a8ab250514ca4bb36910a7c yt/funcs.py
--- a/yt/funcs.py
+++ b/yt/funcs.py
@@ -963,3 +963,24 @@
     except ImportError:
         requests = None
     return requests
+
+ at contextlib.contextmanager
+def dummy_context_manager(*args, **kwargs):
+    yield
+
+def matplotlib_style_context(style_name=None, after_reset=True):
+    """Returns a context manager for controlling matplotlib style.
+
+    Arguments are passed to matplotlib.style.context() if specified. Defaults
+    to setting "classic" style, after resetting to the default config parameters.
+
+    On older matplotlib versions (<=1.5.0) where matplotlib.style isn't
+    available, returns a dummy context manager.
+    """
+    if style_name is None:
+        style_name = 'classic'
+    try:
+        import matplotlib.style
+        return matplotlib.style.context(style_name, after_reset=after_reset)
+    except ImportError:
+        return dummy_context_manager()

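A usage sketch for the new helper (IsolatedGalaxy is a standard yt sample
dataset):

    import yt
    from yt.funcs import matplotlib_style_context

    ds = yt.load("IsolatedGalaxy/galaxy0030/galaxy0030")
    slc = yt.SlicePlot(ds, "z", "density")
    # Render with matplotlib's "classic" style (the default here); on
    # matplotlib <= 1.5.0 this degrades to a no-op context manager.
    with matplotlib_style_context():
        slc.save("slice_classic.png")
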
diff -r dfe97166551a2c49ed561e450e4d2e7ce4ac68f2 -r 7f54e2a053739fab6a8ab250514ca4bb36910a7c yt/geometry/coordinates/cartesian_coordinates.py
--- a/yt/geometry/coordinates/cartesian_coordinates.py
+++ b/yt/geometry/coordinates/cartesian_coordinates.py
@@ -23,12 +23,13 @@
     cylindrical_to_cartesian
 from yt.funcs import mylog
 from yt.utilities.lib.pixelization_routines import \
-    pixelize_element_mesh, pixelize_off_axis_cartesian
+    pixelize_element_mesh, pixelize_off_axis_cartesian, \
+    pixelize_cartesian
 from yt.data_objects.unstructured_mesh import SemiStructuredMesh
-import yt.visualization._MPL as _MPL
 
 
 class CartesianCoordinateHandler(CoordinateHandler):
+    name = "cartesian"
 
     def __init__(self, ds, ordering = ('x','y','z')):
         super(CartesianCoordinateHandler, self).__init__(ds, ordering)
@@ -127,7 +128,7 @@
         period[1] = self.period[self.y_axis[dim]]
         if hasattr(period, 'in_units'):
             period = period.in_units("code_length").d
-        buff = _MPL.Pixelize(data_source['px'], data_source['py'],
+        buff = pixelize_cartesian(data_source['px'], data_source['py'],
                              data_source['pdx'], data_source['pdy'],
                              data_source[field], size[0], size[1],
                              bounds, int(antialias),

diff -r dfe97166551a2c49ed561e450e4d2e7ce4ac68f2 -r 7f54e2a053739fab6a8ab250514ca4bb36910a7c yt/geometry/coordinates/coordinate_handler.py
--- a/yt/geometry/coordinates/coordinate_handler.py
+++ b/yt/geometry/coordinates/coordinate_handler.py
@@ -71,6 +71,7 @@
                     ds.quan(width[0], fix_unitary(width[1])))
 
 class CoordinateHandler(object):
+    name = None
     
     def __init__(self, ds, ordering):
         self.ds = weakref.proxy(ds)
@@ -132,10 +133,13 @@
         self._axis_id = ai
         return ai
 
+    _image_axis_name = None
     @property
     def image_axis_name(self):
         # Default
-        rv = {}
+        if self._image_axis_name is not None:
+            return self._image_axis_name
+        self._image_axis_name = rv = {}
         for i in range(3):
             rv[i] = (self.axis_name[self.x_axis[i]],
                      self.axis_name[self.y_axis[i]])

diff -r dfe97166551a2c49ed561e450e4d2e7ce4ac68f2 -r 7f54e2a053739fab6a8ab250514ca4bb36910a7c yt/geometry/coordinates/cylindrical_coordinates.py
--- a/yt/geometry/coordinates/cylindrical_coordinates.py
+++ b/yt/geometry/coordinates/cylindrical_coordinates.py
@@ -21,14 +21,14 @@
     _get_coord_fields, \
     cylindrical_to_cartesian, \
     cartesian_to_cylindrical
-import yt.visualization._MPL as _MPL
 from yt.utilities.lib.pixelization_routines import \
-    pixelize_cylinder
+    pixelize_cartesian, pixelize_cylinder
 #
 # Cylindrical fields
 #
 
 class CylindricalCoordinateHandler(CoordinateHandler):
+    name = "cylindrical"
 
     def __init__(self, ds, ordering = ('r', 'z', 'theta')):
         super(CylindricalCoordinateHandler, self).__init__(ds, ordering)
@@ -113,11 +113,11 @@
         period[1] = self.period[self.y_axis[dim]]
         if hasattr(period, 'in_units'):
             period = period.in_units("code_length").d
-        buff = _MPL.Pixelize(data_source['px'], data_source['py'],
-                             data_source['pdx'], data_source['pdy'],
-                             data_source[field], size[0], size[1],
-                             bounds, int(antialias),
-                             period, int(periodic)).transpose()
+        buff = pixelize_cartesian(data_source['px'], data_source['py'],
+                                  data_source['pdx'], data_source['pdy'],
+                                  data_source[field], size[0], size[1],
+                                  bounds, int(antialias),
+                                  period, int(periodic)).transpose()
         return buff
 
     def _cyl_pixelize(self, data_source, field, bounds, size, antialias):

diff -r dfe97166551a2c49ed561e450e4d2e7ce4ac68f2 -r 7f54e2a053739fab6a8ab250514ca4bb36910a7c yt/geometry/coordinates/geographic_coordinates.py
--- a/yt/geometry/coordinates/geographic_coordinates.py
+++ b/yt/geometry/coordinates/geographic_coordinates.py
@@ -23,6 +23,7 @@
     pixelize_cylinder, pixelize_aitoff
 
 class GeographicCoordinateHandler(CoordinateHandler):
+    name = "geographic"
 
     def __init__(self, ds, ordering = ('latitude', 'longitude', 'altitude')):
         super(GeographicCoordinateHandler, self).__init__(ds, ordering)

diff -r dfe97166551a2c49ed561e450e4d2e7ce4ac68f2 -r 7f54e2a053739fab6a8ab250514ca4bb36910a7c yt/geometry/coordinates/polar_coordinates.py
--- a/yt/geometry/coordinates/polar_coordinates.py
+++ b/yt/geometry/coordinates/polar_coordinates.py
@@ -18,6 +18,7 @@
 
 
 class PolarCoordinateHandler(CylindricalCoordinateHandler):
+    name = "polar"
 
     def __init__(self, ds, ordering = ('r', 'theta', 'z')):
         super(PolarCoordinateHandler, self).__init__(ds, ordering)

diff -r dfe97166551a2c49ed561e450e4d2e7ce4ac68f2 -r 7f54e2a053739fab6a8ab250514ca4bb36910a7c yt/geometry/coordinates/spec_cube_coordinates.py
--- a/yt/geometry/coordinates/spec_cube_coordinates.py
+++ b/yt/geometry/coordinates/spec_cube_coordinates.py
@@ -20,6 +20,7 @@
     _get_coord_fields
 
 class SpectralCubeCoordinateHandler(CartesianCoordinateHandler):
+    name = "spectral_cube"
 
     def __init__(self, ds, ordering = ('x', 'y', 'z')):
         ordering = tuple("xyz"[axis] for axis in

diff -r dfe97166551a2c49ed561e450e4d2e7ce4ac68f2 -r 7f54e2a053739fab6a8ab250514ca4bb36910a7c yt/geometry/coordinates/spherical_coordinates.py
--- a/yt/geometry/coordinates/spherical_coordinates.py
+++ b/yt/geometry/coordinates/spherical_coordinates.py
@@ -23,6 +23,7 @@
     pixelize_cylinder, pixelize_aitoff
 
 class SphericalCoordinateHandler(CoordinateHandler):
+    name = "spherical"
 
     def __init__(self, ds, ordering = ('r', 'theta', 'phi')):
         super(SphericalCoordinateHandler, self).__init__(ds, ordering)

diff -r dfe97166551a2c49ed561e450e4d2e7ce4ac68f2 -r 7f54e2a053739fab6a8ab250514ca4bb36910a7c yt/geometry/particle_deposit.pyx
--- a/yt/geometry/particle_deposit.pyx
+++ b/yt/geometry/particle_deposit.pyx
@@ -329,6 +329,11 @@
     cdef np.float64_t[:,:,:,:] field
     cdef public object ofield
     def initialize(self):
+        if not all(_ > 1 for _ in self.nvals):
+            from yt.utilities.exceptions import YTBoundsDefinitionError
+            raise YTBoundsDefinitionError(
+                "CIC requires minimum of 2 zones in all dimensions",
+                self.nvals)
         self.field = append_axes(
             np.zeros(self.nvals, dtype="float64", order='F'), 4)
 

diff -r dfe97166551a2c49ed561e450e4d2e7ce4ac68f2 -r 7f54e2a053739fab6a8ab250514ca4bb36910a7c yt/geometry/selection_routines.pyx
--- a/yt/geometry/selection_routines.pyx
+++ b/yt/geometry/selection_routines.pyx
@@ -239,7 +239,9 @@
                     spos[2] = pos[2] - sdds[2]/2.0
                     for k in range(2):
                         ch = NULL
-                        if root.children != NULL:
+                        # We only supply a child if we are actually going to
+                        # look at the next level.
+                        if root.children != NULL and next_level == 1:
                             ch = root.children[cind(i, j, k)]
                         if iter == 1 and next_level == 1 and ch != NULL:
                             # Note that visitor.pos is always going to be the
@@ -2046,3 +2048,42 @@
         return ("halo_particles", self.halo_id)
 
 halo_particles_selector = HaloParticlesSelector
+
+@cython.cdivision(True)
+@cython.boundscheck(False)
+@cython.wraparound(False)
+def points_in_cells(
+        np.float64_t[:] cx,
+        np.float64_t[:] cy,
+        np.float64_t[:] cz,
+        np.float64_t[:] dx,
+        np.float64_t[:] dy,
+        np.float64_t[:] dz,
+        np.float64_t[:] px,
+        np.float64_t[:] py,
+        np.float64_t[:] pz):
+    # Take a list of cells and particles and calculate which particles
+    # are enclosed within one of the cells.  This is used for querying
+    # particle fields on clump/contour objects.
+    # We use brute force since the cells are a relatively unordered collection.
+
+    cdef int p, c, n_p, n_c
+
+    n_p = px.size
+    n_c = cx.size
+    mask = np.zeros(n_p, dtype="bool")
+
+    for p in range(n_p):
+        for c in range(n_c):
+            # Move on to the next cell as soon as one axis fails.
+            if fabs(px[p] - cx[c]) > 0.5 * dx[c]:
+                continue
+            if fabs(py[p] - cy[c]) > 0.5 * dy[c]:
+                continue
+            if fabs(pz[p] - cz[c]) > 0.5 * dz[c]:
+                continue
+            # Inside along all three axes: this particle is enclosed.
+            mask[p] = True
+            break
+
+    return mask
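
For reference, the same containment test can be prototyped as a vectorized
NumPy sketch (illustrative only; it allocates an n_p x n_c boolean matrix
per axis, which is why the shipped version stays a tight Cython loop):

    import numpy as np

    def points_in_cells_numpy(cx, cy, cz, dx, dy, dz, px, py, pz):
        # A particle sits in a cell when it lies within half a cell
        # width of the cell center along every axis.
        in_x = np.abs(px[:, None] - cx[None, :]) <= 0.5 * dx[None, :]
        in_y = np.abs(py[:, None] - cy[None, :]) <= 0.5 * dy[None, :]
        in_z = np.abs(pz[:, None] - cz[None, :]) <= 0.5 * dz[None, :]
        # Keep particles enclosed by at least one cell.
        return (in_x & in_y & in_z).any(axis=1)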

diff -r dfe97166551a2c49ed561e450e4d2e7ce4ac68f2 -r 7f54e2a053739fab6a8ab250514ca4bb36910a7c yt/geometry/tests/test_particle_deposit.py
--- /dev/null
+++ b/yt/geometry/tests/test_particle_deposit.py
@@ -0,0 +1,14 @@
+from yt.utilities.exceptions import \
+    YTBoundsDefinitionError
+
+from yt.testing import \
+    fake_random_ds
+from numpy.testing import \
+    assert_raises
+
+def test_cic_deposit():
+    ds = fake_random_ds(64, nprocs = 8, particles=64**3)
+    my_reg = ds.arbitrary_grid(ds.domain_left_edge, ds.domain_right_edge,
+            dims=[1, 800, 800])
+    f = ("deposit", "all_cic")
+    assert_raises(YTBoundsDefinitionError, my_reg.__getitem__, f)

diff -r dfe97166551a2c49ed561e450e4d2e7ce4ac68f2 -r 7f54e2a053739fab6a8ab250514ca4bb36910a7c yt/units/tests/test_units.py
--- a/yt/units/tests/test_units.py
+++ b/yt/units/tests/test_units.py
@@ -477,6 +477,9 @@
     test_unit = Unit('cm**-3', base_value=1.0, registry=ds.unit_registry)
     assert_equal(test_unit.latex_repr, '\\frac{1}{\\rm{cm}^{3}}')
 
+    test_unit = Unit('m_geom/l_geom**3')
+    assert_equal(test_unit.latex_repr, '\\frac{1}{M_\\odot^{2}}')
+
 def test_latitude_longitude():
     lat = unit_symbols.lat
     lon = unit_symbols.lon

diff -r dfe97166551a2c49ed561e450e4d2e7ce4ac68f2 -r 7f54e2a053739fab6a8ab250514ca4bb36910a7c yt/units/tests/test_ytarray.py
--- a/yt/units/tests/test_ytarray.py
+++ b/yt/units/tests/test_ytarray.py
@@ -626,6 +626,29 @@
     new_length = fix_length(length, ds=ds)
     yield assert_equal, YTQuantity(10, 'cm'), new_length
 
+def test_code_unit_combinations():
+    """
+    Test comparing code units coming from different datasets
+    """
+    ds1 = fake_random_ds(64, nprocs=1, length_unit=1)
+    ds2 = fake_random_ds(64, nprocs=1, length_unit=10)
+
+    q1 = ds1.quan(1, 'code_length')
+    q2 = ds2.quan(1, 'code_length')
+
+    assert_equal(10*q1, q2)
+    assert_equal(q1/q2, 0.1)
+    assert_true(q1 < q2)
+    assert_true(q2 > q1)
+    assert_true(not bool(q1 > q2))
+    assert_true(not bool(q2 < q1))
+    assert_true(q1 != q2)
+    assert_true(not bool(q1 == q2))
+
+    assert_equal((q1 + q2).in_cgs().value, 11)
+    assert_equal((q2 + q1).in_cgs().value, 11)
+    assert_equal((q1 - q2).in_cgs().value, -9)
+    assert_equal((q2 - q1).in_cgs().value, 9)
 
 def test_ytarray_pickle():
     ds = fake_random_ds(64, nprocs=1)

diff -r dfe97166551a2c49ed561e450e4d2e7ce4ac68f2 -r 7f54e2a053739fab6a8ab250514ca4bb36910a7c yt/units/unit_lookup_table.py
--- a/yt/units/unit_lookup_table.py
+++ b/yt/units/unit_lookup_table.py
@@ -23,7 +23,7 @@
     planck_charge_esu, planck_energy_erg, planck_mass_grams, \
     planck_temperature_K, planck_time_s, mass_hydrogen_grams, \
     grams_per_pound, standard_gravity_cm_per_s2, pascal_per_atm, \
-    newton_cgs
+    newton_cgs, cm_per_rearth, cm_per_rjup
 import numpy as np
 
 # Lookup a unit symbol with the symbol string, and provide a tuple with the
@@ -87,6 +87,8 @@
     "msun": (mass_sun_grams, dimensions.mass, 0.0, r"M_\odot"),
     "Rsun": (cm_per_rsun, dimensions.length, 0.0, r"R_\odot"),
     "rsun": (cm_per_rsun, dimensions.length, 0.0, r"R_\odot"),
+    "R_sun": (cm_per_rsun, dimensions.length, 0.0, r"R_\odot"),
+    "r_sun": (cm_per_rsun, dimensions.length, 0.0, r"R_\odot"),
     "Lsun": (luminosity_sun_ergs_per_sec, dimensions.power, 0.0, r"L_\odot"),
     "Tsun": (temp_sun_kelvin, dimensions.temperature, 0.0, r"T_\odot"),
     "Zsun": (metallicity_sun, dimensions.dimensionless, 0.0, r"Z_\odot"),
@@ -151,6 +153,12 @@
     "m_geom": (mass_sun_grams, dimensions.mass, 0.0, r"M_\odot"),
     "l_geom": (newton_cgs*mass_sun_grams/speed_of_light_cm_per_s**2, dimensions.length, 0.0, r"M_\odot"),
     "t_geom": (newton_cgs*mass_sun_grams/speed_of_light_cm_per_s**3, dimensions.time, 0.0, r"M_\odot"),
+
+    # Some Solar System units
+    "R_earth": (cm_per_rearth, dimensions.length, 0.0, r"R_\oplus"),
+    "r_earth": (cm_per_rearth, dimensions.length, 0.0, r"R_\oplus"),
+    "R_jup": (cm_per_rjup, dimensions.length, 0.0, r"R_\mathrm{Jup}"),
+    "r_jup": (cm_per_rjup, dimensions.length, 0.0, r"R_\mathrm{Jup}"),
 }
 
 # This dictionary formatting from magnitude package, credit to Juan Reyero.
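
New symbols can also be registered at runtime instead of being edited into
the lookup table. A sketch, assuming UnitRegistry.add keeps its
(symbol, base_value, dimensions, tex_repr) signature and using a
hypothetical Mars radius:

    from yt.testing import fake_random_ds
    from yt.units import dimensions

    ds = fake_random_ds(16)
    # Register "R_mars" as a length unit; base_value is in cm.
    ds.unit_registry.add("R_mars", 3.3895e8, dimensions.length,
                         tex_repr=r"R_\mathrm{Mars}")
    print(ds.quan(1.0, "R_mars").in_units("km"))  # ~3389.5 km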

diff -r dfe97166551a2c49ed561e450e4d2e7ce4ac68f2 -r 7f54e2a053739fab6a8ab250514ca4bb36910a7c yt/units/unit_object.py
--- a/yt/units/unit_object.py
+++ b/yt/units/unit_object.py
@@ -110,8 +110,25 @@
             symbol_table[ex] = registry.lut[str(ex)][3]
         except:
             symbol_table[ex] = r"\rm{" + str(ex).replace('_', '\ ') + "}"
+
+    # invert the symbol table dict to look for keys with identical values
+    invert_symbols = {}
+    for key, value in symbol_table.items():
+        if value not in invert_symbols:
+            invert_symbols[value] = [key]
+        else:
+            invert_symbols[value].append(key)
+
+    # if there are any units with identical latex representations, substitute
+    # units to avoid uncanceled terms in the final latex expression.
+    for val in invert_symbols:
+        symbols = invert_symbols[val]
+        for i in range(1, len(symbols)):
+            expr = expr.subs(symbols[i], symbols[0])
+
     latex_repr = latex(expr, symbol_names=symbol_table, mul_symbol="dot",
                        fold_frac_powers=True, fold_short_frac=True)
+
     if latex_repr == '1':
         return ''
     else:
@@ -258,7 +275,11 @@
     def latex_repr(self):
         if self._latex_repr is not None:
             return self._latex_repr
-        self._latex_repr = get_latex_representation(self.expr, self.registry)
+        if self.expr.is_Atom:
+            expr = self.expr
+        else:
+            expr = self.expr.copy()
+        self._latex_repr = get_latex_representation(expr, self.registry)
         return self._latex_repr
 
     ### Some sympy conventions
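
The substitution pass is easiest to see in a standalone sympy sketch; the
symbols mirror the m_geom/l_geom pair exercised by the new test in
test_units.py above:

    from sympy import Symbol, latex

    m_geom = Symbol("m_geom", positive=True)
    l_geom = Symbol("l_geom", positive=True)
    names = {m_geom: r"M_\odot", l_geom: r"M_\odot"}

    expr = m_geom / l_geom**3
    # Printed directly, two distinct symbols share one LaTeX name and
    # the M_\odot factors never cancel:
    print(latex(expr, symbol_names=names))
    # Substituting one symbol for the other lets sympy cancel first:
    print(latex(expr.subs(l_geom, m_geom), symbol_names=names))
    # -> \frac{1}{M_\odot^{2}}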

diff -r dfe97166551a2c49ed561e450e4d2e7ce4ac68f2 -r 7f54e2a053739fab6a8ab250514ca4bb36910a7c yt/units/unit_symbols.py
--- a/yt/units/unit_symbols.py
+++ b/yt/units/unit_symbols.py
@@ -116,11 +116,11 @@
 
 Msun = solar_mass = quan(1.0, "Msun")
 msun = quan(1.0, "msun")
-Rsun = solar_radius = quan(1.0, "Rsun")
-rsun = quan(1.0, "rsun")
-Lsun = lsun = solar_luminosity = quan(1.0, "Lsun")
-Tsun = solar_temperature = quan(1.0, "Tsun")
-Zsun = solar_metallicity = quan(1.0, "Zsun")
+Rsun = R_sun = solar_radius = quan(1.0, "Rsun")
+rsun = r_sun = quan(1.0, "rsun")
+Lsun = lsun = l_sun = solar_luminosity = quan(1.0, "Lsun")
+Tsun = T_sun = solar_temperature = quan(1.0, "Tsun")
+Zsun = Z_sun = solar_metallicity = quan(1.0, "Zsun")
 
 #
 # Misc Astronomical units
@@ -129,6 +129,10 @@
 AU = astronomical_unit = quan(1.0, "AU")
 au = quan(1.0, "au")
 ly = light_year = quan(1.0, "ly")
+Rearth = R_earth = earth_radius = quan(1.0, 'R_earth')
+rearth = r_earth = quan(1.0, 'r_earth')
+Rjup = R_jup = jupiter_radius = quan(1.0, 'R_jup')
+rjup = r_jup = quan(1.0, 'r_jup')
 
 #
 # Physical units
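
With the aliases in place the new radii behave like any other quantity,
assuming the symbols are re-exported through yt.units as the existing
ones are (printed values are approximate):

    from yt.units import R_earth, R_jup

    print((2 * R_earth).in_units("km"))  # ~12756 km
    print(float(R_jup / R_earth))        # ~11.2, dimensionless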

diff -r dfe97166551a2c49ed561e450e4d2e7ce4ac68f2 -r 7f54e2a053739fab6a8ab250514ca4bb36910a7c yt/units/yt_array.py
--- a/yt/units/yt_array.py
+++ b/yt/units/yt_array.py
@@ -167,7 +167,8 @@
     # Check that other is a YTArray.
     if hasattr(other, 'units'):
         if this.units.expr is other.units.expr:
-            return other
+            if this.units.base_value == other.units.base_value:
+                return other
         if not this.units.same_dimensions_as(other.units):
             raise YTUnitOperationError(op_string, this.units, other.units)
         return other.in_units(this.units)

diff -r dfe97166551a2c49ed561e450e4d2e7ce4ac68f2 -r 7f54e2a053739fab6a8ab250514ca4bb36910a7c yt/utilities/amr_kdtree/amr_kdtree.py
--- a/yt/utilities/amr_kdtree/amr_kdtree.py
+++ b/yt/utilities/amr_kdtree/amr_kdtree.py
@@ -25,7 +25,7 @@
     scatter_image
 from yt.utilities.lib.amr_kdtools import \
     Node, \
-    add_pygrids, \
+    add_grids, \
     find_node, \
     kd_is_leaf, \
     set_dirty, \
@@ -95,7 +95,7 @@
         gles = np.array([g.LeftEdge for g in grids])
         gres = np.array([g.RightEdge for g in grids])
         gids = np.array([g.id for g in grids], dtype="int64")
-        add_pygrids(self.trunk, gids.size, gles, gres, gids,
+        add_grids(self.trunk, gids.size, gles, gres, gids,
                     self.comm_rank, self.comm_size)
         del gles, gres, gids, grids
 

This diff is so big that we needed to truncate the remainder.

https://bitbucket.org/yt_analysis/yt/commits/3b23684c3c29/
Changeset:   3b23684c3c29
Branch:      yt
User:        ngoldbaum
Date:        2016-06-29 18:28:37+00:00
Summary:     Merged in ethlau/yt (pull request #2201)

update light_ray.py for domain width != 1
Affected #:  1 file

diff -r 448e7a7cdb901d9921c111f2c5e3aa991de205f7 -r 3b23684c3c29c28a888fbfec978120700de134a1 yt/analysis_modules/cosmological_observation/light_ray/light_ray.py
--- a/yt/analysis_modules/cosmological_observation/light_ray/light_ray.py
+++ b/yt/analysis_modules/cosmological_observation/light_ray/light_ray.py
@@ -289,13 +289,15 @@
         seed : optional, int
             Seed for the random number generator.
             Default: None.
-        start_position : optional, list of floats
+        start_position : optional, iterable of floats or YTArray.
             Used only if creating a light ray from a single dataset.
             The coordinates of the starting position of the ray.
+            If specified without units, it is assumed to be in code units.
             Default: None.
-        end_position : optional, list of floats
+        end_position : optional, iterable of floats or YTArray.
             Used only if creating a light ray from a single dataset.
             The coordinates of the ending position of the ray.
+            If specified without units, it is assumed to be in code units.
             Default: None.
         trajectory : optional, list of floats
             Used only if creating a light ray from a single dataset.
@@ -365,6 +367,19 @@
         ...                       use_peculiar_velocity=True)
 
         """
+
+        if start_position is not None and hasattr(start_position, 'units'):
+            start_position = start_position.to('unitary')
+        elif start_position is not None:
+            start_position = self.ds.arr(
+                start_position, 'code_length').to('unitary')
+
+        if end_position is not None and hasattr(end_position, 'units'):
+            end_position = end_position.to('unitary')
+        elif end_position is not None:
+            end_position = self.ds.arr(
+                end_position, 'code_length').to('unitary')
+
         if get_los_velocity is not None:
             use_peculiar_velocity = get_los_velocity
             mylog.warn("'get_los_velocity' kwarg is deprecated. Use 'use_peculiar_velocity' instead.")
@@ -413,8 +428,8 @@
                 setup_function(ds)
 
             if start_position is not None:
-                my_segment["start"] = ds.arr(my_segment["start"], "code_length")
-                my_segment["end"] = ds.arr(my_segment["end"], "code_length")
+                my_segment["start"] = ds.arr(my_segment["start"], "unitary")
+                my_segment["end"] = ds.arr(my_segment["end"], "unitary")
             else:
                 my_segment["start"] = ds.domain_width * my_segment["start"] + \
                   ds.domain_left_edge
@@ -442,6 +457,10 @@
                        (my_segment['redshift'], my_segment['start'],
                         my_segment['end']))
 
+            # Convert segment units from unitary to code length for sub_ray
+            my_segment['start'] = my_segment['start'].to('code_length')
+            my_segment['end'] = my_segment['end'].to('code_length')
+
             # Break periodic ray into non-periodic segments.
             sub_segments = periodic_ray(my_segment['start'], my_segment['end'],
                                         left=ds.domain_left_edge,
@@ -462,6 +481,7 @@
                 sub_data['dl'].extend(sub_ray['dts'][asort] *
                                       vector_length(sub_ray.start_point,
                                                     sub_ray.end_point))
+
                 for field in data_fields:
                     sub_data[field].extend(sub_ray[field][asort])
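
Finally, a usage sketch of the position handling this merge settles
(the dataset path and field list are placeholders):

    import yt
    from yt.analysis_modules.cosmological_observation.api import LightRay

    ds = yt.load("my_dataset")  # placeholder
    lr = LightRay(ds)

    # Bare floats are interpreted as code_length and converted to
    # unitary internally; a YTArray with units is converted directly.
    lr.make_light_ray(start_position=[0.0, 0.0, 0.0],
                      end_position=ds.arr([1.0, 1.0, 1.0], "code_length"),
                      fields=["density"],
                      data_filename="ray.h5")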

Repository URL: https://bitbucket.org/yt_analysis/yt/

--

This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled and are the recipient of
this email.


