[yt-svn] commit/yt: 14 new changesets

commits-noreply at bitbucket.org
Tue Nov 4 06:25:57 PST 2014


14 new commits in yt:

https://bitbucket.org/yt_analysis/yt/commits/57dc3fdacbce/
Changeset:   57dc3fdacbce
Branch:      yt
User:        drudd
Date:        2014-10-16 20:48:47+00:00
Summary:     Added ComposeSelector, which allows YTSelectionContainers to draw from a data_source. There is some overlap in functionality with CuttingRegion and BooleanRegion.
Affected #:  4 files

diff -r d412f59ea351ac348d98970a350a206ab677822b -r 57dc3fdacbce0eb4dfb734a73b75a2b3a66a7f77 yt/data_objects/construction_data_containers.py
--- a/yt/data_objects/construction_data_containers.py
+++ b/yt/data_objects/construction_data_containers.py
@@ -42,7 +42,7 @@
 from yt.utilities.minimal_representation import \
     MinimalProjectionData
 from yt.utilities.parallel_tools.parallel_analysis_interface import \
-    parallel_objects, parallel_root_only, ParallelAnalysisInterface
+    parallel_objects, parallel_root_only 
 from yt.units.unit_object import Unit
 import yt.geometry.particle_deposit as particle_deposit
 from yt.utilities.grid_data_format.writer import write_to_gdf
@@ -833,7 +833,7 @@
             new_fields.append(output_field)
         level_state.fields = new_fields
 
-class YTSurfaceBase(YTSelectionContainer3D, ParallelAnalysisInterface):
+class YTSurfaceBase(YTSelectionContainer3D):
     r"""This surface object identifies isocontours on a cell-by-cell basis,
     with no consideration of global connectedness, and returns the vertices
     of the Triangles in that isocontour.
@@ -886,7 +886,6 @@
                          ("index", "z"))
     vertices = None
     def __init__(self, data_source, surface_field, field_value):
-        ParallelAnalysisInterface.__init__(self)
         self.data_source = data_source
         self.surface_field = surface_field
         self.field_value = field_value

diff -r d412f59ea351ac348d98970a350a206ab677822b -r 57dc3fdacbce0eb4dfb734a73b75a2b3a66a7f77 yt/data_objects/data_containers.py
--- a/yt/data_objects/data_containers.py
+++ b/yt/data_objects/data_containers.py
@@ -41,6 +41,8 @@
 from yt.fields.derived_field import \
     ValidateSpatial
 import yt.geometry.selection_routines
+from yt.geometry.selection_routines import \
+    compose_selector
 from yt.extern.six import add_metaclass
 
 def force_array(item, shape):
@@ -542,9 +544,12 @@
     _sort_by = None
     _selector = None
     _current_chunk = None
+    _data_source = None
 
-    def __init__(self, *args, **kwargs):
-        super(YTSelectionContainer, self).__init__(*args, **kwargs)
+    def __init__(self, ds, field_parameters, data_source=None):
+        ParallelAnalysisInterface.__init__(self)
+        super(YTSelectionContainer, self).__init__(ds, field_parameters)
+        self._data_source = data_source
 
     @property
     def selector(self):
@@ -555,7 +560,11 @@
                          "%s_selector" % self._type_name, None)
         if sclass is None:
             raise YTDataSelectorNotImplemented(self._type_name)
-        self._selector = sclass(self)
+
+        if self._data_source is not None:
+            self._selector = compose_selector(self, self._data_source.selector, sclass(self))
+        else:
+            self._selector = sclass(self)
         return self._selector
 
     def chunks(self, fields, chunking_style, **kwargs):
@@ -765,15 +774,15 @@
 
 class YTSelectionContainer0D(YTSelectionContainer):
     _spatial = False
-    def __init__(self, ds, field_parameters):
+    def __init__(self, ds, field_parameters = None, data_source = None):
         super(YTSelectionContainer0D, self).__init__(
-            ds, field_parameters)
+            ds, field_parameters, data_source)
 
 class YTSelectionContainer1D(YTSelectionContainer):
     _spatial = False
-    def __init__(self, ds, field_parameters):
+    def __init__(self, ds, field_parameters = None, data_source = None):
         super(YTSelectionContainer1D, self).__init__(
-            ds, field_parameters)
+            ds, field_parameters, data_source)
         self._grids = None
         self._sortkey = None
         self._sorted = {}
@@ -785,10 +794,9 @@
     aligned with any axis.
     """
     _spatial = False
-    def __init__(self, axis, ds, field_parameters):
-        ParallelAnalysisInterface.__init__(self)
+    def __init__(self, axis, ds, field_parameters = None, data_source = None):
         super(YTSelectionContainer2D, self).__init__(
-            ds, field_parameters)
+            ds, field_parameters, data_source)
         # We need the ds, which will exist by now, for fix_axis.
         self.axis = fix_axis(axis, self.ds)
         self.set_field_parameter("axis", axis)
@@ -908,9 +916,8 @@
     _key_fields = ['x','y','z','dx','dy','dz']
     _spatial = False
     _num_ghost_zones = 0
-    def __init__(self, center, ds = None, field_parameters = None):
-        ParallelAnalysisInterface.__init__(self)
-        super(YTSelectionContainer3D, self).__init__(ds, field_parameters)
+    def __init__(self, center, ds, field_parameters = None, data_source = None):
+        super(YTSelectionContainer3D, self).__init__(ds, field_parameters, data_source)
         self._set_center(center)
         self.coords = None
         self._grids = None
@@ -1271,9 +1278,9 @@
     """
     _type_name = "boolean"
     _con_args = ("regions",)
-    def __init__(self, regions, fields = None, ds = None, **kwargs):
+    def __init__(self, regions, fields = None, ds = None, field_parameters = None, data_source = None):
         # Center is meaningless, but we'll define it all the same.
-        YTSelectionContainer3D.__init__(self, [0.5]*3, fields, ds, **kwargs)
+        YTSelectionContainer3D.__init__(self, [0.5]*3, fields, ds, field_parameters, data_source)
         self.regions = regions
         self._all_regions = []
         self._some_overlap = []

diff -r d412f59ea351ac348d98970a350a206ab677822b -r 57dc3fdacbce0eb4dfb734a73b75a2b3a66a7f77 yt/data_objects/selection_data_containers.py
--- a/yt/data_objects/selection_data_containers.py
+++ b/yt/data_objects/selection_data_containers.py
@@ -51,8 +51,11 @@
     ds: Dataset, optional
         An optional dataset to use rather than self.ds
     field_parameters : dictionary
-         A dictionary of field parameters than can be accessed by derived
-         fields.
+        A dictionary of field parameters that can be accessed by derived
+        fields.
+    data_source : optional
+        Draw the selection from the provided data source rather than
+        all data associated with the dataset.
 
     Examples
     --------
@@ -64,8 +67,8 @@
     """
     _type_name = "point"
     _con_args = ('p',)
-    def __init__(self, p, ds = None, field_parameters = None):
-        super(YTPointBase, self).__init__(ds, field_parameters)
+    def __init__(self, p, ds = None, field_parameters = None, data_source = None):
+        super(YTPointBase, self).__init__(ds, field_parameters, data_source)
         self.p = p
 
 class YTOrthoRayBase(YTSelectionContainer1D):
@@ -92,6 +95,9 @@
     field_parameters : dictionary
          A dictionary of field parameters than can be accessed by derived
          fields.
+    data_source : optional
+        Draw the selection from the provided data source rather than
+        all data associated with the dataset.
 
     Examples
     --------
@@ -104,8 +110,8 @@
     _key_fields = ['x','y','z','dx','dy','dz']
     _type_name = "ortho_ray"
     _con_args = ('axis', 'coords')
-    def __init__(self, axis, coords, ds=None, field_parameters=None):
-        super(YTOrthoRayBase, self).__init__(ds, field_parameters)
+    def __init__(self, axis, coords, ds=None, field_parameters=None, data_source = None):
+        super(YTOrthoRayBase, self).__init__(ds, field_parameters, data_source)
         self.axis = axis
         xax = self.ds.coordinates.x_axis[self.axis]
         yax = self.ds.coordinates.y_axis[self.axis]
@@ -144,6 +150,9 @@
     field_parameters : dictionary
          A dictionary of field parameters than can be accessed by derived
          fields.
+    data_source : optional
+        Draw the selection from the provided data source rather than
+        all data associated with the dataset.
 
     Examples
     --------
@@ -156,8 +165,8 @@
     _type_name = "ray"
     _con_args = ('start_point', 'end_point')
     _container_fields = ("t", "dts")
-    def __init__(self, start_point, end_point, ds=None, field_parameters=None):
-        super(YTRayBase, self).__init__(ds, field_parameters)
+    def __init__(self, start_point, end_point, ds=None, field_parameters=None, data_source = None):
+        super(YTRayBase, self).__init__(ds, field_parameters, data_source)
         self.start_point = self.ds.arr(start_point,
                             'code_length', dtype='float64')
         self.end_point = self.ds.arr(end_point,
@@ -204,6 +213,9 @@
     field_parameters : dictionary
          A dictionary of field parameters than can be accessed by derived
          fields.
+    data_source : optional
+        Draw the selection from the provided data source rather than
+        all data associated with the dataset.
 
     Examples
     --------
@@ -219,8 +231,8 @@
     _container_fields = ("px", "py", "pdx", "pdy")
 
     def __init__(self, axis, coord, center=None, ds=None,
-                 field_parameters = None):
-        YTSelectionContainer2D.__init__(self, axis, ds, field_parameters)
+                 field_parameters = None, data_source=None):
+        YTSelectionContainer2D.__init__(self, axis, ds, field_parameters, data_source)
         self._set_center(center)
         self.coord = coord
 
@@ -285,6 +297,9 @@
     field_parameters : dictionary
          A dictionary of field parameters than can be accessed by derived
          fields.
+    data_source : optional
+        Draw the selection from the provided data source rather than
+        all data associated with the dataset.
 
     Notes
     -----
@@ -310,8 +325,8 @@
     _container_fields = ("px", "py", "pz", "pdx", "pdy", "pdz")
 
     def __init__(self, normal, center, north_vector = None, 
-                 ds = None, field_parameters = None):
-        YTSelectionContainer2D.__init__(self, 4, ds, field_parameters)
+                 ds = None, field_parameters = None, data_source = None):
+        YTSelectionContainer2D.__init__(self, 4, ds, field_parameters, data_source)
         self._set_center(center)
         self.set_field_parameter('center',center)
         # Let's set up our plane equation
@@ -482,6 +497,9 @@
     field_parameters : dictionary
          A dictionary of field parameters than can be accessed by derived
          fields.
+    data_source : optional
+        Draw the selection from the provided data source rather than
+        all data associated with the dataset.
 
     Examples
     --------
@@ -494,8 +512,8 @@
     _type_name = "disk"
     _con_args = ('center', '_norm_vec', 'radius', 'height')
     def __init__(self, center, normal, radius, height, fields=None,
-                 ds=None, **kwargs):
-        YTSelectionContainer3D.__init__(self, center, fields, ds, **kwargs)
+                 ds=None, field_parameters = None, data_source = None):
+        YTSelectionContainer3D.__init__(self, center, fields, ds, field_parameters, data_source)
         self._norm_vec = np.array(normal)/np.sqrt(np.dot(normal,normal))
         self.set_field_parameter("normal", self._norm_vec)
         self.set_field_parameter("center", self.center)
@@ -524,8 +542,8 @@
     _type_name = "region"
     _con_args = ('center', 'left_edge', 'right_edge')
     def __init__(self, center, left_edge, right_edge, fields = None,
-                 ds = None, **kwargs):
-        YTSelectionContainer3D.__init__(self, center, ds, **kwargs)
+                 ds = None, field_parameters = None, data_source = None):
+        YTSelectionContainer3D.__init__(self, center, ds, field_parameters, data_source)
         if not isinstance(left_edge, YTArray):
             self.left_edge = self.ds.arr(left_edge, 'code_length')
         else:
@@ -542,8 +560,8 @@
     """
     _type_name = "data_collection"
     _con_args = ("_obj_list",)
-    def __init__(self, center, obj_list, ds = None, field_parameters = None):
-        YTSelectionContainer3D.__init__(self, center, ds, field_parameters)
+    def __init__(self, center, obj_list, ds = None, field_parameters = None, data_source = None):
+        YTSelectionContainer3D.__init__(self, center, ds, field_parameters, data_source)
         self._obj_ids = np.array([o.id - o._id_offset for o in obj_list],
                                 dtype="int64")
         self._obj_list = obj_list
@@ -569,8 +587,8 @@
     """
     _type_name = "sphere"
     _con_args = ('center', 'radius')
-    def __init__(self, center, radius, ds = None, field_parameters = None):
-        super(YTSphereBase, self).__init__(center, ds, field_parameters)
+    def __init__(self, center, radius, ds = None, field_parameters = None, data_source = None):
+        super(YTSphereBase, self).__init__(center, ds, field_parameters, data_source)
         # Unpack the radius, if necessary
         radius = fix_length(radius, self.ds)
         if radius < self.index.get_smallest_dx():
@@ -615,8 +633,8 @@
     _type_name = "ellipsoid"
     _con_args = ('center', '_A', '_B', '_C', '_e0', '_tilt')
     def __init__(self, center, A, B, C, e0, tilt, fields=None,
-                 ds=None, field_parameters = None):
-        YTSelectionContainer3D.__init__(self, center, ds, field_parameters)
+                 ds=None, field_parameters = None, data_source = None):
+        YTSelectionContainer3D.__init__(self, center, ds, field_parameters, data_source)
         # make sure the magnitudes of semi-major axes are in order
         if A<B or B<C:
             raise YTEllipsoidOrdering(ds, A, B, C)
@@ -685,8 +703,8 @@
     _type_name = "cut_region"
     _con_args = ("base_object", "conditionals")
     def __init__(self, base_object, conditionals, ds = None,
-                 field_parameters = None):
-        super(YTCutRegionBase, self).__init__(base_object.center, ds, field_parameters)
+                 field_parameters = None, data_source = None):
+        super(YTCutRegionBase, self).__init__(base_object.center, ds, field_parameters, data_source)
         self.conditionals = ensure_list(conditionals)
         self.base_object = base_object
         self._selector = None

diff -r d412f59ea351ac348d98970a350a206ab677822b -r 57dc3fdacbce0eb4dfb734a73b75a2b3a66a7f77 yt/geometry/selection_routines.pyx
--- a/yt/geometry/selection_routines.pyx
+++ b/yt/geometry/selection_routines.pyx
@@ -112,7 +112,7 @@
 
 cdef class SelectorObject:
 
-    def __cinit__(self, dobj):
+    def __cinit__(self, dobj, *args):
         self.min_level = getattr(dobj, "min_level", 0)
         self.max_level = getattr(dobj, "max_level", 99)
         self.overlap_cells = 0
@@ -1721,6 +1721,66 @@
 
 always_selector = AlwaysSelector
 
+cdef class ComposeSelector(SelectorObject):
+    cdef SelectorObject selector1
+    cdef SelectorObject selector2
+
+    def __init__(self, dobj, selector1, selector2):
+        self.selector1 = selector1
+        self.selector2 = selector2
+        self.overlap_cells = 1
+
+    def select_grids(self,
+                     np.ndarray[np.float64_t, ndim=2] left_edges,
+                     np.ndarray[np.float64_t, ndim=2] right_edges,
+                     np.ndarray[np.int32_t, ndim=2] levels):
+        return np.logical_or(
+                    self.selector1.select_grids(left_edges, right_edges, levels),
+                    self.selector2.select_grids(left_edges, right_edges, levels))
+
+    cdef int select_cell(self, np.float64_t pos[3], np.float64_t dds[3]) nogil:
+        if self.selector1.select_cell(pos, dds) and \
+                self.selector2.select_cell(pos, dds):
+            return 1
+        else:
+            return 0
+
+    cdef int select_grid(self, np.float64_t left_edge[3],
+                         np.float64_t right_edge[3], np.int32_t level,
+                         Oct *o = NULL) nogil:
+        if self.selector1.select_grid(left_edge, right_edge, level, o) or \
+                self.selector2.select_grid(left_edge, right_edge, level, o):
+            return 1
+        else:
+            return 0
+        
+    cdef int select_point(self, np.float64_t pos[3]) nogil:
+        if self.selector1.select_point(pos) and \
+                self.selector2.select_point(pos):
+            return 1
+        else:
+            return 0
+
+    cdef int select_sphere(self, np.float64_t pos[3], np.float64_t radius) nogil:
+        if self.selector1.select_sphere(pos, radius) and \
+                self.selector2.select_sphere(pos, radius):
+            return 1
+        else:
+            return 0
+
+    cdef int select_bbox(self, np.float64_t left_edge[3],
+                               np.float64_t right_edge[3]) nogil:
+        if self.selector1.select_bbox(left_edge, right_edge) and \
+                self.selector2.select_bbox(left_edge, right_edge):
+            return 1
+        else:
+            return 0
+
+    def _hash_vals(self):
+        return (hash(self.selector1), hash(self.selector2))
+
+compose_selector = ComposeSelector
+
 cdef class HaloParticlesSelector(SelectorObject):
     cdef public object base_source
     cdef SelectorObject base_selector
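
A minimal sketch of how the new data_source keyword might be used
(assuming a loaded dataset; the sample path and coordinates are
illustrative only):

    import yt

    ds = yt.load("Enzo_64/RD0006/RedshiftOutput0006")

    # A base 3D selection.
    sph = ds.sphere([0.5, 0.5, 0.5], 0.2)

    # A second container drawn only from cells already selected by sph.
    # Internally the region's selector and the sphere's selector are
    # combined with ComposeSelector, so a cell must satisfy both.
    reg = ds.region([0.5, 0.5, 0.5], [0.3, 0.3, 0.3], [0.7, 0.7, 0.7],
                    data_source=sph)

    print reg["density"].size  # counts only cells inside both objects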


https://bitbucket.org/yt_analysis/yt/commits/73b4d7cd6714/
Changeset:   73b4d7cd6714
Branch:      yt
User:        drudd
Date:        2014-10-16 20:49:16+00:00
Summary:     Merged yt_analysis/yt into yt
Affected #:  2 files

diff -r 57dc3fdacbce0eb4dfb734a73b75a2b3a66a7f77 -r 73b4d7cd67144e860277569e73a1e5f09a99143b yt/frontends/fits/data_structures.py
--- a/yt/frontends/fits/data_structures.py
+++ b/yt/frontends/fits/data_structures.py
@@ -157,6 +157,8 @@
             naxis4 = 1
         for i, fits_file in enumerate(self.dataset._handle._fits_files):
             for j, hdu in enumerate(fits_file):
+                if isinstance(hdu, _astropy.pyfits.BinTableHDU):
+                    continue
                 if self._ensure_same_dims(hdu):
                     units = self._determine_image_units(hdu.header, known_units)
                     try:

diff -r 57dc3fdacbce0eb4dfb734a73b75a2b3a66a7f77 -r 73b4d7cd67144e860277569e73a1e5f09a99143b yt/frontends/stream/data_structures.py
--- a/yt/frontends/stream/data_structures.py
+++ b/yt/frontends/stream/data_structures.py
@@ -473,7 +473,8 @@
             field_units[k] = v.units
             new_data[k] = v.copy().d
         data = new_data
-    elif all([(len(val) == 2) for val in data.values()]):
+    elif all([((not isinstance(val, np.ndarray)) and (len(val) == 2))
+             for val in data.values()]):
         new_data, field_units = {}, {}
         for field in data:
             try:
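
A short sketch of the stream-loading pattern this change protects (the
field data here is hypothetical):

    import numpy as np
    import yt

    arr = np.random.random((16, 16, 16))

    # Only genuine (data, units) 2-tuples should take the tuple code
    # path; with this fix, a bare ndarray whose first axis happens to
    # have length 2 is no longer mistaken for one.
    data = {"density": (arr, "g/cm**3")}
    ds = yt.load_uniform_grid(data, arr.shape, length_unit="Mpc")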


https://bitbucket.org/yt_analysis/yt/commits/d1af3b660812/
Changeset:   d1af3b660812
Branch:      yt
User:        drudd
Date:        2014-10-16 22:16:12+00:00
Summary:     Add data_source to documentation and note its existence in the filtering section
Affected #:  1 file

diff -r 57dc3fdacbce0eb4dfb734a73b75a2b3a66a7f77 -r d1af3b660812f972e643e94a044b2e334b239ea2 doc/source/analyzing/objects.rst
--- a/doc/source/analyzing/objects.rst
+++ b/doc/source/analyzing/objects.rst
@@ -96,7 +96,7 @@
 
 **Point** 
     | Class :class:`~yt.data_objects.selection_data_containers.YTPointBase`    
-    | Usage: ``point(coord, ds=None, field_parameters=None)``
+    | Usage: ``point(coord, ds=None, field_parameters=None, data_source=None)``
     | A point defined by a single cell at specified coordinates.
 
 1D Objects
@@ -104,14 +104,14 @@
 
 **Ray (Axis-Aligned)** 
     | Class :class:`~yt.data_objects.selection_data_containers.YTOrthoRayBase`
-    | Usage: ``ortho_ray(axis, coord, ds=None, field_parameters=None)``
+    | Usage: ``ortho_ray(axis, coord, ds=None, field_parameters=None, data_source=None)``
     | A line (of data cells) stretching through the full domain 
       aligned with one of the x,y,z axes.  Defined by an axis and a point
       to be intersected.
 
 **Ray (Arbitrarily-Aligned)** 
     | Class :class:`~yt.data_objects.selection_data_containers.YTRayBase`
-    | Usage: ``ray(start_coord, end_coord, ds=None, field_parameters=None)``
+    | Usage: ``ray(start_coord, end_coord, ds=None, field_parameters=None, data_source=None)``
     | A line (of data cells) defined by arbitrary start and end coordinates. 
 
 2D Objects
@@ -119,13 +119,13 @@
 
 **Slice (Axis-Aligned)** 
     | Class :class:`~yt.data_objects.selection_data_containers.YTSliceBase`
-    | Usage: ``slice(axis, coord, center=None, ds=None, field_parameters=None)``
+    | Usage: ``slice(axis, coord, center=None, ds=None, field_parameters=None, data_source=None)``
     | A plane normal to one of the axes and intersecting a particular 
       coordinate.
 
 **Slice (Arbitrarily-Aligned)** 
     | Class :class:`~yt.data_objects.selection_data_containers.YTCuttingPlaneBase`
-    | Usage: ``cutting(normal, coord, north_vector=None, ds=None, field_parameters=None)``
+    | Usage: ``cutting(normal, coord, north_vector=None, ds=None, field_parameters=None, data_source=None)``
     | A plane normal to a specified vector and intersecting a particular 
       coordinate.
 
@@ -141,8 +141,8 @@
 
 **Box Region** 
     | Class :class:`~yt.data_objects.selection_data_containers.YTRegionBase`
-    | Usage: ``region(center, left_edge, right_edge, fields=None, ds=None, field_parameters=None)``
-    | Alternatively: ``box(left_edge, right_edge, fields=None, ds=None, field_parameters=None)``
+    | Usage: ``region(center, left_edge, right_edge, fields=None, ds=None, field_parameters=None, data_source=None)``
+    | Alternatively: ``box(left_edge, right_edge, fields=None, ds=None, field_parameters=None, data_source=None)``
     | A box-like region aligned with the grid axis orientation.  It is 
       defined by a left_edge, a right_edge, and a center.  The left_edge
       and right_edge are the minimum and maximum bounds in the three axes
@@ -152,14 +152,14 @@
 
 **Disk/Cylinder** 
     | Class: :class:`~yt.data_objects.selection_data_containers.YTDiskBase`
-    | Usage: ``disk(center, normal, radius, height, fields=None, ds=None, field_parameters=None)``
+    | Usage: ``disk(center, normal, radius, height, fields=None, ds=None, field_parameters=None, data_source=None)``
     | A cylinder defined by a point at the center of one of the circular bases,
       a normal vector to it defining the orientation of the length of the
       cylinder, and radius and height values for the cylinder's dimensions.
 
 **Ellipsoid** 
     | Class :class:`~yt.data_objects.selection_data_containers.YTEllipsoidBase`
-    | Usage: ``ellipsoid(center, semi_major_axis_length, semi_medium_axis_length, semi_minor_axis_length, semi_major_vector, tilt, fields=None, ds=None, field_parameters=None)``
+    | Usage: ``ellipsoid(center, semi_major_axis_length, semi_medium_axis_length, semi_minor_axis_length, semi_major_vector, tilt, fields=None, ds=None, field_parameters=None, data_source=None)``
     | An ellipsoid with axis magnitudes set by semi_major_axis_length, 
      semi_medium_axis_length, and semi_minor_axis_length.  semi_major_vector 
      sets the direction of the semi_major_axis.  tilt defines the orientation 
@@ -167,7 +167,7 @@
 
 **Sphere** 
     | Class :class:`~yt.data_objects.selection_data_containers.YTSphereBase`
-    | Usage: ``sphere(center, radius, ds=None, field_parameters=None)``
+    | Usage: ``sphere(center, radius, ds=None, field_parameters=None, data_source=None)``
     | A sphere defined by a central coordinate and a radius.
 
 
@@ -176,6 +176,12 @@
 
 See also the section on :ref:`filtering-data`.
 
+**Intersecting Regions**
+    | Most Region objects provide a data_source parameter, which allows you to subselect
+    | one region from another (in the coordinate system of the DataSet). Note that this can
+    | easily lead to empty data for non-intersecting regions.
+    | Usage: ``slice(axis, coord, ds=ds, data_source=sph)``
+
 **Boolean Regions** 
     | **Note: not yet implemented in yt 3.0**
     | Usage: ``boolean()``
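
A short illustration of the intersecting-regions behavior documented
above (assuming a loaded dataset; coordinates are arbitrary):

    import yt

    ds = yt.load("Enzo_64/RD0006/RedshiftOutput0006")

    sph = ds.sphere([0.25, 0.25, 0.25], 0.1)

    # Subselecting with a region that does not intersect the sphere
    # yields empty field arrays rather than raising an error.
    reg = ds.region([0.75]*3, [0.7]*3, [0.8]*3, data_source=sph)
    print reg["density"].size  # 0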


https://bitbucket.org/yt_analysis/yt/commits/587c1155d237/
Changeset:   587c1155d237
Branch:      yt
User:        drudd
Date:        2014-10-16 22:20:04+00:00
Summary:     Merged with upstream
Affected #:  2 files

diff -r d1af3b660812f972e643e94a044b2e334b239ea2 -r 587c1155d237921ded0a066034c6f7cc03f8c25f yt/frontends/fits/data_structures.py
--- a/yt/frontends/fits/data_structures.py
+++ b/yt/frontends/fits/data_structures.py
@@ -157,6 +157,8 @@
             naxis4 = 1
         for i, fits_file in enumerate(self.dataset._handle._fits_files):
             for j, hdu in enumerate(fits_file):
+                if isinstance(hdu, _astropy.pyfits.BinTableHDU):
+                    continue
                 if self._ensure_same_dims(hdu):
                     units = self._determine_image_units(hdu.header, known_units)
                     try:

diff -r d1af3b660812f972e643e94a044b2e334b239ea2 -r 587c1155d237921ded0a066034c6f7cc03f8c25f yt/frontends/stream/data_structures.py
--- a/yt/frontends/stream/data_structures.py
+++ b/yt/frontends/stream/data_structures.py
@@ -473,7 +473,8 @@
             field_units[k] = v.units
             new_data[k] = v.copy().d
         data = new_data
-    elif all([(len(val) == 2) for val in data.values()]):
+    elif all([((not isinstance(val, np.ndarray)) and (len(val) == 2))
+             for val in data.values()]):
         new_data, field_units = {}, {}
         for field in data:
             try:


https://bitbucket.org/yt_analysis/yt/commits/9392da475d27/
Changeset:   9392da475d27
Branch:      yt
User:        drudd
Date:        2014-10-17 14:00:08+00:00
Summary:     Removed extraneous parameter to YTSelectionContainer3D constructor call in YTDiskBase
Affected #:  1 file

diff -r 587c1155d237921ded0a066034c6f7cc03f8c25f -r 9392da475d279024b30c2df129f34deda9788daf yt/data_objects/selection_data_containers.py
--- a/yt/data_objects/selection_data_containers.py
+++ b/yt/data_objects/selection_data_containers.py
@@ -513,7 +513,7 @@
     _con_args = ('center', '_norm_vec', 'radius', 'height')
     def __init__(self, center, normal, radius, height, fields=None,
                  ds=None, field_parameters = None, data_source = None):
-        YTSelectionContainer3D.__init__(self, center, fields, ds, field_parameters, data_source)
+        YTSelectionContainer3D.__init__(self, center, ds, field_parameters, data_source)
         self._norm_vec = np.array(normal)/np.sqrt(np.dot(normal,normal))
         self.set_field_parameter("normal", self._norm_vec)
         self.set_field_parameter("center", self.center)


https://bitbucket.org/yt_analysis/yt/commits/209200b7826e/
Changeset:   209200b7826e
Branch:      yt
User:        drudd
Date:        2014-10-17 14:34:35+00:00
Summary:     Fall back to the default value for overlap_cells
Affected #:  1 file

diff -r 9392da475d279024b30c2df129f34deda9788daf -r 209200b7826e664c2741b181f7de7d563a267003 yt/geometry/selection_routines.pyx
--- a/yt/geometry/selection_routines.pyx
+++ b/yt/geometry/selection_routines.pyx
@@ -1728,7 +1728,6 @@
     def __init__(self, dobj, selector1, selector2):
         self.selector1 = selector1
         self.selector2 = selector2
-        self.overlap_cells = 1
 
     def select_grids(self,
                      np.ndarray[np.float64_t, ndim=2] left_edges,


https://bitbucket.org/yt_analysis/yt/commits/afb594fda12e/
Changeset:   afb594fda12e
Branch:      yt
User:        drudd
Date:        2014-10-17 18:28:29+00:00
Summary:     Add check in YTSelectionContainer to ensure consistency between ds and data_source.ds
Affected #:  1 file

diff -r 209200b7826e664c2741b181f7de7d563a267003 -r afb594fda12ef4490791f4217af06423ba6df6a4 yt/data_objects/data_containers.py
--- a/yt/data_objects/data_containers.py
+++ b/yt/data_objects/data_containers.py
@@ -550,6 +550,8 @@
         ParallelAnalysisInterface.__init__(self)
         super(YTSelectionContainer, self).__init__(ds, field_parameters)
         self._data_source = data_source
+        if data_source is not None and data_source.ds is not ds:
+            raise RuntimeError("Attempted to construct a DataContainer with a data_source from a different DataSet")
 
     @property
     def selector(self):
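
A sketch of the failure mode this check catches (the dataset paths are
hypothetical):

    import yt

    ds_a = yt.load("output_0001")
    ds_b = yt.load("output_0002")

    sph = ds_a.sphere([0.5]*3, 0.1)

    # Mixing datasets now fails fast instead of selecting inconsistently.
    try:
        reg = ds_b.region([0.5]*3, [0.4]*3, [0.6]*3, data_source=sph)
    except RuntimeError as e:
        print e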


https://bitbucket.org/yt_analysis/yt/commits/65cae75f2b89/
Changeset:   65cae75f2b89
Branch:      yt
User:        drudd
Date:        2014-10-17 21:46:58+00:00
Summary:     Improve detection of inconsistencies in how the dataset is defined in data container objects
Affected #:  2 files

diff -r afb594fda12ef4490791f4217af06423ba6df6a4 -r 65cae75f2b89ca9748c86d52b397e01658e76a2a yt/data_objects/data_containers.py
--- a/yt/data_objects/data_containers.py
+++ b/yt/data_objects/data_containers.py
@@ -103,8 +103,15 @@
         sets its initial set of fields, and the remainder of the arguments
         are passed as field_parameters.
         """
-        if ds != None:
+        # ds is typically set in the new object type created in Dataset._add_object_class,
+        # but it can also be passed as a parameter to the constructor, in which case it
+        # will override the default. This code ensures it is always set.
+        if ds is not None:
             self.ds = ds
+        else:
+            if not hasattr(self, "ds"):
+                raise RuntimeError("Error: ds must be set either through class type or parameter to the constructor")
+
         self._current_particle_type = "all"
         self._current_fluid_type = self.ds.default_fluid_type
         self.ds.objects.append(weakref.proxy(self))
@@ -545,14 +552,21 @@
     _selector = None
     _current_chunk = None
     _data_source = None
+    _dimensionality = None
 
     def __init__(self, ds, field_parameters, data_source=None):
         ParallelAnalysisInterface.__init__(self)
         super(YTSelectionContainer, self).__init__(ds, field_parameters)
         self._data_source = data_source
-        if data_source is not None and data_source.ds is not ds:
-            raise RuntimeError("Attempted to construct a DataContainer with a data_source from a different DataSet")
-
+        if data_source is not None:
+            if data_source.ds is not self.ds:
+                raise RuntimeError("Attempted to construct a DataContainer with a data_source from a different DataSet", ds, data_source.ds)
+            else:
+                print "DataSets: ", self.ds, data_source.ds
+            if data_source._dimensionality < self._dimensionality:
+                raise RuntimeError("Attempted to construct a DataContainer with a data_source of lower dimensionality (%u vs %u)" %
+                                    (data_source._dimensionality, self._dimensionality))
+ 
     @property
     def selector(self):
         if self._selector is not None: return self._selector
@@ -776,12 +790,14 @@
 
 class YTSelectionContainer0D(YTSelectionContainer):
     _spatial = False
+    _dimensionality = 0
     def __init__(self, ds, field_parameters = None, data_source = None):
         super(YTSelectionContainer0D, self).__init__(
             ds, field_parameters, data_source)
 
 class YTSelectionContainer1D(YTSelectionContainer):
     _spatial = False
+    _dimensionality = 1
     def __init__(self, ds, field_parameters = None, data_source = None):
         super(YTSelectionContainer1D, self).__init__(
             ds, field_parameters, data_source)
@@ -791,6 +807,7 @@
 
 class YTSelectionContainer2D(YTSelectionContainer):
     _key_fields = ['px','py','pdx','pdy']
+    _dimensionality = 2
     """
     Prepares the YTSelectionContainer2D, normal to *axis*.  If *axis* is 4, we are not
     aligned with any axis.
@@ -918,6 +935,7 @@
     _key_fields = ['x','y','z','dx','dy','dz']
     _spatial = False
     _num_ghost_zones = 0
+    _dimensionality = 3
     def __init__(self, center, ds, field_parameters = None, data_source = None):
         super(YTSelectionContainer3D, self).__init__(ds, field_parameters, data_source)
         self._set_center(center)

diff -r afb594fda12ef4490791f4217af06423ba6df6a4 -r 65cae75f2b89ca9748c86d52b397e01658e76a2a yt/data_objects/selection_data_containers.py
--- a/yt/data_objects/selection_data_containers.py
+++ b/yt/data_objects/selection_data_containers.py
@@ -633,7 +633,7 @@
     _type_name = "ellipsoid"
     _con_args = ('center', '_A', '_B', '_C', '_e0', '_tilt')
     def __init__(self, center, A, B, C, e0, tilt, fields=None,
-                 ds=None, field_parameters = None, data_source = None):
+                 ds = None, field_parameters = None, data_source = None):
         YTSelectionContainer3D.__init__(self, center, ds, field_parameters, data_source)
         # make sure the magnitudes of semi-major axes are in order
         if A<B or B<C:
@@ -643,7 +643,7 @@
         self._B = self.ds.quan(B, 'code_length')
         self._C = self.ds.quan(C, 'code_length')
         if self._C < self.index.get_smallest_dx():
-            raise YTSphereTooSmall(ds, self._C, self.index.get_smallest_dx())
+            raise YTSphereTooSmall(self.ds, self._C, self.index.get_smallest_dx())
         self._e0 = e0 = e0 / (e0**2.0).sum()**0.5
         self._tilt = tilt
         

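
A sketch of the new dimensionality rule (assuming a loaded dataset; the
sample path is illustrative):

    import yt

    ds = yt.load("Enzo_64/RD0006/RedshiftOutput0006")

    # Drawing a 2D slice from a 3D sphere is allowed...
    sph = ds.sphere([0.5]*3, 0.2)
    sl = ds.slice(2, 0.5, data_source=sph)

    # ...but drawing a 3D sphere from a 2D slice now raises, because the
    # data_source has lower dimensionality than the new container.
    try:
        bad = ds.sphere([0.5]*3, 0.2, data_source=sl)
    except RuntimeError as e:
        print e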

https://bitbucket.org/yt_analysis/yt/commits/b6617604e57a/
Changeset:   b6617604e57a
Branch:      yt
User:        drudd
Date:        2014-10-25 21:39:48+00:00
Summary:     Fixed order of parameters and pep8 cleanup
Affected #:  1 file

diff -r 65cae75f2b89ca9748c86d52b397e01658e76a2a -r b6617604e57a67ef9d0bd803a9f704babe1b4528 yt/data_objects/selection_data_containers.py
--- a/yt/data_objects/selection_data_containers.py
+++ b/yt/data_objects/selection_data_containers.py
@@ -67,7 +67,7 @@
     """
     _type_name = "point"
     _con_args = ('p',)
-    def __init__(self, p, ds = None, field_parameters = None, data_source = None):
+    def __init__(self, p, ds=None, field_parameters=None, data_source=None):
         super(YTPointBase, self).__init__(ds, field_parameters, data_source)
         self.p = p
 
@@ -110,7 +110,8 @@
     _key_fields = ['x','y','z','dx','dy','dz']
     _type_name = "ortho_ray"
     _con_args = ('axis', 'coords')
-    def __init__(self, axis, coords, ds=None, field_parameters=None, data_source = None):
+    def __init__(self, axis, coords, ds=None, 
+                 field_parameters=None, data_source=None):
         super(YTOrthoRayBase, self).__init__(ds, field_parameters, data_source)
         self.axis = axis
         xax = self.ds.coordinates.x_axis[self.axis]
@@ -165,7 +166,8 @@
     _type_name = "ray"
     _con_args = ('start_point', 'end_point')
     _container_fields = ("t", "dts")
-    def __init__(self, start_point, end_point, ds=None, field_parameters=None, data_source = None):
+    def __init__(self, start_point, end_point, ds=None,
+                 field_parameters=None, data_source=None):
         super(YTRayBase, self).__init__(ds, field_parameters, data_source)
         self.start_point = self.ds.arr(start_point,
                             'code_length', dtype='float64')
@@ -229,10 +231,10 @@
     _type_name = "slice"
     _con_args = ('axis', 'coord')
     _container_fields = ("px", "py", "pdx", "pdy")
-
     def __init__(self, axis, coord, center=None, ds=None,
-                 field_parameters = None, data_source=None):
-        YTSelectionContainer2D.__init__(self, axis, ds, field_parameters, data_source)
+                 field_parameters=None, data_source=None):
+        YTSelectionContainer2D.__init__(self, axis, ds,
+                                        field_parameters, data_source)
         self._set_center(center)
         self.coord = coord
 
@@ -323,10 +325,10 @@
     _type_name = "cutting"
     _con_args = ('normal', 'center')
     _container_fields = ("px", "py", "pz", "pdx", "pdy", "pdz")
-
-    def __init__(self, normal, center, north_vector = None, 
-                 ds = None, field_parameters = None, data_source = None):
-        YTSelectionContainer2D.__init__(self, 4, ds, field_parameters, data_source)
+    def __init__(self, normal, center, north_vector=None,
+                 ds=None, field_parameters=None, data_source=None):
+        YTSelectionContainer2D.__init__(self, 4, ds,
+                                        field_parameters, data_source)
         self._set_center(center)
         self.set_field_parameter('center',center)
         # Let's set up our plane equation
@@ -480,7 +482,7 @@
 
     Parameters
     ----------
-    center : array_like 
+    center : array_like
         coordinate to which the normal, radius, and height all reference
     normal : array_like
         the normal vector defining the direction of lengthwise part of the 
@@ -512,8 +514,9 @@
     _type_name = "disk"
     _con_args = ('center', '_norm_vec', 'radius', 'height')
     def __init__(self, center, normal, radius, height, fields=None,
-                 ds=None, field_parameters = None, data_source = None):
-        YTSelectionContainer3D.__init__(self, center, ds, field_parameters, data_source)
+                 ds=None, field_parameters=None, data_source=None):
+        YTSelectionContainer3D.__init__(self, center, ds,
+                                        field_parameters, data_source)
         self._norm_vec = np.array(normal)/np.sqrt(np.dot(normal,normal))
         self.set_field_parameter("normal", self._norm_vec)
         self.set_field_parameter("center", self.center)
@@ -541,9 +544,10 @@
     """
     _type_name = "region"
     _con_args = ('center', 'left_edge', 'right_edge')
-    def __init__(self, center, left_edge, right_edge, fields = None,
-                 ds = None, field_parameters = None, data_source = None):
-        YTSelectionContainer3D.__init__(self, center, ds, field_parameters, data_source)
+    def __init__(self, center, left_edge, right_edge, fields=None,
+                 ds=None, field_parameters=None, data_source=None):
+        YTSelectionContainer3D.__init__(self, center, ds,
+                                        field_parameters, data_source)
         if not isinstance(left_edge, YTArray):
             self.left_edge = self.ds.arr(left_edge, 'code_length')
         else:
@@ -560,8 +564,10 @@
     """
     _type_name = "data_collection"
     _con_args = ("_obj_list",)
-    def __init__(self, center, obj_list, ds = None, field_parameters = None, data_source = None):
-        YTSelectionContainer3D.__init__(self, center, ds, field_parameters, data_source)
+    def __init__(self, obj_list, ds=None, field_parameters=None,
+                 data_source=None, center=None):
+        YTSelectionContainer3D.__init__(self, center, ds,
+                                        field_parameters, data_source)
         self._obj_ids = np.array([o.id - o._id_offset for o in obj_list],
                                 dtype="int64")
         self._obj_list = obj_list
@@ -587,8 +593,10 @@
     """
     _type_name = "sphere"
     _con_args = ('center', 'radius')
-    def __init__(self, center, radius, ds = None, field_parameters = None, data_source = None):
-        super(YTSphereBase, self).__init__(center, ds, field_parameters, data_source)
+    def __init__(self, center, radius, ds=None,
+                 field_parameters=None, data_source=None):
+        super(YTSphereBase, self).__init__(center, ds,
+                                           field_parameters, data_source)
         # Unpack the radius, if necessary
         radius = fix_length(radius, self.ds)
         if radius < self.index.get_smallest_dx():
@@ -633,8 +641,9 @@
     _type_name = "ellipsoid"
     _con_args = ('center', '_A', '_B', '_C', '_e0', '_tilt')
     def __init__(self, center, A, B, C, e0, tilt, fields=None,
-                 ds = None, field_parameters = None, data_source = None):
-        YTSelectionContainer3D.__init__(self, center, ds, field_parameters, data_source)
+                 ds=None, field_parameters=None, data_source=None):
+        YTSelectionContainer3D.__init__(self, center, ds,
+                                        field_parameters, data_source)
         # make sure the magnitudes of semi-major axes are in order
         if A<B or B<C:
             raise YTEllipsoidOrdering(ds, A, B, C)
@@ -646,7 +655,7 @@
             raise YTSphereTooSmall(self.ds, self._C, self.index.get_smallest_dx())
         self._e0 = e0 = e0 / (e0**2.0).sum()**0.5
         self._tilt = tilt
-        
+ 
         # find the t1 angle needed to rotate about z axis to align e0 to x
         t1 = np.arctan(e0[1] / e0[0])
         # rotate e0 by -t1
@@ -702,9 +711,10 @@
     """
     _type_name = "cut_region"
     _con_args = ("base_object", "conditionals")
-    def __init__(self, base_object, conditionals, ds = None,
-                 field_parameters = None, data_source = None):
-        super(YTCutRegionBase, self).__init__(base_object.center, ds, field_parameters, data_source)
+    def __init__(self, base_object, conditionals, ds=None,
+                 field_parameters=None, data_source=None):
+        super(YTCutRegionBase, self).__init__(base_object.center, ds,
+                                              field_parameters, data_source)
         self.conditionals = ensure_list(conditionals)
         self.base_object = base_object
         self._selector = None
@@ -780,4 +790,3 @@
     @property
     def fwidth(self):
         return self.base_object.fwidth[self._cond_ind,:]
-


https://bitbucket.org/yt_analysis/yt/commits/23318b2b9fda/
Changeset:   23318b2b9fda
Branch:      yt
User:        drudd
Date:        2014-10-27 19:04:19+00:00
Summary:     Fix test_data_collection test
Affected #:  1 file

diff -r b6617604e57a67ef9d0bd803a9f704babe1b4528 -r 23318b2b9fda54f5d9fd314529c4a49a8fc3bdb4 yt/data_objects/tests/test_data_collection.py
--- a/yt/data_objects/tests/test_data_collection.py
+++ b/yt/data_objects/tests/test_data_collection.py
@@ -8,7 +8,7 @@
     # We decompose in different ways
     for nprocs in [1, 2, 4, 8]:
         ds = fake_random_ds(16, nprocs = nprocs)
-        coll = ds.data_collection(ds.domain_center, ds.index.grids)
+        coll = ds.data_collection(ds.index.grids)
         crho = coll["density"].sum(dtype="float64").to_ndarray()
         grho = np.sum([g["density"].sum(dtype="float64") for g in ds.index.grids],
                       dtype="float64")


https://bitbucket.org/yt_analysis/yt/commits/18c5f92a3bab/
Changeset:   18c5f92a3bab
Branch:      yt
User:        drudd
Date:        2014-10-27 19:15:19+00:00
Summary:     Add tests for compose selector
Affected #:  1 file

diff -r 23318b2b9fda54f5d9fd314529c4a49a8fc3bdb4 -r 18c5f92a3babc86b0e3015e2d0889ff46136bba5 yt/data_objects/tests/test_compose.py
--- /dev/null
+++ b/yt/data_objects/tests/test_compose.py
@@ -0,0 +1,162 @@
+from yt.testing import *
+from yt.fields.local_fields import add_field
+from yt.units.yt_array import YTArray, uintersect1d
+
+def setup():
+    from yt.config import ytcfg
+    ytcfg["yt","__withintesting"] = "True"
+    def _ID(field, data):
+        width = data.ds.domain_right_edge - data.ds.domain_left_edge
+        min_dx = YTArray(1.0/8192, input_units='code_length',
+                         registry=data.ds.unit_registry)
+        delta = width / min_dx
+        x = data['x'] - min_dx / 2.
+        y = data['y'] - min_dx / 2.
+        z = data['z'] - min_dx / 2.
+        xi = x / min_dx
+        yi = y / min_dx
+        zi = z / min_dx
+        index = xi + delta[0] * (yi + delta[1] * zi)
+        index = index.astype('int64')
+        return index
+
+    add_field("ID", function=_ID, units=None)
+
+def test_compose_no_overlap():
+    r"""Test to make sure that composed data objects that don't
+    overlap behave the way we expect (return empty collections)
+    """
+
+    def _ID(field, data):
+        width = data.ds.domain_right_edge - data.ds.domain_left_edge
+        min_dx = YTArray(1.0/8192, input_units='code_length',
+                         registry=data.ds.unit_registry)
+        delta = width / min_dx
+        x = data['x'] - min_dx / 2.
+        y = data['y'] - min_dx / 2.
+        z = data['z'] - min_dx / 2.
+        xi = x / min_dx
+        yi = y / min_dx
+        zi = z / min_dx
+        index = xi + delta[0] * (yi + delta[1] * zi)
+        index = index.astype('int64')
+        return index
+
+    empty = np.array([])
+    for n in [1, 2, 4, 8]:
+        ds = fake_random_ds(64, nprocs=n)
+        ds.add_field("ID", function=_ID)
+        ds.index
+
+        # position parameters for initial region
+        center = [0.25]*3
+        left_edge = [0.1]*3
+        right_edge = [0.4]*3
+        normal = [1, 0, 0]
+        radius = height = 0.15
+
+        # initial 3D regions
+        sources = [ds.sphere(center, radius),
+                   ds.region(center, left_edge, right_edge),
+                   ds.disk(center, normal, radius, height)]
+
+        # position parameters for non-overlapping regions
+        center = [0.75]*3
+        left_edge = [0.6]*3
+        right_edge = [0.9]*3
+
+        # subselect non-overlapping 0, 1, 2, 3D regions
+        for data1 in sources:
+            data2 = ds.sphere(center, radius, data_source=data1)
+            yield assert_array_equal, data2['ID'], empty
+
+            data2 = ds.region(center, left_edge, right_edge, data_source=data1)
+            yield assert_array_equal, data2['ID'], empty  
+
+            data2 = ds.disk(center, normal, radius, height, data_source=data1)
+            yield assert_array_equal, data2['ID'], empty
+
+            for d in range(3):
+                data2 = ds.slice(d, center[d], data_source=data1)
+                yield assert_array_equal, data2['ID'], empty
+
+            for d in range(3):
+                data2 = ds.ortho_ray(d, center[0:d] + center[d+1:], data_source=data1)
+                yield assert_array_equal, data2['ID'], empty
+
+            data2 = ds.point(center, data_source=data1)
+            yield assert_array_equal, data2['ID'], empty
+
+def test_compose_overlap():
+    r"""Test to make sure that composed data objects that do
+    overlap behave the way we expect 
+    """
+    empty = np.array([])
+    for n in [1, 2, 4, 8]:
+        ds = fake_random_ds(64, nprocs=n)
+        ds.index
+
+        # position parameters for initial region
+        center = [0.4, 0.5, 0.5]
+        left_edge = [0.1]*3
+        right_edge = [0.7]*3
+        normal = [1, 0, 0]
+        radius = height = 0.15
+
+        # initial 3D regions
+        sources = [ds.sphere(center, radius),
+                   ds.region(center, left_edge, right_edge),
+                   ds.disk(center, normal, radius, height)]
+
+        # position parameters for overlapping regions
+        center = [0.6, 0.5, 0.5]
+        left_edge = [0.3]*3
+        right_edge = [0.9]*3
+
+        # subselect overlapping 0, 1, 2, 3D regions
+        for data1 in sources:
+            id1 = data1['ID']
+
+            data2 = ds.sphere(center, radius)
+            data3 = ds.sphere(center, radius, data_source=data1)
+            id2 = data2['ID']
+            id3 = data3['ID']
+            id3.sort()
+            yield assert_array_equal, uintersect1d(id1, id2), id3
+
+            data2 = ds.region(center, left_edge, right_edge)
+            data3 = ds.region(center, left_edge, right_edge, data_source=data1)
+            id2 = data2['ID']
+            id3 = data3['ID']
+            id3.sort()
+            yield assert_array_equal, uintersect1d(id1, id2), id3
+
+            data2 = ds.disk(center, normal, radius, height)
+            data3 = ds.disk(center, normal, radius, height, data_source=data1)
+            id2 = data2['ID']
+            id3 = data3['ID']
+            id3.sort()
+            yield assert_array_equal, uintersect1d(id1, id2), id3
+
+            for d in range(3):
+                data2 = ds.slice(d, center[d])
+                data3 = ds.slice(d, center[d], data_source=data1)
+                id2 = data2['ID']
+                id3 = data3['ID']
+                id3.sort()
+                yield assert_array_equal, uintersect1d(id1, id2), id3
+
+            for d in range(3):
+                data2 = ds.ortho_ray(d, center[0:d] + center[d+1:])
+                data3 = ds.ortho_ray(d, center[0:d] + center[d+1:], data_source=data1)
+                id2 = data2['ID']
+                id3 = data3['ID']
+                id3.sort()
+                yield assert_array_equal, uintersect1d(id1, id2), id3
+
+            data2 = ds.point(center)
+            data3 = ds.point(center, data_source=data1)
+            id2 = data2['ID']
+            id3 = data3['ID']
+            id3.sort()
+            yield assert_array_equal, uintersect1d(id1, id2), id3


https://bitbucket.org/yt_analysis/yt/commits/0f47064b69d2/
Changeset:   0f47064b69d2
Branch:      yt
User:        drudd
Date:        2014-10-27 19:18:18+00:00
Summary:     Merged with upstream, including PR #1287
Affected #:  81 files

diff -r 18c5f92a3babc86b0e3015e2d0889ff46136bba5 -r 0f47064b69d24e80aad7fc463ac97c5a82affc61 doc/source/analyzing/analysis_modules/halo_transition.rst
--- a/doc/source/analyzing/analysis_modules/halo_transition.rst
+++ b/doc/source/analyzing/analysis_modules/halo_transition.rst
@@ -52,7 +52,7 @@
    data_ds = yt.load('Enzo_64/RD0006/RedshiftOutput0006')
    hc = HaloCatalog(data_ds=data_ds, finder_method='hop')
    hc.create()
-   ad = hc.all_data()
+   ad = hc.halos_ds.all_data()
    masses = ad['particle_mass'][:]
 
 

diff -r 18c5f92a3babc86b0e3015e2d0889ff46136bba5 -r 0f47064b69d24e80aad7fc463ac97c5a82affc61 doc/source/analyzing/units/3)_Comoving_units_and_code_units.ipynb
--- a/doc/source/analyzing/units/3)_Comoving_units_and_code_units.ipynb
+++ b/doc/source/analyzing/units/3)_Comoving_units_and_code_units.ipynb
@@ -1,7 +1,7 @@
 {
  "metadata": {
   "name": "",
-  "signature": "sha256:8ba193cc3867e2185133bbf3952bd5834e6c63993208635c71cf55fa6f27b491"
+  "signature": "sha256:67eb4b2a3d1017bac09209ebc939e8c1fe154660fa15f76862019dfc8652ec32"
  },
  "nbformat": 3,
  "nbformat_minor": 0,
@@ -305,9 +305,95 @@
      "language": "python",
      "metadata": {},
      "outputs": []
+    },
+    {
+     "cell_type": "heading",
+     "level": 3,
+     "metadata": {},
+     "source": [
+      "Overriding Code Unit Definitions"
+     ]
+    },
+    {
+     "cell_type": "markdown",
+     "metadata": {},
+     "source": [
+      "On occasion, you might have a dataset for a supported frontend that does not have the conversions to code units accessible (for example, Athena data) or you may want to change them outright. `yt` provides a mechanism so that one may provide their own code unit definitions to `load`, which override the default rules for a given frontend for defining code units. This is provided through the `units_override` dictionary. We'll use an example of an Athena dataset. First, a call to `load` without `units_override`:"
+     ]
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "ds1 = yt.load(\"MHDSloshing/virgo_low_res.0054.vtk\")\n",
+      "print ds1.length_unit\n",
+      "print ds1.mass_unit\n",
+      "print ds1.time_unit\n",
+      "sp1 = ds1.sphere(\"c\",(0.1,\"unitary\"))\n",
+      "print sp1[\"density\"]"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": []
+    },
+    {
+     "cell_type": "markdown",
+     "metadata": {},
+     "source": [
+      "This is a galaxy cluster dataset, so it is not likely that the units of density are correct. We happen to know that the unit definitions are different, so we can override the units:"
+     ]
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "units_override = {\"length_unit\":(1.0,\"Mpc\"),\n",
+      "                  \"time_unit\":(1.0,\"Myr\"),\n",
+      "                  \"mass_unit\":(1.0e14,\"Msun\")}"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": []
+    },
+    {
+     "cell_type": "markdown",
+     "metadata": {},
+     "source": [
+      "`units_override` can take the following keys:\n",
+      "\n",
+      "* `length_unit`\n",
+      "* `time_unit`\n",
+      "* `mass_unit`\n",
+      "* `magnetic_unit`\n",
+      "* `temperature_unit`\n",
+      "\n",
+      "and the associated values can be (value, unit) tuples, `YTQuantities`, or floats (in the latter case they are assumed to have the corresponding cgs unit). "
+     ]
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "ds2 = yt.load(\"MHDSloshing/virgo_low_res.0054.vtk\", units_override=units_override)\n",
+      "print ds2.length_unit\n",
+      "print ds2.mass_unit\n",
+      "print ds2.time_unit\n",
+      "sp2 = ds2.sphere(\"c\",(0.1,\"unitary\"))\n",
+      "print sp2[\"density\"]"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": []
+    },
+    {
+     "cell_type": "markdown",
+     "metadata": {},
+     "source": [
+      "This option should be used very carefully, and *only* if you know that the dataset does not provide units or that the unit definitions generated are incorrect for some reason. "
+     ]
     }
    ],
    "metadata": {}
   }
  ]
-}
+}
\ No newline at end of file

diff -r 18c5f92a3babc86b0e3015e2d0889ff46136bba5 -r 0f47064b69d24e80aad7fc463ac97c5a82affc61 doc/source/cookbook/gadget_notebook.rst
--- /dev/null
+++ b/doc/source/cookbook/gadget_notebook.rst
@@ -0,0 +1,7 @@
+.. _gadget-notebook:
+
+Using yt to view and analyze Gadget outputs
+++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. notebook:: yt_gadget_analysis.ipynb
+

diff -r 18c5f92a3babc86b0e3015e2d0889ff46136bba5 -r 0f47064b69d24e80aad7fc463ac97c5a82affc61 doc/source/cookbook/tipsy_and_yt.ipynb
--- a/doc/source/cookbook/tipsy_and_yt.ipynb
+++ b/doc/source/cookbook/tipsy_and_yt.ipynb
@@ -1,7 +1,16 @@
 {
  "metadata": {
+  "kernelspec": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 2
+   },
+   "display_name": "IPython (Python 2)",
+   "language": "python",
+   "name": "python2"
+  },
   "name": "",
-  "signature": "sha256:2ae8b1599fa35495fa1bb8deb1c67094e3529e70093b30e20354122cd9403d9d"
+  "signature": "sha256:1f6e5cf50123ad75676f035a2a36cd60f4987832462907b9cb78cb25548d8afd"
  },
  "nbformat": 3,
  "nbformat_minor": 0,
@@ -10,14 +19,6 @@
    "cells": [
     {
      "cell_type": "heading",
-     "level": 1,
-     "metadata": {},
-     "source": [
-      "Using yt to view and analyze Tipsy outputs from Gasoline"
-     ]
-    },
-    {
-     "cell_type": "heading",
      "level": 2,
      "metadata": {},
      "source": [
@@ -193,4 +194,4 @@
    "metadata": {}
   }
  ]
-}
+}
\ No newline at end of file

diff -r 18c5f92a3babc86b0e3015e2d0889ff46136bba5 -r 0f47064b69d24e80aad7fc463ac97c5a82affc61 doc/source/cookbook/yt_gadget_analysis.ipynb
--- /dev/null
+++ b/doc/source/cookbook/yt_gadget_analysis.ipynb
@@ -0,0 +1,263 @@
+{
+ "metadata": {
+  "kernelspec": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 2
+   },
+   "display_name": "IPython (Python 2)",
+   "language": "python",
+   "name": "python2"
+  },
+  "name": "",
+  "signature": "sha256:42e2b7cc4c70a501432f24bc0d62d0723605d50196399148dd365d28387dd55d"
+ },
+ "nbformat": 3,
+ "nbformat_minor": 0,
+ "worksheets": [
+  {
+   "cells": [
+    {
+     "cell_type": "heading",
+     "level": 2,
+     "metadata": {},
+     "source": [
+      "Loading the data"
+     ]
+    },
+    {
+     "cell_type": "markdown",
+     "metadata": {},
+     "source": [
+      "First we set up our imports:"
+     ]
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "import yt\n",
+      "import numpy as np\n",
+      "import yt.units as units\n",
+      "import pylab"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": []
+    },
+    {
+     "cell_type": "markdown",
+     "metadata": {},
+     "source": [
+      "First we load the data set, specifying both the unit length/mass/velocity, as well as the size of the bounding box (which should encapsulate all the particles in the data set)\n",
+      "\n",
+      "At the end, we flatten the data into \"ad\" in case we want access to the raw simulation data"
+     ]
+    },
+    {
+     "cell_type": "markdown",
+     "metadata": {},
+     "source": [
+      ">This dataset is available for download at http://yt-project.org/data/GadgetDiskGalaxy.tar.gz (430 MB)."
+     ]
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "fname = 'GadgetDiskGalaxy/snapshot_200.hdf5'\n",
+      "\n",
+      "unit_base = {'UnitLength_in_cm'         : 3.08568e+21,\n",
+      "             'UnitMass_in_g'            :   1.989e+43,\n",
+      "             'UnitVelocity_in_cm_per_s' :      100000}\n",
+      "\n",
+      "bbox_lim = 1e5 #kpc\n",
+      "\n",
+      "bbox = [[-bbox_lim,bbox_lim],\n",
+      "        [-bbox_lim,bbox_lim],\n",
+      "        [-bbox_lim,bbox_lim]]\n",
+      " \n",
+      "ds = yt.load(fname,unit_base=unit_base,bounding_box=bbox)\n",
+      "ds.index\n",
+      "ad= ds.all_data()"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": []
+    },
+    {
+     "cell_type": "markdown",
+     "metadata": {},
+     "source": [
+      "Let's make a projection plot to look at the entire volume"
+     ]
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "px = yt.ProjectionPlot(ds, 'x', ('gas', 'density'))\n",
+      "px.show()"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": []
+    },
+    {
+     "cell_type": "markdown",
+     "metadata": {},
+     "source": [
+      "Let's print some quantities about the domain, as well as the physical properties of the simulation\n"
+     ]
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "print 'left edge: ',ds.domain_left_edge\n",
+      "print 'right edge: ',ds.domain_right_edge\n",
+      "print 'center: ',ds.domain_center"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": []
+    },
+    {
+     "cell_type": "markdown",
+     "metadata": {},
+     "source": [
+      "We can also see the fields that are available to query in the dataset"
+     ]
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "sorted(ds.field_list)"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": []
+    },
+    {
+     "cell_type": "markdown",
+     "metadata": {},
+     "source": [
+      "Let's create a data object that represents the full simulation domain, and find the total mass in gas and dark matter particles contained in it:"
+     ]
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "ad = ds.all_data()\n",
+      "\n",
+      "# total_mass returns a list, representing the total gas and dark matter + stellar mass, respectively\n",
+      "print [tm.in_units('Msun') for tm in ad.quantities.total_mass()]"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": []
+    },
+    {
+     "cell_type": "markdown",
+     "metadata": {},
+     "source": [
+      "Now let's say we want to zoom in on the box (since clearly the bounding we chose initially is much larger than the volume containing the gas particles!), and center on wherever the highest gas density peak is.  First, let's find this peak:"
+     ]
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "density = ad[(\"PartType0\",\"density\")]\n",
+      "wdens = np.where(density == np.max(density))\n",
+      "coordinates = ad[(\"PartType0\",\"Coordinates\")]\n",
+      "center = coordinates[wdens][0]\n",
+      "print 'center = ',center"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": []
+    },
+    {
+     "cell_type": "markdown",
+     "metadata": {},
+     "source": [
+      "Set up the box to zoom into"
+     ]
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "new_box_size = ds.quan(250,'code_length')\n",
+      "\n",
+      "left_edge = center - new_box_size/2\n",
+      "right_edge = center + new_box_size/2\n",
+      "\n",
+      "print new_box_size.in_units('Mpc')\n",
+      "print left_edge.in_units('Mpc')\n",
+      "print right_edge.in_units('Mpc')"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": []
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "ad2= ds.region(center=center, left_edge=left_edge, right_edge=right_edge)"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": []
+    },
+    {
+     "cell_type": "markdown",
+     "metadata": {},
+     "source": [
+      "Using this new data object, let's confirm that we're only looking at a subset of the domain by first calculating thte total mass in gas and particles contained in the subvolume:"
+     ]
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "print [tm.in_units('Msun') for tm in ad.quantities.total_mass()]"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": []
+    },
+    {
+     "cell_type": "markdown",
+     "metadata": {},
+     "source": [
+      "And then by visualizing what the new zoomed region looks like"
+     ]
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "px = yt.ProjectionPlot(ds, 'x', ('gas', 'density'), center=center, width=new_box_size)\n",
+      "px.show()"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": []
+    },
+    {
+     "cell_type": "markdown",
+     "metadata": {},
+     "source": [
+      "Cool - there's a disk galaxy there!"
+     ]
+    }
+   ],
+   "metadata": {}
+  }
+ ]
+}
\ No newline at end of file

diff -r 18c5f92a3babc86b0e3015e2d0889ff46136bba5 -r 0f47064b69d24e80aad7fc463ac97c5a82affc61 doc/source/developing/creating_frontend.rst
--- a/doc/source/developing/creating_frontend.rst
+++ b/doc/source/developing/creating_frontend.rst
@@ -7,14 +7,14 @@
             have a question about making a custom derived quantity, please
             contact the mailing list.
 
-yt is designed to support analysis and visualization of data from multiple
-different simulation codes, although it has so far been most successfully
-applied to Adaptive Mesh Refinement (AMR) data. For a list of codes and the
-level of support they enjoy, see :ref:`code-support`.
+yt is designed to support analysis and visualization of data from
+multiple different simulation codes. For a list of codes and the level
+of support they enjoy, see :ref:`code-support`.
 
-We'd like to support a broad range of codes, both AMR-based and otherwise. To
-add support for a new code, a few things need to be put into place. These
-necessary structures can be classified into a couple categories:
+We'd like to support a broad range of codes, both Adaptive Mesh
+Refinement (AMR)-based and otherwise. To add support for a new code, a
+few things need to be put into place. These necessary structures can
+be classified into a couple categories:
 
  * Data meaning: This is the set of parameters that convert the data into
    physically relevant units; things like spatial and mass conversions, time
@@ -33,73 +33,147 @@
 If you are interested in adding a new code, be sure to drop us a line on
 `yt-dev <http://lists.spacepope.org/listinfo.cgi/yt-dev-spacepope.org>`_!
 
-To get started, make a new directory in ``yt/frontends`` with the name of your
-code -- you can start by copying into it the contents of the ``stream``
-directory, which is a pretty empty format. You'll then have to create a subclass
-of ``Dataset``. This subclass will need to handle conversion between the
-different physical units and the code units; for the most part, the examples of
-``OrionDataset`` and ``EnzoDataset`` should be followed, but
-``ChomboDataset``, as a slightly newer addition, can also be used as an
-instructive example -- be sure to add an ``_is_valid`` classmethod that will
-verify if a filename is valid for that output type, as that is how "load" works.
+To get started, make a new directory in ``yt/frontends`` with the name
+of your code.  Copying the contents of the ``yt/frontends/_skeleton``
+directory will give you much of the boilerplate for the required
+classes and methods.  In particular, you'll have to create a
+subclass of ``Dataset`` in the ``data_structures.py`` file. This subclass
+will need to handle conversion between the different physical units
+and the code units (typically in the ``_set_code_unit_attributes()``
+method), read in metadata describing the overall data on disk (via the
+``_parse_parameter_file()`` method), and provide a ``classmethod``
+called ``_is_valid()`` that lets the ``yt.load`` method help identify an
+input file as belonging to *this* particular ``Dataset`` subclass.
+For the most part, the examples of
+``yt.frontends.boxlib.data_structures.OrionDataset`` and
+``yt.frontends.enzo.data_structures.EnzoDataset`` should be followed,
+but ``yt.frontends.chombo.data_structures.ChomboDataset``, as a
+slightly newer addition, can also be used as an instructive example.
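+
+A minimal sketch of such a subclass, for a hypothetical frontend
+called ``mycode`` (the index and field-info classes named here are
+placeholders for the classes discussed below):
+
+.. code-block:: python
+
+    from yt.data_objects.static_output import Dataset
+
+    class MyCodeDataset(Dataset):
+        _index_class = MyCodeHierarchy        # hypothetical GridIndex subclass
+        _field_info_class = MyCodeFieldInfo   # hypothetical FieldInfoContainer
+
+        def _set_code_unit_attributes(self):
+            # map code units onto physical units
+            self.length_unit = self.quan(1.0, "cm")
+            self.mass_unit = self.quan(1.0, "g")
+            self.time_unit = self.quan(1.0, "s")
+
+        def _parse_parameter_file(self):
+            # fill in domain_left_edge, domain_right_edge,
+            # domain_dimensions, current_time, and so on from disk
+            pass
+
+        @classmethod
+        def _is_valid(cls, *args, **kwargs):
+            # return True only for outputs this frontend understands
+            return args[0].endswith(".mycode")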
 
-A new set of fields must be added in the file ``fields.py`` in that directory.
-For the most part this means subclassing ``CodeFieldInfoContainer`` and adding
-the necessary fields specific to that code. Here is the Chombo field container:
+A new set of fields must be added in the file ``fields.py`` in your
+new directory.  For the most part this means subclassing 
+``FieldInfoContainer`` and adding the necessary fields specific to
+your code. Here is a snippet from the base BoxLib field container:
 
 .. code-block:: python
 
-    from UniversalFields import *
-    class ChomboFieldContainer(CodeFieldInfoContainer):
-        _shared_state = {}
-        _field_list = {}
-    ChomboFieldInfo = ChomboFieldContainer()
-    add_chombo_field = ChomboFieldInfo.add_field
+    from yt.fields.field_info_container import FieldInfoContainer
+    class BoxlibFieldInfo(FieldInfoContainer):
+        known_other_fields = (
+            ("density", (rho_units, ["density"], None)),
+            ("eden", (eden_units, ["energy_density"], None)),
+            ("xmom", (mom_units, ["momentum_x"], None)),
+            ("ymom", (mom_units, ["momentum_y"], None)),
+            ("zmom", (mom_units, ["momentum_z"], None)),
+            ("temperature", ("K", ["temperature"], None)),
+            ("Temp", ("K", ["temperature"], None)),
+            ("x_velocity", ("cm/s", ["velocity_x"], None)),
+            ("y_velocity", ("cm/s", ["velocity_y"], None)),
+            ("z_velocity", ("cm/s", ["velocity_z"], None)),
+            ("xvel", ("cm/s", ["velocity_x"], None)),
+            ("yvel", ("cm/s", ["velocity_y"], None)),
+            ("zvel", ("cm/s", ["velocity_z"], None)),
+        )
 
-The field container is a shared state object, which is why we explicitly set
-``_shared_state`` equal to a mutable.
+        known_particle_fields = (
+            ("particle_mass", ("code_mass", [], None)),
+            ("particle_position_x", ("code_length", [], None)),
+            ("particle_position_y", ("code_length", [], None)),
+            ("particle_position_z", ("code_length", [], None)),
+            ("particle_momentum_x", (mom_units, [], None)),
+            ("particle_momentum_y", (mom_units, [], None)),
+            ("particle_momentum_z", (mom_units, [], None)),
+            ("particle_angmomen_x", ("code_length**2/code_time", [], None)),
+            ("particle_angmomen_y", ("code_length**2/code_time", [], None)),
+            ("particle_angmomen_z", ("code_length**2/code_time", [], None)),
+            ("particle_id", ("", ["particle_index"], None)),
+            ("particle_mdot", ("code_mass/code_time", [], None)),
+        )
+
+The tuples ``known_other_fields`` and ``known_particle_fields``
+contain entries of the form ``("name", ("units",
+["fields", "to", "alias"], "display_name"))``.  ``"name"`` is the name
+of a field stored on-disk in the dataset. ``"units"`` corresponds to
+the units of that field.  The list ``["fields", "to", "alias"]``
+allows you to specify additional aliases to this particular field; for
+example, if your on-disk field for the x-direction velocity were
+``"x-direction-velocity"``, maybe you'd prefer to alias to the more
+terse name of ``"xvel"``.  ``"display_name"`` is an optional parameter
+that can be used to specify how you want the field to be displayed on
+a plot; this can be LaTeX code, for example the density field could
+have a display name of ``r"\rho"``.  Omitting the ``"display_name"``
+will result in using a capitalized version of the ``"name"``.
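+
+For instance, the hypothetical on-disk field ``"x-direction-velocity"``
+mentioned above could be declared with a single entry like this (a
+sketch, not taken from any real frontend):
+
+.. code-block:: python
+
+    known_other_fields = (
+        # on-disk name, (units, list of aliases, display name)
+        ("x-direction-velocity", ("cm/s", ["velocity_x", "xvel"], None)),
+    )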
 
 Data Localization Structures
 ----------------------------
 
-As of right now, the "grid patch" mechanism is going to remain in yt, however in
-the future that may change. As such, some other output formats -- like Gadget --
-may be shoe-horned in, slightly.
+These functions and classes let yt know about how the arrangement of
+data on disk corresponds to the physical arrangement of data within
+the simulation.  yt has grid datastructures for handling both
+patch-based and octree-based AMR codes.  The terms 'patch-based'
+and 'octree-based' are used somewhat loosely here.  For example,
+traditionally, the FLASH code used the paramesh AMR library, which is
+based on a tree structure, but the FLASH frontend in yt utilizes yt's
+patch-based datastructures.  It is up to the frontend developer to
+determine which yt datastructures best match the datastructures of
+their simulation code.
 
-Hierarchy
-^^^^^^^^^
+Both approaches -- patch-based and octree-based -- have a concept of a
+*Hierarchy* or *Index* (used somewhat interchangeably in the code) of
+datastructures and something that describes the elements that make up
+the Hierarchy or Index.  For patch-based codes, the Index is a
+collection of ``AMRGridPatch`` objects that describe a block of zones.
+For octree-based codes, the Index contains datastructures that hold
+information about the individual octs, namely an ``OctreeContainer``.
 
-To set up data localization, an ``AMRHierarchy`` subclass must be added in the
-file ``data_structures.py``. The index object must override the following
-methods:
+Hierarchy or Index
+^^^^^^^^^^^^^^^^^^
 
- * ``_detect_fields``: ``self.field_list`` must be populated as a list of
-   strings corresponding to "native" fields in the data files.
- * ``_setup_classes``: it's probably safe to crib this from one of the other
-   ``AMRHierarchy`` subclasses.
- * ``_count_grids``: this must set self.num_grids to be the total number of
-   grids in the simulation.
- * ``_parse_index``: this must fill in ``grid_left_edge``,
+To set up data localization, a ``GridIndex`` subclass for patch-based
+codes or an ``OctreeIndex`` subclass for octree-based codes must be
+added in the file ``data_structures.py``. Examples of these different
+types of ``Index`` include
+``yt.frontends.chombo.data_structures.ChomboHierarchy`` for patch-based
+codes and ``yt.frontends.ramses.data_structures.RAMSESIndex`` for
+octree-based codes.
+
+For the most part, the ``GridIndex`` subclass must override (at a
+minimum) the following methods:
+
+ * ``_detect_output_fields()``: ``self.field_list`` must be populated as a list
+   of strings corresponding to "native" fields in the data files.
+ * ``_count_grids()``: this must set ``self.num_grids`` to be the total number
+   of grids (equivalently, ``AMRGridPatch`` objects) in the simulation.
+ * ``_parse_index()``: this must fill in ``grid_left_edge``,
    ``grid_right_edge``, ``grid_particle_count``, ``grid_dimensions`` and
-   ``grid_levels`` with the appropriate information. Additionally, ``grids``
-   must be an array of grid objects that already know their IDs.
- * ``_populate_grid_objects``: this initializes the grids by calling
-   ``_prepare_grid`` and ``_setup_dx`` on all of them.  Additionally, it should
-   set up ``Children`` and ``Parent`` lists on each grid object.
- * ``_setup_unknown_fields``: If a field is in the data file that yt doesn't
-   already know, this is where you make a guess at it.
- * ``_setup_derived_fields``: ``self.derived_field_list`` needs to be made a
-   list of strings that correspond to all derived fields valid for this
-   index.
+   ``grid_levels`` with the appropriate information.  Each of these variables 
+   is an array, with an entry for each of the ``self.num_grids`` grids.  
+   Additionally, ``grids``  must be an array of ``AMRGridPatch`` objects that 
+   already know their IDs.
+ * ``_populate_grid_objects()``: this initializes the grids by calling
+   ``_prepare_grid()`` and ``_setup_dx()`` on all of them.  Additionally, it
+   should set up ``Children`` and ``Parent`` lists on each grid object (a
+   minimal sketch of such an index subclass follows this list).
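+
+Here is that sketch, for a hypothetical ``mycode`` frontend with a
+single hard-coded grid; ``MyCodeGrid`` stands in for the grid class
+described in the next section:
+
+.. code-block:: python
+
+    import numpy as np
+    from yt.geometry.grid_geometry_handler import GridIndex
+
+    class MyCodeHierarchy(GridIndex):
+        grid = MyCodeGrid  # hypothetical AMRGridPatch subclass
+
+        def _detect_output_fields(self):
+            # one ("frontend", "field") entry per on-disk field
+            self.field_list = [("mycode", "density")]
+
+        def _count_grids(self):
+            self.num_grids = 1
+
+        def _parse_index(self):
+            # each of these arrays has one row per grid
+            self.grid_left_edge[0, :] = [0.0, 0.0, 0.0]
+            self.grid_right_edge[0, :] = [1.0, 1.0, 1.0]
+            self.grid_dimensions[0, :] = [16, 16, 16]
+            self.grid_particle_count[0, 0] = 0
+            self.grid_levels[0, 0] = 0
+            self.grids = np.array([self.grid(0, self, 0)], dtype="object")
+
+        def _populate_grid_objects(self):
+            for g in self.grids:
+                g._prepare_grid()
+                g._setup_dx()
+                g.Parent = None
+                g.Children = []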
 
-For the most part, the ``ChomboHierarchy`` should be the first place to look for
-hints on how to do this; ``EnzoHierarchy`` is also instructive.
+The ``OctreeIndex`` has somewhat analogous methods, but often with
+different names; both ``OctreeIndex`` and ``GridIndex`` are subclasses
+of the ``Index`` class.  In particular, for the ``OctreeIndex``, the
+method ``_initialize_oct_handler()`` sets up much of the oct
+metadata that is analogous to the grid metadata created in the
+``GridIndex`` methods ``_count_grids()``, ``_parse_index()``, and
+``_populate_grid_objects()``.
 
 Grids
 ^^^^^
 
-A new grid object, subclassing ``AMRGridPatch``, will also have to be added.
-This should go in ``data_structures.py``. For the most part, this may be all
+.. note:: This section only applies to the approach using yt's patch-based
+          datastructures.  For the octree-based approach, one does not create
+          a grid object, but rather an ``OctreeSubset``, which has methods
+          for filling out portions of the octree structure.  Again, see the
+          code in ``yt.frontends.ramses.data_structures`` for an example of
+          the octree approach.
+
+A new grid object, subclassing ``AMRGridPatch``, will also have to be added in
+``data_structures.py``. For the most part, this may be all
 that is needed:
 
 .. code-block:: python
@@ -115,32 +189,46 @@
             self.Level = level
 
 
-Even the most complex grid object, ``OrionGrid``, is still relatively simple.
+Even one of the more complex grid objects,
+``yt.frontends.boxlib.BoxlibGrid``, is still relatively simple.
 
 Data Reading Functions
 ----------------------
 
-In ``io.py``, there are a number of IO handlers that handle the mechanisms by
-which data is read off disk.  To implement a new data reader, you must subclass
-``BaseIOHandler`` and override the following methods:
+In ``io.py``, there are a number of IO handlers that handle the
+mechanisms by which data is read off disk.  To implement a new data
+reader, you must subclass ``BaseIOHandler``.  The various frontend IO
+handlers are stored in an IO registry -- essentially a dictionary that
+uses the name of the frontend as a key, and the specific IO handler as
+a value.  It is important, therefore, to set the ``_dataset_type``
+attribute of your subclass, which is what is used as the key in the IO
+registry.  For example:
 
- * ``_read_field_names``: this routine accepts a grid object and must return all
-   the fields in the data file affiliated with that grid. It is used at the
-   initialization of the ``AMRHierarchy`` but likely not later.
- * ``modify``: This accepts a field from a data file and returns it ready to be
-   used by yt. This is used in Enzo data for preloading.
- * ``_read_data_set``: This accepts a grid object and a field name and must
-   return that field, ready to be used by yt as a NumPy array. Note that this
-   presupposes that any actions done in ``modify`` (above) have been executed.
- * ``_read_data_slice``: This accepts a grid object, a field name, an axis and
-   an (integer) coordinate, and it must return a slice through the array at that
-   value.
- * ``preload``: (optional) This accepts a list of grids and a list of datasets
-   and it populates ``self.queue`` (a dict keyed by grid id) with dicts of
-   datasets.
- * ``_read_exception``: (property) This is a tuple of exceptions that can be
-   raised by the data reading to indicate a field does not exist in the file.
+.. code-block:: python
 
+    class IOHandlerBoxlib(BaseIOHandler):
+        _dataset_type = "boxlib_native"
+        ...
+
+At a minimum, one should also override the following methods:
+
+* ``_read_fluid_selection()``: this receives a collection of data "chunks", a
+  selector describing which "chunks" you are concerned with, a list of fields,
+  and the size of the data to read.  It should create and return a dictionary
+  whose keys are the fields, and whose values are numpy arrays containing the
+  data.  The data should actually be read via the ``_read_chunk_data()``
+  method.
+* ``_read_chunk_data()``: this method receives a "chunk" of data along with a
+  list of fields we want to read.  It loops over all the grid objects within
+  the "chunk" of data and reads from disk the specific fields, returning a
+  dictionary whose keys are the fields and whose values are numpy arrays of
+  the data.  A sketch showing how these two methods fit together follows
+  this list.
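+
+Here is that sketch.  The ``mycode`` handler and its
+``_read_grid_field()`` helper are invented for illustration; the
+selector/offset pattern mirrors the BoxLib handler:
+
+.. code-block:: python
+
+    import numpy as np
+    from yt.utilities.io_handler import BaseIOHandler
+
+    class IOHandlerMyCode(BaseIOHandler):
+        _dataset_type = "mycode_native"
+
+        def _read_fluid_selection(self, chunks, selector, fields, size):
+            rv = dict((f, np.empty(size, dtype="float64")) for f in fields)
+            offset = 0
+            for chunk in chunks:
+                data = self._read_chunk_data(chunk, fields)
+                for g in chunk.objs:
+                    for f in fields:
+                        # the selector copies only the zones it wants into rv
+                        n = g.select(selector, data[g.id][f], rv[f], offset)
+                    offset += n
+            return rv
+
+        def _read_chunk_data(self, chunk, fields):
+            # read every requested field for every grid in this chunk
+            data = {}
+            for g in chunk.objs:
+                data[g.id] = dict((f, self._read_grid_field(g, f))
+                                  for f in fields)
+            return data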
+
+If your dataset has particle information, you'll want to override the
+``_read_particle_coords()`` and ``_read_particle_fields()`` methods as
+well.  Each code is going to read data from disk in a different
+fashion, but ``yt.frontends.boxlib.io.IOHandlerBoxlib`` is a decent
+place to start.
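+
+As a sketch, continuing the hypothetical ``mycode`` handler above
+(``_read_particle_block()`` is an invented helper returning an (N, 3)
+position array for ``"Coordinates"`` and an (N,) array otherwise; both
+methods are generators):
+
+.. code-block:: python
+
+    class IOHandlerMyCode(BaseIOHandler):
+        # ... fluid-reading methods as above ...
+
+        def _read_particle_coords(self, chunks, ptf):
+            # yield positions so the selector can decide which particles
+            # fall inside the data object
+            for chunk in chunks:
+                for g in chunk.objs:
+                    for ptype in ptf:
+                        pos = self._read_particle_block(g, ptype, "Coordinates")
+                        yield ptype, (pos[:, 0], pos[:, 1], pos[:, 2])
+
+        def _read_particle_fields(self, chunks, ptf, selector):
+            for chunk in chunks:
+                for g in chunk.objs:
+                    for ptype, field_list in ptf.items():
+                        pos = self._read_particle_block(g, ptype, "Coordinates")
+                        mask = selector.select_points(pos[:, 0], pos[:, 1],
+                                                      pos[:, 2], 0.0)
+                        if mask is None:
+                            continue
+                        for field in field_list:
+                            data = self._read_particle_block(g, ptype, field)
+                            yield (ptype, field), data[mask]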
 
 And that just about covers it. Please feel free to email
 `yt-users <http://lists.spacepope.org/listinfo.cgi/yt-users-spacepope.org>`_ or

diff -r 18c5f92a3babc86b0e3015e2d0889ff46136bba5 -r 0f47064b69d24e80aad7fc463ac97c5a82affc61 doc/source/examining/loading_data.rst
--- a/doc/source/examining/loading_data.rst
+++ b/doc/source/examining/loading_data.rst
@@ -113,29 +113,56 @@
 
 yt works in cgs ("Gaussian") units by default, but Athena data is not
 normally stored in these units. If you would like to convert data to
-cgs units, you may supply conversions for length, time, and mass to ``load``:
+cgs units, you may supply conversions for length, time, and mass to ``load`` using
+the ``units_override`` functionality:
 
 .. code-block:: python
 
    import yt
-   ds = yt.load("id0/cluster_merger.0250.vtk",
-                parameters={"length_unit":(1.0,"Mpc"),
-                            "time_unit"(1.0,"Myr"),
-                            "mass_unit":(1.0e14,"Msun")})
+
+   units_override = {"length_unit":(1.0,"Mpc"),
+                     "time_unit":(1.0,"Myr"),
+                     "mass_unit":(1.0e14,"Msun")}
+
+   ds = yt.load("id0/cluster_merger.0250.vtk", units_override=units_override)
 
 This means that the yt fields, e.g. ``("gas","density")``, ``("gas","x-velocity")``,
 ``("gas","magnetic_field_x")``, will be in cgs units, but the Athena fields, e.g.,
 ``("athena","density")``, ``("athena","velocity_x")``, ``("athena","cell_centered_B_x")``, will be
 in code units.
 
+Alternative values for the following simulation parameters may be specified using a ``parameters``
+dict, accepting the following keys:
+
+* ``gamma``: ratio of specific heats, Type: Float
+* ``geometry``: Geometry type, currently accepts ``"cartesian"`` or ``"cylindrical"``
+* ``periodicity``: Is the domain periodic? Type: Tuple of boolean values corresponding to each dimension
+
+.. code-block:: python
+
+   import yt
+
+   parameters = {"gamma":4./3., "geometry":"cylindrical", "periodicity":(False,False,False)}
+
+   ds = yt.load("relativistic_jet_0000.vtk", parameters=parameters)
+
 .. rubric:: Caveats
 
 * yt primarily works with primitive variables. If the Athena
   dataset contains conservative variables, the yt primitive fields will be generated from the
   conserved variables on disk.
+* Special relativistic datasets may be loaded, but are not fully supported. In particular, the relationships between
+  quantities such as pressure and thermal energy will be incorrect, as it is currently assumed that their relationship
+  is that of an ideal :math:`\gamma`-law equation of state.
 * Domains may be visualized assuming periodicity.
 * Particle list data is currently unsupported.
 
+.. note::
+
+   The old behavior of supplying unit conversions using a ``parameters``
+   dict supplied to ``load`` for Athena datasets is still supported, but is being deprecated in
+   favor of ``units_override``, which provides the same functionality.
+
 .. _loading-orion-data:
 
 BoxLib Data
@@ -983,6 +1010,8 @@
 onto the grid, you can also effectively mimic what your data would look like at
 lower resolution.
 
+See :ref:`gadget-notebook` for an example.
+
 .. _loading-tipsy-data:
 
 Tipsy Data

diff -r 18c5f92a3babc86b0e3015e2d0889ff46136bba5 -r 0f47064b69d24e80aad7fc463ac97c5a82affc61 doc/source/reference/configuration.rst
--- a/doc/source/reference/configuration.rst
+++ b/doc/source/reference/configuration.rst
@@ -40,14 +40,15 @@
 
 .. code-block:: python
 
-   from yt.config import ytcfg
-   ytcfg["yt", "loglevel"] = "1"
+   import yt
+   yt.funcs.mylog.setLevel(1)
 
-   from yt.mods import *
-   ds = load("my_data0001")
+   ds = yt.load("my_data0001")
    ds.print_stats()
 
-This has the same effect as setting ``loglevel = 1`` in the configuration file.
+This has the same effect as setting ``loglevel = 1`` in the configuration
+file. Note that a log level of 1 means that all log messages are printed to
+stdout.  To disable logging, set the log level to 50.
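+
+For example, to silence everything short of critical errors (a log
+level of 50 corresponds to ``CRITICAL`` in the standard Python logging
+hierarchy):
+
+.. code-block:: python
+
+   import yt
+   yt.funcs.mylog.setLevel(50)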
 
 Setting Configuration On the Command Line
 -----------------------------------------
@@ -95,3 +96,5 @@
   quiet.
 * ``stdoutStreamLogging`` (default: ``'False'``): If true, logging is directed
   to stdout rather than stderr
+* ``skip_dataset_cache`` (default: ``'False'``): If true, automatic caching of datasets
+  is turned off.
\ No newline at end of file

diff -r 18c5f92a3babc86b0e3015e2d0889ff46136bba5 -r 0f47064b69d24e80aad7fc463ac97c5a82affc61 doc/source/reference/faq/index.rst
--- a/doc/source/reference/faq/index.rst
+++ b/doc/source/reference/faq/index.rst
@@ -214,26 +214,37 @@
 
 The plugin file is a means of modifying the available fields, quantities, data
 objects and so on without modifying the source code of yt.  The plugin file
-will be executed if it is detected, and it must be:
+will be executed if it is detected.  It must be located in a ``.yt`` folder
+in your home directory and be named ``my_plugins.py``:
 
 .. code-block:: bash
 
    $HOME/.yt/my_plugins.py
 
-The code in this file can thus add fields, add derived quantities, add
+The code in this file can add fields, define functions, define
 datatypes, and on and on.  It is executed at the bottom of ``yt.mods``, and so
-it is provided with the entire namespace available in the module ``yt.mods`` --
-which is the primary entry point to yt, and which contains most of the
-functionality of yt.  For example, if I created a plugin file containing:
+it is provided with the entire namespace available in the module ``yt.mods``.
+For example, if I created a plugin file containing:
 
 .. code-block:: python
 
    def _myfunc(field, data):
        return np.random.random(data["density"].shape)
-   add_field("SomeQuantity", function=_myfunc)
+   add_field("some_quantity", function=_myfunc, units='')
 
-then all of my data objects would have access to the field "SomeQuantity"
-despite its lack of use.
+then all of my data objects would have access to the field "some_quantity".
+Note that the units must be specified as a string, see
+:ref:`data_selection_and_fields` for more details on units and derived fields.
+
+.. note::
+
+   Since ``my_plugins.py`` is parsed inside of ``yt.mods``, you must import
+   yt using ``yt.mods`` to use the plugins file.  If you import using
+   ``import yt``, the plugins file will not be parsed.  You can tell that your
+   plugins file is being parsed by watching for a logging message when you
+   import yt.  Note that both the ``yt load`` and ``iyt`` command line entry
+   points invoke ``from yt.mods import *``, so the ``my_plugins.py`` file
+   will be parsed if you enter yt that way.
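+
+   For example, with the plugin file above, a session might look like
+   this (the dataset name is a placeholder):
+
+   .. code-block:: python
+
+      from yt.mods import *  # parses my_plugins.py
+      ds = load("my_data0001")
+      ad = ds.all_data()
+      print ad["some_quantity"]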
 
 You can also define other convenience functions in your plugin file.  For
 instance, you could define some variables or functions, and even import common

diff -r 18c5f92a3babc86b0e3015e2d0889ff46136bba5 -r 0f47064b69d24e80aad7fc463ac97c5a82affc61 doc/source/visualizing/plots.rst
--- a/doc/source/visualizing/plots.rst
+++ b/doc/source/visualizing/plots.rst
@@ -469,6 +469,21 @@
    slc.set_log('density', False)
    slc.save()
 
+Specifically, a field containing both positive and negative values can be plotted
+with a symlog scale by setting the boolean to ``True`` and providing the extra
+parameter ``linthresh``. In the region around zero (where a log scale would
+diverge), a linear scale is applied to the interval ``(-linthresh, linthresh)``
+and stretched relative to the logarithmic range. You can also plot a positive field
+on a symlog scale, with the linear range covering ``(0, linthresh)``.
+
+.. python-script::
+
+   import yt
+   ds = yt.load("IsolatedGalaxy/galaxy0030/galaxy0030")
+   slc = yt.SlicePlot(ds, 'z', 'x-velocity', width=(30,'kpc'))
+   slc.set_log('x-velocity', True, linthresh=1.e1)
+   slc.save()
+
 Lastly, the :meth:`~yt.visualization.plot_window.AxisAlignedSlicePlot.set_zlim`
 function makes it possible to set a custom colormap range.
 
@@ -531,6 +546,26 @@
    slc.set_buff_size(1600)
    slc.save()
 
+Turning off minorticks
+~~~~~~~~~~~~~~~~~~~~~~
+
+By default minorticks for the x and y axes are turned on.
+The minorticks may be removed using the
+:meth:`~yt.visualization.plot_window.AxisAlignedSlicePlot.set_minorticks`
+function, which accepts either a specific field name or the 'all' alias, along
+with the desired state for the plot as 'on' or 'off'. There is also an analogous
+:meth:`~yt.visualization.plot_window.AxisAlignedSlicePlot.set_cbar_minorticks`
+function for the colorbar axis.
+
+.. python-script::
+
+   import yt
+   ds = yt.load("IsolatedGalaxy/galaxy0030/galaxy0030")
+   slc = yt.SlicePlot(ds, 'z', 'density', width=(10,'kpc'))
+   slc.set_minorticks('all', 'off')
+   slc.set_cbar_minorticks('all', 'off')
+   slc.save()
+
 .. _matplotlib-customization:
 
 Further customization via matplotlib
@@ -743,7 +778,7 @@
 Adjusting the plot units does not require recreating the histogram, so adjusting
 units will always be inexpensive, requiring only an in-place unit conversion.
 
-In the following example we create a a plot of the average density in solar
+In the following example we create a plot of the average density in solar
 masses per cubic parsec as a function of radius in kiloparsecs.
 
 .. python-script::
@@ -892,7 +927,7 @@
 ``fractional`` keyword to ``True``.  When set to ``True``, the value in each bin
 is divided by the sum total from all bins.  These can be turned into cumulative
 distribution functions (CDFs) by setting the ``accumulation`` keyword to
-``True``.  This will make is so that the value in any bin N is the cumulative
+``True``.  This will make it so that the value in any bin N is the cumulative
 sum of all bins from 0 to N.  The direction of the summation can be reversed by
 setting ``accumulation`` to ``-True``.  For ``PhasePlot``, the accumulation can
 be set independently for each axis by setting ``accumulation`` to a list of

diff -r 18c5f92a3babc86b0e3015e2d0889ff46136bba5 -r 0f47064b69d24e80aad7fc463ac97c5a82affc61 yt/__init__.py
--- a/yt/__init__.py
+++ b/yt/__init__.py
@@ -98,7 +98,12 @@
 
 import yt.utilities.physical_constants as physical_constants
 import yt.units as units
-from yt.units.yt_array import YTArray, YTQuantity
+from yt.units.yt_array import \
+    YTArray, \
+    YTQuantity, \
+    uconcatenate, \
+    uintersect1d, \
+    uunion1d
 
 from yt.fields.api import \
     field_plugins, \

diff -r 18c5f92a3babc86b0e3015e2d0889ff46136bba5 -r 0f47064b69d24e80aad7fc463ac97c5a82affc61 yt/analysis_modules/cosmological_observation/light_cone/light_cone.py
--- a/yt/analysis_modules/cosmological_observation/light_cone/light_cone.py
+++ b/yt/analysis_modules/cosmological_observation/light_cone/light_cone.py
@@ -186,13 +186,13 @@
             # Simple error check to make sure more than 100% of box depth
             # is never required.
             if self.light_cone_solution[q]["box_depth_fraction"] > 1.0:
-                mylog.debug(("Warning: box fraction required to go from " +
+                mylog.error(("Warning: box fraction required to go from " +
                              "z = %f to %f is %f") %
                             (self.light_cone_solution[q]["redshift"], z_next,
                              self.light_cone_solution[q]["box_depth_fraction"]))
-                mylog.debug(("Full box delta z is %f, but it is %f to the " +
+                mylog.error(("Full box delta z is %f, but it is %f to the " +
                              "next data dump.") %
-                            (self.light_cone_solution[q]["deltazMax"],
+                            (self.light_cone_solution[q]["dz_max"],
                              self.light_cone_solution[q]["redshift"]-z_next))
 
             # Get projection axis and center.

diff -r 18c5f92a3babc86b0e3015e2d0889ff46136bba5 -r 0f47064b69d24e80aad7fc463ac97c5a82affc61 yt/analysis_modules/cosmological_observation/light_ray/light_ray.py
--- a/yt/analysis_modules/cosmological_observation/light_ray/light_ray.py
+++ b/yt/analysis_modules/cosmological_observation/light_ray/light_ray.py
@@ -169,7 +169,7 @@
                                 (self.light_ray_solution[q]['redshift'], z_next,
                                  self.light_ray_solution[q]['traversal_box_fraction']))
                     mylog.error("Full box delta z is %f, but it is %f to the next data dump." %
-                                (self.light_ray_solution[q]['deltazMax'],
+                                (self.light_ray_solution[q]['dz_max'],
                                  self.light_ray_solution[q]['redshift']-z_next))
 
                 # Get dataset axis and center.

diff -r 18c5f92a3babc86b0e3015e2d0889ff46136bba5 -r 0f47064b69d24e80aad7fc463ac97c5a82affc61 yt/config.py
--- a/yt/config.py
+++ b/yt/config.py
@@ -39,6 +39,7 @@
     storeparameterfiles = 'False',
     parameterfilestore = 'parameter_files.csv',
     maximumstoreddatasets = '500',
+    skip_dataset_cache = 'False',
     loadfieldplugins = 'True',
     pluginfilename = 'my_plugins.py',
     parallel_traceback = 'False',
@@ -97,6 +98,8 @@
 class YTConfigParser(ConfigParser.ConfigParser):
     def __setitem__(self, key, val):
         self.set(key[0], key[1], val)
+    def __getitem__(self, key):
+        return self.get(key[0], key[1])
 
 if os.path.exists(os.path.expanduser("~/.yt/config")):
     ytcfg = YTConfigParser(ytcfg_defaults)

diff -r 18c5f92a3babc86b0e3015e2d0889ff46136bba5 -r 0f47064b69d24e80aad7fc463ac97c5a82affc61 yt/data_objects/data_containers.py
--- a/yt/data_objects/data_containers.py
+++ b/yt/data_objects/data_containers.py
@@ -834,6 +834,8 @@
         self.fields = [k for k in self.field_data if k not in skip]
         if fields is not None:
             self.fields = ensure_list(fields) + self.fields
+        if len(self.fields) == 0:
+            raise ValueError("No fields found to plot in get_pw")
         (bounds, center, display_center) = \
             get_window_parameters(axis, center, width, self.ds)
         pw = PWViewerMPL(self, bounds, fields=self.fields, origin=origin,

diff -r 18c5f92a3babc86b0e3015e2d0889ff46136bba5 -r 0f47064b69d24e80aad7fc463ac97c5a82affc61 yt/data_objects/static_output.py
--- a/yt/data_objects/static_output.py
+++ b/yt/data_objects/static_output.py
@@ -134,7 +134,9 @@
             return obj
         apath = os.path.abspath(filename)
         #if not os.path.exists(apath): raise IOError(filename)
-        if apath not in _cached_datasets:
+        if ytcfg.getboolean("yt","skip_dataset_cache"):
+            obj = object.__new__(cls)
+        elif apath not in _cached_datasets:
             obj = object.__new__(cls)
             if obj._skip_cache is False:
                 _cached_datasets[apath] = obj
@@ -142,7 +144,7 @@
             obj = _cached_datasets[apath]
         return obj
 
-    def __init__(self, filename, dataset_type=None, file_style=None):
+    def __init__(self, filename, dataset_type=None, file_style=None, units_override=None):
         """
         Base class for generating new output types.  Principally consists of
         a *filename* and a *dataset_type* which will be passed on to children.
@@ -157,6 +159,9 @@
         self.known_filters = self.known_filters or {}
         self.particle_unions = self.particle_unions or {}
         self.field_units = self.field_units or {}
+        if units_override is None:
+            units_override = {}
+        self.units_override = units_override
 
         # path stuff
         self.parameter_filename = str(filename)
@@ -667,6 +672,8 @@
 
     def set_code_units(self):
         self._set_code_unit_attributes()
+        # here we override units, if overrides have been provided.
+        self._override_code_units()
         self.unit_registry.modify("code_length", self.length_unit)
         self.unit_registry.modify("code_mass", self.mass_unit)
         self.unit_registry.modify("code_time", self.time_unit)
@@ -679,6 +686,24 @@
             self.unit_registry.add("unitary", float(DW.max() * DW.units.cgs_value),
                                    DW.units.dimensions)
 
+    def _override_code_units(self):
+        if len(self.units_override) == 0:
+            return
+        mylog.warning("Overriding code units. This is an experimental and potentially "+
+                      "dangerous option that may yield inconsistent results, and must be used "+
+                      "very carefully, and only if you know what you want from it.")
+        for unit, cgs in [("length", "cm"), ("time", "s"), ("mass", "g"),
+                          ("velocity","cm/s"), ("magnetic","gauss"), ("temperature","K")]:
+            val = self.units_override.get("%s_unit" % unit, None)
+            if val is not None:
+                if isinstance(val, YTQuantity):
+                    val = (val.v, str(val.units))
+                elif not isinstance(val, tuple):
+                    val = (val, cgs)
+                u = getattr(self, "%s_unit" % unit)
+                mylog.info("Overriding %s_unit: %g %s -> %g %s.", unit, u.v, u.units, val[0], val[1])
+                setattr(self, "%s_unit" % unit, self.quan(val[0], val[1]))
+
     _arr = None
     @property
     def arr(self):

diff -r 18c5f92a3babc86b0e3015e2d0889ff46136bba5 -r 0f47064b69d24e80aad7fc463ac97c5a82affc61 yt/data_objects/tests/test_data_collection.py
--- a/yt/data_objects/tests/test_data_collection.py
+++ b/yt/data_objects/tests/test_data_collection.py
@@ -16,7 +16,7 @@
         yield assert_equal, coll.size, ds.domain_dimensions.prod()
         for gi in range(ds.index.num_grids):
             grids = ds.index.grids[:gi+1]
-            coll = ds.data_collection(ds.domain_center, grids)
+            coll = ds.data_collection(grids)
             crho = coll["density"].sum(dtype="float64")
             grho = np.sum([g["density"].sum(dtype="float64") for g in grids],
                           dtype="float64")

diff -r 18c5f92a3babc86b0e3015e2d0889ff46136bba5 -r 0f47064b69d24e80aad7fc463ac97c5a82affc61 yt/fields/api.py
--- a/yt/fields/api.py
+++ b/yt/fields/api.py
@@ -26,6 +26,11 @@
 from . import particle_fields
 #from . import species_fields
 from . import vector_operations
+from . import local_fields
+from . import my_plugin_fields
+
+from .local_fields import add_field, derived_field
+
 
 from .derived_field import \
     DerivedField, \
@@ -38,6 +43,3 @@
     FieldDetector
 from .field_info_container import \
     FieldInfoContainer
-
-from . import local_fields
-from .local_fields import add_field, derived_field

diff -r 18c5f92a3babc86b0e3015e2d0889ff46136bba5 -r 0f47064b69d24e80aad7fc463ac97c5a82affc61 yt/fields/magnetic_field.py
--- a/yt/fields/magnetic_field.py
+++ b/yt/fields/magnetic_field.py
@@ -55,7 +55,7 @@
     def _plasma_beta(field,data):
         """This assumes that your front end has provided Bx, By, Bz in
         units of Gauss. If you use MKS, make sure to write your own
-        PlasmaBeta field to deal with non-unitary \mu_0.
+        plasma_beta field to deal with non-unitary \mu_0.
         """
         return data[ftype,'pressure']/data[ftype,'magnetic_energy']
     registry.add_field((ftype, "plasma_beta"),
@@ -69,6 +69,10 @@
              units="erg / cm**3")
 
     def _magnetic_field_strength(field,data):
+        """This assumes that your front end has provided Bx, By, Bz in
+        units of Gauss. If you use MKS, make sure to write your own
+        magnetic_field_strength field to deal with non-unitary \mu_0.
+        """
         return np.sqrt(8.*np.pi*data[ftype,"magnetic_energy"])
     registry.add_field((ftype,"magnetic_field_strength"),
                        function=_magnetic_field_strength,
@@ -110,3 +114,17 @@
              units="gauss",
              validators=[ValidateParameter("normal")])
 
+    def _alfven_speed(field,data):
+        """This assumes that your front end has provided Bx, By, Bz in
+        units of Gauss. If you use MKS, make sure to write your own
+        alfven_speed field to deal with non-unitary \mu_0.
+        """
+        return data[ftype,'magnetic_field_strength']/np.sqrt(4.*np.pi*data[ftype,'density'])
+    registry.add_field((ftype, "alfven_speed"), function=_alfven_speed,
+                       units="cm/s")
+
+    def _mach_alfven(field,data):
+        return data[ftype,'velocity_magnitude']/data[ftype,'alfven_speed']
+    registry.add_field((ftype, "mach_alfven"), function=_mach_alfven,
+                       units="dimensionless")
+

diff -r 18c5f92a3babc86b0e3015e2d0889ff46136bba5 -r 0f47064b69d24e80aad7fc463ac97c5a82affc61 yt/fields/my_plugin_fields.py
--- /dev/null
+++ b/yt/fields/my_plugin_fields.py
@@ -0,0 +1,31 @@
+"""
+This is a container for storing fields defined in the my_plugins.py file.
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2013, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+import numpy as np
+
+from .field_plugin_registry import \
+    register_field_plugin
+
+from .field_info_container import \
+    FieldInfoContainer
+
+# Empty FieldInfoContainer
+my_plugins_fields = FieldInfoContainer(None, [], None)
+
+ at register_field_plugin
+def setup_my_plugins_fields(registry, ftype="gas", slice_info=None):
+    # fields end up inside this container when added via add_field in
+    # my_plugins.py. See yt.funcs.enable_plugins to see how this is set up.
+    registry.update(my_plugins_fields)

diff -r 18c5f92a3babc86b0e3015e2d0889ff46136bba5 -r 0f47064b69d24e80aad7fc463ac97c5a82affc61 yt/frontends/_skeleton/api.py
--- a/yt/frontends/_skeleton/api.py
+++ b/yt/frontends/_skeleton/api.py
@@ -19,8 +19,7 @@
       SkeletonDataset
 
 from .fields import \
-      SkeletonFieldInfo, \
-      add_skeleton_field
+      SkeletonFieldInfo
 
 from .io import \
       IOHandlerSkeleton

diff -r 18c5f92a3babc86b0e3015e2d0889ff46136bba5 -r 0f47064b69d24e80aad7fc463ac97c5a82affc61 yt/frontends/_skeleton/data_structures.py
--- a/yt/frontends/_skeleton/data_structures.py
+++ b/yt/frontends/_skeleton/data_structures.py
@@ -13,18 +13,13 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
-import numpy as np
-
-from yt.data_objects.grid_patch import \
-    AMRGridPatch
 from yt.data_objects.grid_patch import \
     AMRGridPatch
 from yt.geometry.grid_geometry_handler import \
     GridIndex
 from yt.data_objects.static_output import \
     Dataset
-from yt.utilities.lib.misc_utilities import \
-    get_box_grids_level
+from .fields import SkeletonFieldInfo
 
 class SkeletonGrid(AMRGridPatch):
     _id_offset = 0
@@ -41,20 +36,15 @@
     def __repr__(self):
         return "SkeletonGrid_%04i (%s)" % (self.id, self.ActiveDimensions)
 
-class SkeletonHierarchy(AMRHierarchy):
-
+class SkeletonHierarchy(GridIndex):
     grid = SkeletonGrid
     
     def __init__(self, ds, dataset_type='skeleton'):
         self.dataset_type = dataset_type
-        self.dataset = weakref.proxy(ds)
         # for now, the index file is the dataset!
         self.index_filename = self.dataset.parameter_filename
         self.directory = os.path.dirname(self.index_filename)
-        AMRHierarchy.__init__(self, ds, dataset_type)
-
-    def _initialize_data_storage(self):
-        pass
+        GridIndex.__init__(self, ds, dataset_type)
 
     def _detect_output_fields(self):
         # This needs to set a self.field_list that contains all the available,
@@ -95,9 +85,12 @@
     _index_class = SkeletonHierarchy
     _field_info_class = SkeletonFieldInfo
     
-    def __init__(self, filename, dataset_type='skeleton'):
+    def __init__(self, filename, dataset_type='skeleton',
+                 storage_filename=None,
+                 units_override=None):
         self.fluid_types += ('skeleton',)
-        Dataset.__init__(self, filename, dataset_type)
+        Dataset.__init__(self, filename, dataset_type,
+                         units_override=units_override)
         self.storage_filename = storage_filename
 
     def _set_code_unit_attributes(self):

diff -r 18c5f92a3babc86b0e3015e2d0889ff46136bba5 -r 0f47064b69d24e80aad7fc463ac97c5a82affc61 yt/frontends/_skeleton/io.py
--- a/yt/frontends/_skeleton/io.py
+++ b/yt/frontends/_skeleton/io.py
@@ -13,9 +13,6 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
-import numpy as np
-import h5py
-
 from yt.utilities.io_handler import \
     BaseIOHandler
 

diff -r 18c5f92a3babc86b0e3015e2d0889ff46136bba5 -r 0f47064b69d24e80aad7fc463ac97c5a82affc61 yt/frontends/api.py
--- a/yt/frontends/api.py
+++ b/yt/frontends/api.py
@@ -39,3 +39,5 @@
         for frontend in _frontends:
             _mod = "yt.frontends.%s.api" % frontend
             setattr(self, frontend, importlib.import_module(_mod))
+        setattr(self, 'api', importlib.import_module('yt.frontends.api'))
+        setattr(self, '__name__', 'yt.frontends.api')

diff -r 18c5f92a3babc86b0e3015e2d0889ff46136bba5 -r 0f47064b69d24e80aad7fc463ac97c5a82affc61 yt/frontends/art/api.py
--- a/yt/frontends/art/api.py
+++ b/yt/frontends/art/api.py
@@ -24,3 +24,5 @@
 
 from .io import \
       IOHandlerART
+
+from . import tests

diff -r 18c5f92a3babc86b0e3015e2d0889ff46136bba5 -r 0f47064b69d24e80aad7fc463ac97c5a82affc61 yt/frontends/art/data_structures.py
--- a/yt/frontends/art/data_structures.py
+++ b/yt/frontends/art/data_structures.py
@@ -166,7 +166,8 @@
                  skip_particles=False, skip_stars=False,
                  limit_level=None, spread_age=True,
                  force_max_level=None, file_particle_header=None,
-                 file_particle_data=None, file_particle_stars=None):
+                 file_particle_data=None, file_particle_stars=None,
+                 units_override=None):
         self.fluid_types += ("art", )
         if fields is None:
             fields = fluid_fields
@@ -186,7 +187,8 @@
         self.spread_age = spread_age
         self.domain_left_edge = np.zeros(3, dtype='float')
         self.domain_right_edge = np.zeros(3, dtype='float')+1.0
-        Dataset.__init__(self, filename, dataset_type)
+        Dataset.__init__(self, filename, dataset_type,
+                         units_override=units_override)
         self.storage_filename = storage_filename
 
     def _find_files(self, file_amr):

diff -r 18c5f92a3babc86b0e3015e2d0889ff46136bba5 -r 0f47064b69d24e80aad7fc463ac97c5a82affc61 yt/frontends/art/tests/test_outputs.py
--- a/yt/frontends/art/tests/test_outputs.py
+++ b/yt/frontends/art/tests/test_outputs.py
@@ -16,7 +16,8 @@
 
 from yt.testing import \
     requires_file, \
-    assert_equal
+    assert_equal, \
+    units_override_check
 from yt.utilities.answer_testing.framework import \
     requires_ds, \
     big_patch_amr, \
@@ -48,3 +49,9 @@
 @requires_file(d9p)
 def test_ARTDataset():
     assert isinstance(data_dir_load(d9p), ARTDataset)
+
+ at requires_file(d9p)
+def test_units_override():
+    for test in units_override_check(d9p):
+        yield test
+

diff -r 18c5f92a3babc86b0e3015e2d0889ff46136bba5 -r 0f47064b69d24e80aad7fc463ac97c5a82affc61 yt/frontends/artio/api.py
--- a/yt/frontends/artio/api.py
+++ b/yt/frontends/artio/api.py
@@ -22,3 +22,5 @@
 
 from .io import \
     IOHandlerARTIO
+
+from . import tests

diff -r 18c5f92a3babc86b0e3015e2d0889ff46136bba5 -r 0f47064b69d24e80aad7fc463ac97c5a82affc61 yt/frontends/artio/data_structures.py
--- a/yt/frontends/artio/data_structures.py
+++ b/yt/frontends/artio/data_structures.py
@@ -314,7 +314,8 @@
     _field_info_class = ARTIOFieldInfo
 
     def __init__(self, filename, dataset_type='artio',
-                 storage_filename=None, max_range = 1024):
+                 storage_filename=None, max_range = 1024,
+                 units_override=None):
         if self._handle is not None:
             return
         self.max_range = max_range
@@ -324,7 +325,8 @@
         self._handle = artio_fileset(self._fileset_prefix)
         self.artio_parameters = self._handle.parameters
         # Here we want to initiate a traceback, if the reader is not built.
-        Dataset.__init__(self, filename, dataset_type)
+        Dataset.__init__(self, filename, dataset_type,
+                         units_override=units_override)
         self.storage_filename = storage_filename
 
     def _set_code_unit_attributes(self):

diff -r 18c5f92a3babc86b0e3015e2d0889ff46136bba5 -r 0f47064b69d24e80aad7fc463ac97c5a82affc61 yt/frontends/artio/tests/test_outputs.py
--- a/yt/frontends/artio/tests/test_outputs.py
+++ b/yt/frontends/artio/tests/test_outputs.py
@@ -50,3 +50,8 @@
 @requires_file(sizmbhloz)
 def test_ARTIODataset():
     assert isinstance(data_dir_load(sizmbhloz), ARTIODataset)
+
+ at requires_file(sizmbhloz)
+def test_units_override():
+    for test in units_override_check(sizmbhloz):
+        yield test

diff -r 18c5f92a3babc86b0e3015e2d0889ff46136bba5 -r 0f47064b69d24e80aad7fc463ac97c5a82affc61 yt/frontends/athena/api.py
--- a/yt/frontends/athena/api.py
+++ b/yt/frontends/athena/api.py
@@ -22,3 +22,5 @@
 
 from .io import \
       IOHandlerAthena
+
+from . import tests

diff -r 18c5f92a3babc86b0e3015e2d0889ff46136bba5 -r 0f47064b69d24e80aad7fc463ac97c5a82affc61 yt/frontends/athena/data_structures.py
--- a/yt/frontends/athena/data_structures.py
+++ b/yt/frontends/athena/data_structures.py
@@ -285,7 +285,8 @@
 
         # Need to reset the units in the dataset based on the correct
         # domain left/right/dimensions.
-        self.dataset._set_code_unit_attributes()
+        # DEV: Is this really necessary?
+        #self.dataset._set_code_unit_attributes()
 
         if self.dataset.dimensionality <= 2 :
             self.dataset.domain_dimensions[2] = np.int(1)
@@ -352,12 +353,24 @@
     _dataset_type = "athena"
 
     def __init__(self, filename, dataset_type='athena',
-                 storage_filename=None, parameters=None):
+                 storage_filename=None, parameters=None,
+                 units_override=None):
         self.fluid_types += ("athena",)
         if parameters is None:
             parameters = {}
         self.specified_parameters = parameters
-        Dataset.__init__(self, filename, dataset_type)
+        if units_override is None:
+            units_override = {}
+        # This is for backwards-compatibility
+        already_warned = False
+        for k,v in self.specified_parameters.items():
+            if k.endswith("_unit") and k not in units_override:
+                if not already_warned:
+                    mylog.warning("Supplying unit conversions from the parameters dict is deprecated, "+
+                                  "and will be removed in a future release. Use units_override instead.")
+                    already_warned = True
+                units_override[k] = self.specified_parameters.pop(k)
+        Dataset.__init__(self, filename, dataset_type, units_override=units_override)
         self.filename = filename
         if storage_filename is None:
             storage_filename = '%s.yt' % filename.split('/')[-1]
@@ -372,23 +385,21 @@
         """
         Generates the conversion to various physical _units based on the parameter file
         """
+        if "length_unit" not in self.units_override:
+            self.no_cgs_equiv_length = True
         for unit, cgs in [("length", "cm"), ("time", "s"), ("mass", "g")]:
-            val = self.specified_parameters.get("%s_unit" % unit, None)
-            if val is None:
-                if unit == "length": self.no_cgs_equiv_length = True
-                mylog.warning("No %s conversion to cgs provided.  " +
-                              "Assuming 1.0 = 1.0 %s", unit, cgs)
-                val = 1.0
-            if not isinstance(val, tuple):
-                val = (val, cgs)
-            setattr(self, "%s_unit" % unit, self.quan(val[0], val[1]))
-        self.velocity_unit = self.length_unit/self.time_unit
-        self.magnetic_unit = np.sqrt(4*np.pi * self.mass_unit /
-                                  (self.time_unit**2 * self.length_unit))
-        self.magnetic_unit.convert_to_units("gauss")
+            # We set these to cgs for now, but they may be overridden later.
+            mylog.warning("Assuming 1.0 = 1.0 %s", cgs)
+            setattr(self, "%s_unit" % unit, self.quan(1.0, cgs))
 
     def set_code_units(self):
         super(AthenaDataset, self).set_code_units()
+        mag_unit = getattr(self, "magnetic_unit", None)
+        if mag_unit is None:
+            self.magnetic_unit = np.sqrt(4*np.pi * self.mass_unit /
+                                         (self.time_unit**2 * self.length_unit))
+        self.magnetic_unit.convert_to_units("gauss")
+
         self.unit_registry.modify("code_magnetic", self.magnetic_unit)
 
     def _parse_parameter_file(self):

diff -r 18c5f92a3babc86b0e3015e2d0889ff46136bba5 -r 0f47064b69d24e80aad7fc463ac97c5a82affc61 yt/frontends/athena/tests/test_outputs.py
--- a/yt/frontends/athena/tests/test_outputs.py
+++ b/yt/frontends/athena/tests/test_outputs.py
@@ -43,16 +43,16 @@
         test_blast.__name__ = test.description
         yield test
 
-parameters_stripping = {"time_unit":3.086e14,
-                        "length_unit":8.0236e22,
-                        "mass_unit":9.999e-30*8.0236e22**3}
+uo_stripping = {"time_unit":3.086e14,
+                "length_unit":8.0236e22,
+                "mass_unit":9.999e-30*8.0236e22**3}
 
 _fields_stripping = ("temperature", "density", "specific_scalar[0]")
 
 stripping = "RamPressureStripping/id0/rps.0062.vtk"
 @requires_ds(stripping, big_data=True)
 def test_stripping():
-    ds = data_dir_load(stripping, kwargs={"parameters":parameters_stripping})
+    ds = data_dir_load(stripping, kwargs={"units_override":uo_stripping})
     yield assert_equal, str(ds), "rps.0062"
     for test in small_patch_amr(stripping, _fields_stripping):
         test_stripping.__name__ = test.description

diff -r 18c5f92a3babc86b0e3015e2d0889ff46136bba5 -r 0f47064b69d24e80aad7fc463ac97c5a82affc61 yt/frontends/boxlib/api.py
--- a/yt/frontends/boxlib/api.py
+++ b/yt/frontends/boxlib/api.py
@@ -29,3 +29,5 @@
 
 from .io import \
       IOHandlerBoxlib
+
+from . import tests

diff -r 18c5f92a3babc86b0e3015e2d0889ff46136bba5 -r 0f47064b69d24e80aad7fc463ac97c5a82affc61 yt/frontends/boxlib/data_structures.py
--- a/yt/frontends/boxlib/data_structures.py
+++ b/yt/frontends/boxlib/data_structures.py
@@ -366,7 +366,8 @@
                  cparam_filename="inputs",
                  fparam_filename="probin",
                  dataset_type='boxlib_native',
-                 storage_filename=None):
+                 storage_filename=None,
+                 units_override=None):
         """
         The paramfile is usually called "inputs"
         and there may be a fortran inputs file usually called "probin"
@@ -380,7 +381,8 @@
         self.fparam_filename = self._localize_check(fparam_filename)
         self.storage_filename = storage_filename
 
-        Dataset.__init__(self, output_dir, dataset_type)
+        Dataset.__init__(self, output_dir, dataset_type,
+                         units_override=units_override)
 
         # These are still used in a few places.
         if "HydroMethod" not in self.parameters.keys():
@@ -721,10 +723,12 @@
                  cparam_filename="inputs",
                  fparam_filename="probin",
                  dataset_type='orion_native',
-                 storage_filename=None):
+                 storage_filename=None,
+                 units_override=None):
 
         BoxlibDataset.__init__(self, output_dir,
-                               cparam_filename, fparam_filename, dataset_type)
+                               cparam_filename, fparam_filename,
+                               dataset_type, units_override=units_override)
 
     @classmethod
     def _is_valid(cls, *args, **kwargs):

diff -r 18c5f92a3babc86b0e3015e2d0889ff46136bba5 -r 0f47064b69d24e80aad7fc463ac97c5a82affc61 yt/frontends/boxlib/tests/test_orion.py
--- a/yt/frontends/boxlib/tests/test_orion.py
+++ b/yt/frontends/boxlib/tests/test_orion.py
@@ -47,3 +47,9 @@
 @requires_file(rt)
 def test_OrionDataset():
     assert isinstance(data_dir_load(rt), OrionDataset)
+
+@requires_file(rt)
+def test_units_override():
+    for test in units_override_check(rt):
+        yield test
+

diff -r 18c5f92a3babc86b0e3015e2d0889ff46136bba5 -r 0f47064b69d24e80aad7fc463ac97c5a82affc61 yt/frontends/chombo/api.py
--- a/yt/frontends/chombo/api.py
+++ b/yt/frontends/chombo/api.py
@@ -35,3 +35,5 @@
 from .io import \
     IOHandlerChomboHDF5,\
     IOHandlerPlutoHDF5
+
+from . import tests

diff -r 18c5f92a3babc86b0e3015e2d0889ff46136bba5 -r 0f47064b69d24e80aad7fc463ac97c5a82affc61 yt/frontends/chombo/data_structures.py
--- a/yt/frontends/chombo/data_structures.py
+++ b/yt/frontends/chombo/data_structures.py
@@ -113,7 +113,8 @@
         self.directory = ds.fullpath
         self._handle = ds._handle
 
-        self.float_type = self._handle['Chombo_global'].attrs['testReal'].dtype.name
+        tr = self._handle['Chombo_global'].attrs.get("testReal", "float32")
+        self.float_type = tr.dtype.name if hasattr(tr, "dtype") else tr
         self._levels = [key for key in self._handle.keys() if key.startswith('level')]
         GridIndex.__init__(self, ds, dataset_type)
 
@@ -161,7 +162,13 @@
     def _count_grids(self):
         self.num_grids = 0
         for lev in self._levels:
-            self.num_grids += self._handle[lev]['Processors'].len()
+            d = self._handle[lev]
+            if 'Processors' in d:
+                self.num_grids += d['Processors'].len()
+            elif 'boxes' in d:
+                self.num_grids += d['boxes'].len()
+            else:
+                raise RuntimeError("Uknown file specification")
 
     def _parse_index(self):
         f = self._handle # shortcut
@@ -243,7 +250,8 @@
     _field_info_class = ChomboFieldInfo
 
     def __init__(self, filename, dataset_type='chombo_hdf5',
-                 storage_filename = None, ini_filename = None):
+                 storage_filename = None, ini_filename = None,
+                 units_override=None):
         self.fluid_types += ("chombo",)
         self._handle = HDF5FileHandler(filename)
         self.dataset_type = dataset_type
@@ -258,7 +266,8 @@
         self.geometry = "cartesian"
         self.ini_filename = ini_filename
         self.fullplotdir = os.path.abspath(filename)
-        Dataset.__init__(self,filename, self.dataset_type)
+        Dataset.__init__(self,filename, self.dataset_type,
+                         units_override=units_override)
         self.storage_filename = storage_filename
         self.cosmological_simulation = False
 
@@ -447,10 +456,12 @@
     _field_info_class = PlutoFieldInfo
 
     def __init__(self, filename, dataset_type='pluto_chombo_native',
-                 storage_filename = None, ini_filename = None):
+                 storage_filename = None, ini_filename = None,
+                 units_override=None):
 
         ChomboDataset.__init__(self, filename, dataset_type, 
-                    storage_filename, ini_filename)
+                               storage_filename, ini_filename,
+                               units_override=units_override)
 
     def _parse_parameter_file(self):
         """
@@ -579,10 +590,12 @@
     _field_info_class = Orion2FieldInfo
 
     def __init__(self, filename, dataset_type='orion_chombo_native',
-                 storage_filename = None, ini_filename = None):
+                 storage_filename = None, ini_filename = None,
+                 units_override=None):
 
         ChomboDataset.__init__(self, filename, dataset_type,
-                    storage_filename, ini_filename)
+                               storage_filename, ini_filename,
+                               units_override=units_override)
 
     def _parse_parameter_file(self):
         """
@@ -665,10 +678,12 @@
     _field_info_class = ChomboPICFieldInfo3D
 
     def __init__(self, filename, dataset_type='chombo_hdf5',
-                 storage_filename=None, ini_filename=None):
+                 storage_filename=None, ini_filename=None,
+                 units_override=None):
 
         ChomboDataset.__init__(self, filename, dataset_type,
-                               storage_filename, ini_filename)
+                               storage_filename, ini_filename,
+                               units_override=units_override)
 
         if self.dimensionality == 1:
             self._field_info_class = ChomboPICFieldInfo1D

diff -r 18c5f92a3babc86b0e3015e2d0889ff46136bba5 -r 0f47064b69d24e80aad7fc463ac97c5a82affc61 yt/frontends/chombo/io.py
--- a/yt/frontends/chombo/io.py
+++ b/yt/frontends/chombo/io.py
@@ -25,6 +25,7 @@
     _dataset_type = "chombo_hdf5"
     _offset_string = 'data:offsets=0'
     _data_string = 'data:datatype=0'
+    _offsets = None
 
     def __init__(self, ds, *args, **kwargs):
         BaseIOHandler.__init__(self, ds, *args, **kwargs)
@@ -32,6 +33,29 @@
         self._handle = ds._handle
         self.dim = self._handle['Chombo_global/'].attrs['SpaceDim']
         self._read_ghost_info()
+        if self._offset_string not in self._handle['level_0']:
+            self._calculate_offsets()
+
+    def _calculate_offsets(self):
+        def box_size(corners):
+            size = 1
+            for idim in range(self.dim):
+                size *= (corners[idim+self.dim] - corners[idim] + 1)
+            return size
+
+        self._offsets = {}
+        num_comp = self._handle.attrs['num_components']
+        level = 0
+        while 1:
+            lname = 'level_%i' % level
+            if lname not in self._handle: break
+            boxes = self._handle[lname]['boxes'].value
+            box_sizes = np.array([box_size(box) for box in boxes])
+
+            # exclusive prefix sum: each box's data block starts where
+            # the previous box's (all components) ends
+            offsets = np.cumsum(box_sizes*num_comp, dtype='int64')
+            offsets = np.concatenate(([0], offsets[:-1]))
+            self._offsets[level] = offsets
+            level += 1
 
     def _read_ghost_info(self):
         try:
@@ -41,7 +65,7 @@
             self.ghost = np.array(self.ghost)
         except KeyError:
             # assume zero ghosts if outputGhosts not present
-            self.ghost = np.zeros(self.dim)
+            self.ghost = np.zeros(self.dim, 'int64')
 
     _field_dict = None
     @property
@@ -80,7 +104,10 @@
         shape = grid.ActiveDimensions + 2*self.ghost
         boxsize = shape.prod()
 
-        grid_offset = lev[self._offset_string][grid._level_id]
+        if self._offsets is not None:
+            grid_offset = self._offsets[grid.Level][grid._level_id]
+        else:
+            grid_offset = lev[self._offset_string][grid._level_id]
         start = grid_offset+self.field_dict[field]*boxsize
         stop = start + boxsize
         data = lev[self._data_string][start:stop]

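A toy illustration of the offset computation above, assuming each box's data
block (all components) is stored immediately after the previous one:

    import numpy as np

    box_sizes = np.array([8, 27, 64])   # cells per box (illustrative)
    num_comp = 3                        # components per cell

    offsets = np.cumsum(box_sizes * num_comp, dtype='int64')
    offsets = np.concatenate(([0], offsets[:-1]))
    print(offsets)   # [  0  24 105]
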
diff -r 18c5f92a3babc86b0e3015e2d0889ff46136bba5 -r 0f47064b69d24e80aad7fc463ac97c5a82affc61 yt/frontends/chombo/tests/test_outputs.py
--- a/yt/frontends/chombo/tests/test_outputs.py
+++ b/yt/frontends/chombo/tests/test_outputs.py
@@ -15,7 +15,8 @@
 
 from yt.testing import \
     requires_file, \
-    assert_equal
+    assert_equal, \
+    units_override_check
 from yt.utilities.answer_testing.framework import \
     requires_ds, \
     small_patch_amr, \
@@ -80,3 +81,18 @@
 @requires_file(kho)
 def test_PlutoDataset():
     assert isinstance(data_dir_load(kho), PlutoDataset)
+
+@requires_file(zp)
+def test_units_override_zp():
+    for test in units_override_check(zp):
+        yield test
+
+@requires_file(gc)
+def test_units_override_gc():
+    for test in units_override_check(gc):
+        yield test
+
+@requires_file(kho)
+def test_units_override_kho():
+    for test in units_override_check(kho):
+        yield test
\ No newline at end of file

diff -r 18c5f92a3babc86b0e3015e2d0889ff46136bba5 -r 0f47064b69d24e80aad7fc463ac97c5a82affc61 yt/frontends/enzo/api.py
--- a/yt/frontends/enzo/api.py
+++ b/yt/frontends/enzo/api.py
@@ -35,3 +35,5 @@
       IOHandlerInMemory, \
       IOHandlerPacked2D, \
       IOHandlerPacked1D
+
+from . import tests

diff -r 18c5f92a3babc86b0e3015e2d0889ff46136bba5 -r 0f47064b69d24e80aad7fc463ac97c5a82affc61 yt/frontends/enzo/data_structures.py
--- a/yt/frontends/enzo/data_structures.py
+++ b/yt/frontends/enzo/data_structures.py
@@ -444,8 +444,7 @@
                 try:
                     gf = self.io._read_field_names(grid)
                 except self.io._read_exception:
-                    mylog.debug("Grid %s is a bit funky?", grid.id)
-                    continue
+                    raise IOError("Grid %s is a bit funky?" % grid.id)
                 mylog.debug("Grid %s has: %s", grid.id, gf)
                 field_list = field_list.union(gf)
             if "AppendActiveParticleType" in self.dataset.parameters:
@@ -665,7 +664,8 @@
                  file_style = None,
                  parameter_override = None,
                  conversion_override = None,
-                 storage_filename = None):
+                 storage_filename = None,
+                 units_override=None):
         """
         This class is a stripped down class that simply reads and parses
         *filename* without looking at the index.  *dataset_type* gets passed
@@ -682,8 +682,8 @@
         if conversion_override is None: conversion_override = {}
         self._conversion_override = conversion_override
         self.storage_filename = storage_filename
-
-        Dataset.__init__(self, filename, dataset_type, file_style=file_style)
+        Dataset.__init__(self, filename, dataset_type, file_style=file_style,
+                         units_override=units_override)
 
     def _setup_1d(self):
         self._index_class = EnzoHierarchy1D
@@ -926,6 +926,8 @@
         magnetic_unit = np.float64(magnetic_unit.in_cgs())
         self.magnetic_unit = self.quan(magnetic_unit, "gauss")
 
+        self._override_code_units()
+
         self.unit_registry.modify("code_magnetic", self.magnetic_unit)
         self.unit_registry.modify("code_length", self.length_unit)
         self.unit_registry.modify("code_mass", self.mass_unit)

diff -r 18c5f92a3babc86b0e3015e2d0889ff46136bba5 -r 0f47064b69d24e80aad7fc463ac97c5a82affc61 yt/frontends/enzo/fields.py
--- a/yt/frontends/enzo/fields.py
+++ b/yt/frontends/enzo/fields.py
@@ -203,11 +203,13 @@
                 units="code_velocity**2")
             # Subtract off B-field energy
             def _sub_b(field, data):
-                return data[te_name] - 0.5*(
-                    data["x-velocity"]**2.0
-                    + data["y-velocity"]**2.0
-                    + data["z-velocity"]**2.0 ) \
-                    - data["MagneticEnergy"]/data["Density"]
+                ret = data[te_name] - 0.5*data["x-velocity"]**2.0
+                if data.ds.dimensionality > 1:
+                    ret -= 0.5*data["y-velocity"]**2.0
+                if data.ds.dimensionality > 2:
+                    ret -= 0.5*data["z-velocity"]**2.0
+                ret -= data["MagneticEnergy"]/data["Density"]
+                return ret
             self.add_field(
                 ("gas", "thermal_energy"),
                 function=_sub_b, units = "erg/g")
@@ -217,10 +219,12 @@
                 units = "code_velocity**2")
             self.alias(("gas", "total_energy"), ("enzo", te_name))
             def _tot_minus_kin(field, data):
-                return data[te_name] - 0.5*(
-                    data["x-velocity"]**2.0
-                    + data["y-velocity"]**2.0
-                    + data["z-velocity"]**2.0 )
+                ret = data[te_name] - 0.5*data["x-velocity"]**2.0
+                if data.ds.dimensionality > 1:
+                    ret -= 0.5*data["y-velocity"]**2.0
+                if data.ds.dimensionality > 2:
+                    ret -= 0.5*data["z-velocity"]**2.0
+                return ret
             self.add_field(
                 ("gas", "thermal_energy"),
                 function = _tot_minus_kin,

diff -r 18c5f92a3babc86b0e3015e2d0889ff46136bba5 -r 0f47064b69d24e80aad7fc463ac97c5a82affc61 yt/frontends/enzo/io.py
--- a/yt/frontends/enzo/io.py
+++ b/yt/frontends/enzo/io.py
@@ -36,7 +36,10 @@
     def _read_field_names(self, grid):
         if grid.filename is None: return []
         f = h5py.File(grid.filename, "r")
-        group = f["/Grid%08i" % grid.id]
+        try:
+            group = f["/Grid%08i" % grid.id]
+        except KeyError:
+            group = f
         fields = []
         add_io = "io" in grid.ds.particle_types
         for name, v in group.iteritems():
@@ -366,6 +369,8 @@
                     #print "Opening (count) %s" % g.filename
                     f = h5py.File(g.filename, "r")
                 gds = f.get("/Grid%08i" % g.id)
+                if gds is None:
+                    gds = f
                 for field in fields:
                     ftype, fname = field
                     ds = np.atleast_3d(gds.get(fname).value.transpose())

diff -r 18c5f92a3babc86b0e3015e2d0889ff46136bba5 -r 0f47064b69d24e80aad7fc463ac97c5a82affc61 yt/frontends/enzo/tests/test_outputs.py
--- a/yt/frontends/enzo/tests/test_outputs.py
+++ b/yt/frontends/enzo/tests/test_outputs.py
@@ -91,6 +91,11 @@
     # Now we test our species fields
     yield check_color_conservation(ds)
 
+@requires_file(enzotiny)
+def test_units_override():
+    for test in units_override_check(enzotiny):
+        yield test
+
 @requires_ds(ecp, big_data=True)
 def test_nuclei_density_fields():
     ds = data_dir_load(ecp)

diff -r 18c5f92a3babc86b0e3015e2d0889ff46136bba5 -r 0f47064b69d24e80aad7fc463ac97c5a82affc61 yt/frontends/fits/api.py
--- a/yt/frontends/fits/api.py
+++ b/yt/frontends/fits/api.py
@@ -22,4 +22,6 @@
       IOHandlerFITS
 
 from .misc import \
-      setup_counts_fields
\ No newline at end of file
+      setup_counts_fields
+
+from . import tests

diff -r 18c5f92a3babc86b0e3015e2d0889ff46136bba5 -r 0f47064b69d24e80aad7fc463ac97c5a82affc61 yt/frontends/fits/data_structures.py
--- a/yt/frontends/fits/data_structures.py
+++ b/yt/frontends/fits/data_structures.py
@@ -313,17 +313,18 @@
     _handle = None
 
     def __init__(self, filename,
-                 dataset_type = 'fits',
-                 auxiliary_files = [],
-                 nprocs = None,
-                 storage_filename = None,
-                 nan_mask = None,
-                 spectral_factor = 1.0,
-                 z_axis_decomp = False,
-                 line_database = None,
-                 line_width = None,
-                 suppress_astropy_warnings = True,
-                 parameters = None):
+                 dataset_type='fits',
+                 auxiliary_files=[],
+                 nprocs=None,
+                 storage_filename=None,
+                 nan_mask=None,
+                 spectral_factor=1.0,
+                 z_axis_decomp=False,
+                 line_database=None,
+                 line_width=None,
+                 suppress_astropy_warnings=True,
+                 parameters=None,
+                 units_override=None):
 
         if parameters is None:
             parameters = {}
@@ -434,7 +435,7 @@
 
         self.refine_by = 2
 
-        Dataset.__init__(self, fn, dataset_type)
+        Dataset.__init__(self, fn, dataset_type, units_override=units_override)
         self.storage_filename = storage_filename
 
     def _set_code_unit_attributes(self):

diff -r 18c5f92a3babc86b0e3015e2d0889ff46136bba5 -r 0f47064b69d24e80aad7fc463ac97c5a82affc61 yt/frontends/fits/tests/test_outputs.py
--- a/yt/frontends/fits/tests/test_outputs.py
+++ b/yt/frontends/fits/tests/test_outputs.py
@@ -41,3 +41,9 @@
     for test in small_patch_amr(vf, _fields_vels, input_center="c", input_weight="ones"):
         test_velocity_field.__name__ = test.description
         yield test
+
+@requires_file(vf)
+def test_units_override():
+    for test in units_override_check(vf):
+        yield test
+

diff -r 18c5f92a3babc86b0e3015e2d0889ff46136bba5 -r 0f47064b69d24e80aad7fc463ac97c5a82affc61 yt/frontends/flash/api.py
--- a/yt/frontends/flash/api.py
+++ b/yt/frontends/flash/api.py
@@ -23,3 +23,5 @@
 
 from .io import \
       IOHandlerFLASH
+
+from . import tests

This diff is so big that we needed to truncate the remainder.

https://bitbucket.org/yt_analysis/yt/commits/9bb0796dc733/
Changeset:   9bb0796dc733
Branch:      yt
User:        drudd
Date:        2014-10-27 19:32:18+00:00
Summary:     Alter the way the ID field is created for test_compose
Affected #:  1 file

diff -r 0f47064b69d24e80aad7fc463ac97c5a82affc61 -r 9bb0796dc733bd2544445a204269827c41f4328a yt/data_objects/tests/test_compose.py
--- a/yt/data_objects/tests/test_compose.py
+++ b/yt/data_objects/tests/test_compose.py
@@ -2,51 +2,35 @@
 from yt.fields.local_fields import add_field
 from yt.units.yt_array import YTArray, uintersect1d
 
-def setup_foo():
+def setup():
     from yt.config import ytcfg
     ytcfg["yt","__withintesting"] = "True"
-    def _ID(field, data):
-        width = data.ds.domain_right_edge - data.ds.domain_left_edge
-        min_dx = YTArray(1.0/8192, input_units='code_length',
-                         registry=data.ds.unit_registry)
-        delta = width / min_dx
-        x = data['x'] - min_dx / 2.
-        y = data['y'] - min_dx / 2.
-        z = data['z'] - min_dx / 2.
-        xi = x / min_dx
-        yi = y / min_dx
-        zi = z / min_dx
-        index = xi + delta[0] * (yi + delta[1] * zi)
-        index = index.astype('int64')
-        return index
 
-    add_field("ID", function=_ID, units=None)
+# Copied from test_boolean for computing a unique identifier for
+# each cell from cell positions
+def _IDFIELD(field, data):
+    width = data.ds.domain_right_edge - data.ds.domain_left_edge
+    min_dx = YTArray(1.0/8192, input_units='code_length',
+                     registry=data.ds.unit_registry)
+    delta = width / min_dx
+    x = data['x'] - min_dx / 2.
+    y = data['y'] - min_dx / 2.
+    z = data['z'] - min_dx / 2.
+    xi = x / min_dx
+    yi = y / min_dx
+    zi = z / min_dx
+    index = xi + delta[0] * (yi + delta[1] * zi)
+    index = index.astype('int64')
+    return index
 
 def test_compose_no_overlap():
     r"""Test to make sure that composed data objects that don't
     overlap behave the way we expect (return empty collections)
     """
-
-    def _ID(field, data):
-        width = data.ds.domain_right_edge - data.ds.domain_left_edge
-        min_dx = YTArray(1.0/8192, input_units='code_length',
-                         registry=data.ds.unit_registry)
-        delta = width / min_dx
-        x = data['x'] - min_dx / 2.
-        y = data['y'] - min_dx / 2.
-        z = data['z'] - min_dx / 2.
-        xi = x / min_dx
-        yi = y / min_dx
-        zi = z / min_dx
-        index = xi + delta[0] * (yi + delta[1] * zi)
-        index = index.astype('int64')
-        return index
-
     empty = np.array([])
     for n in [1, 2, 4, 8]:
         ds = fake_random_ds(64, nprocs=n)
-        ds.add_field("ID", function=_ID)
-        ds.index
+        ds.add_field("ID", function=_IDFIELD)
 
         # position parameters for initial region
         center = [0.25]*3
@@ -94,7 +78,7 @@
     empty = np.array([])
     for n in [1, 2, 4, 8]:
         ds = fake_random_ds(64, nprocs=n)
-        ds.index
+        ds.add_field("ID", function=_IDFIELD)
 
         # position parameters for initial region
         center = [0.4, 0.5, 0.5]


https://bitbucket.org/yt_analysis/yt/commits/b4349fc571ea/
Changeset:   b4349fc571ea
Branch:      yt
User:        jzuhone
Date:        2014-11-04 14:25:48+00:00
Summary:     Merged in drudd/yt (pull request #1264)

Added ComposeSelector which allows YTSelectionContainers to draw from a data_source.
Affected #:  6 files

diff -r 1e0faa5cb0fc8a94a358a7d90fd672176db42eeb -r b4349fc571eab65546c041ae955591ca3688aac4 doc/source/analyzing/objects.rst
--- a/doc/source/analyzing/objects.rst
+++ b/doc/source/analyzing/objects.rst
@@ -96,7 +96,7 @@
 
 **Point** 
     | Class :class:`~yt.data_objects.selection_data_containers.YTPointBase`    
-    | Usage: ``point(coord, ds=None, field_parameters=None)``
+    | Usage: ``point(coord, ds=None, field_parameters=None, data_source=None)``
     | A point defined by a single cell at specified coordinates.
 
 1D Objects
@@ -104,14 +104,14 @@
 
 **Ray (Axis-Aligned)** 
     | Class :class:`~yt.data_objects.selection_data_containers.YTOrthoRayBase`
-    | Usage: ``ortho_ray(axis, coord, ds=None, field_parameters=None)``
+    | Usage: ``ortho_ray(axis, coord, ds=None, field_parameters=None, data_source=None)``
     | A line (of data cells) stretching through the full domain 
       aligned with one of the x,y,z axes.  Defined by an axis and a point
       to be intersected.
 
 **Ray (Arbitrarily-Aligned)** 
     | Class :class:`~yt.data_objects.selection_data_containers.YTRayBase`
-    | Usage: ``ray(start_coord, end_coord, ds=None, field_parameters=None)``
+    | Usage: ``ray(start_coord, end_coord, ds=None, field_parameters=None, data_source=None)``
     | A line (of data cells) defined by arbitrary start and end coordinates. 
 
 2D Objects
@@ -119,13 +119,13 @@
 
 **Slice (Axis-Aligned)** 
     | Class :class:`~yt.data_objects.selection_data_containers.YTSliceBase`
-    | Usage: ``slice(axis, coord, center=None, ds=None, field_parameters=None)``
+    | Usage: ``slice(axis, coord, center=None, ds=None, field_parameters=None, data_source=None)``
     | A plane normal to one of the axes and intersecting a particular 
       coordinate.
 
 **Slice (Arbitrarily-Aligned)** 
     | Class :class:`~yt.data_objects.selection_data_containers.YTCuttingPlaneBase`
-    | Usage: ``cutting(normal, coord, north_vector=None, ds=None, field_parameters=None)``
+    | Usage: ``cutting(normal, coord, north_vector=None, ds=None, field_parameters=None, data_source=None)``
     | A plane normal to a specified vector and intersecting a particular 
       coordinate.
 
@@ -141,8 +141,8 @@
 
 **Box Region** 
     | Class :class:`~yt.data_objects.selection_data_containers.YTRegionBase`
-    | Usage: ``region(center, left_edge, right_edge, fields=None, ds=None, field_parameters=None)``
-    | Alternatively: ``box(left_edge, right_edge, fields=None, ds=None, field_parameters=None)``
+    | Usage: ``region(center, left_edge, right_edge, fields=None, ds=None, field_parameters=None, data_source=None)``
+    | Alternatively: ``box(left_edge, right_edge, fields=None, ds=None, field_parameters=None, data_source=None)``
     | A box-like region aligned with the grid axis orientation.  It is 
       defined by a left_edge, a right_edge, and a center.  The left_edge
       and right_edge are the minimum and maximum bounds in the three axes
@@ -152,14 +152,14 @@
 
 **Disk/Cylinder** 
     | Class: :class:`~yt.data_objects.selection_data_containers.YTDiskBase`
-    | Usage: ``disk(center, normal, radius, height, fields=None, ds=None, field_parameters=None)``
+    | Usage: ``disk(center, normal, radius, height, fields=None, ds=None, field_parameters=None, data_source=None)``
     | A cylinder defined by a point at the center of one of the circular bases,
       a normal vector to it defining the orientation of the length of the
       cylinder, and radius and height values for the cylinder's dimensions.
 
 **Ellipsoid** 
     | Class :class:`~yt.data_objects.selection_data_containers.YTEllipsoidBase`
-    | Usage: ``ellipsoid(center, semi_major_axis_length, semi_medium_axis_length, semi_minor_axis_length, semi_major_vector, tilt, fields=None, ds=None, field_parameters=None)``
+    | Usage: ``ellipsoid(center, semi_major_axis_length, semi_medium_axis_length, semi_minor_axis_length, semi_major_vector, tilt, fields=None, ds=None, field_parameters=None, data_source=None)``
     | An ellipsoid with axis magnitudes set by semi_major_axis_length, 
      semi_medium_axis_length, and semi_minor_axis_length.  semi_major_vector 
      sets the direction of the semi_major_axis.  tilt defines the orientation 
@@ -167,7 +167,7 @@
 
 **Sphere** 
     | Class :class:`~yt.data_objects.selection_data_containers.YTSphereBase`
-    | Usage: ``sphere(center, radius, ds=None, field_parameters=None)``
+    | Usage: ``sphere(center, radius, ds=None, field_parameters=None, data_source=None)``
     | A sphere defined by a central coordinate and a radius.
 
 
@@ -176,6 +176,12 @@
 
 See also the section on :ref:`filtering-data`.
 
+**Intersecting Regions**
+    | Most Region objects provide a data_source parameter, which allows you to
+    | subselect one region from another (in the coordinate system of the Dataset).
+    | Note that this can easily lead to empty data for non-intersecting regions.
+    | Usage: ``slice(axis, coord, ds, data_source=sph)``
+
 **Boolean Regions** 
     | **Note: not yet implemented in yt 3.0**
     | Usage: ``boolean()``

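A minimal sketch of the intersecting-regions usage documented above (the
dataset path is illustrative):

    import yt

    ds = yt.load("IsolatedGalaxy/galaxy0030/galaxy0030")
    sph = ds.sphere("c", (10.0, "kpc"))

    # Keep only the part of the slice that also lies inside the sphere;
    # a non-intersecting coord would simply return no cells.
    sl = ds.slice(0, 0.5, data_source=sph)
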
diff -r 1e0faa5cb0fc8a94a358a7d90fd672176db42eeb -r b4349fc571eab65546c041ae955591ca3688aac4 yt/data_objects/construction_data_containers.py
--- a/yt/data_objects/construction_data_containers.py
+++ b/yt/data_objects/construction_data_containers.py
@@ -42,7 +42,7 @@
 from yt.utilities.minimal_representation import \
     MinimalProjectionData
 from yt.utilities.parallel_tools.parallel_analysis_interface import \
-    parallel_objects, parallel_root_only, ParallelAnalysisInterface
+    parallel_objects, parallel_root_only 
 from yt.units.unit_object import Unit
 import yt.geometry.particle_deposit as particle_deposit
 from yt.utilities.grid_data_format.writer import write_to_gdf
@@ -833,7 +833,7 @@
             new_fields.append(output_field)
         level_state.fields = new_fields
 
-class YTSurfaceBase(YTSelectionContainer3D, ParallelAnalysisInterface):
+class YTSurfaceBase(YTSelectionContainer3D):
     r"""This surface object identifies isocontours on a cell-by-cell basis,
     with no consideration of global connectedness, and returns the vertices
     of the Triangles in that isocontour.
@@ -886,7 +886,6 @@
                          ("index", "z"))
     vertices = None
     def __init__(self, data_source, surface_field, field_value):
-        ParallelAnalysisInterface.__init__(self)
         self.data_source = data_source
         self.surface_field = surface_field
         self.field_value = field_value

diff -r 1e0faa5cb0fc8a94a358a7d90fd672176db42eeb -r b4349fc571eab65546c041ae955591ca3688aac4 yt/data_objects/data_containers.py
--- a/yt/data_objects/data_containers.py
+++ b/yt/data_objects/data_containers.py
@@ -41,6 +41,8 @@
 from yt.fields.derived_field import \
     ValidateSpatial
 import yt.geometry.selection_routines
+from yt.geometry.selection_routines import \
+    compose_selector
 from yt.extern.six import add_metaclass
 
 def force_array(item, shape):
@@ -101,8 +103,15 @@
         sets its initial set of fields, and the remainder of the arguments
         are passed as field_parameters.
         """
-        if ds != None:
+        # ds is typically set on the new object type created in
+        # Dataset._add_object_class, but it can also be passed as a
+        # parameter to the constructor, in which case it overrides the
+        # default.  This code ensures ds is always set.
+        if ds is not None:
             self.ds = ds
+        elif not hasattr(self, "ds"):
+            raise RuntimeError("ds must be set either through the class "
+                               "type or as a constructor parameter")
+
         self._current_particle_type = "all"
         self._current_fluid_type = self.ds.default_fluid_type
         self.ds.objects.append(weakref.proxy(self))
@@ -542,10 +551,22 @@
     _sort_by = None
     _selector = None
     _current_chunk = None
+    _data_source = None
+    _dimensionality = None
 
-    def __init__(self, *args, **kwargs):
-        super(YTSelectionContainer, self).__init__(*args, **kwargs)
-
+    def __init__(self, ds, field_parameters, data_source=None):
+        ParallelAnalysisInterface.__init__(self)
+        super(YTSelectionContainer, self).__init__(ds, field_parameters)
+        self._data_source = data_source
+        if data_source is not None:
+            if data_source.ds is not self.ds:
+                raise RuntimeError("Attempted to construct a DataContainer "
+                                   "with a data_source from a different "
+                                   "Dataset (%s vs %s)" % (self.ds, data_source.ds))
+            if data_source._dimensionality < self._dimensionality:
+                raise RuntimeError("Attempted to construct a DataContainer "
+                                   "with a data_source of lower dimensionality "
+                                   "(%u vs %u)" % (data_source._dimensionality,
+                                                   self._dimensionality))
+ 
     @property
     def selector(self):
         if self._selector is not None: return self._selector
@@ -555,7 +576,11 @@
                          "%s_selector" % self._type_name, None)
         if sclass is None:
             raise YTDataSelectorNotImplemented(self._type_name)
-        self._selector = sclass(self)
+
+        if self._data_source is not None:
+            self._selector = compose_selector(self, self._data_source.selector, sclass(self))
+        else:
+            self._selector = sclass(self)
         return self._selector
 
     def chunks(self, fields, chunking_style, **kwargs):
@@ -765,30 +790,32 @@
 
 class YTSelectionContainer0D(YTSelectionContainer):
     _spatial = False
-    def __init__(self, ds, field_parameters):
+    _dimensionality = 0
+    def __init__(self, ds, field_parameters = None, data_source = None):
         super(YTSelectionContainer0D, self).__init__(
-            ds, field_parameters)
+            ds, field_parameters, data_source)
 
 class YTSelectionContainer1D(YTSelectionContainer):
     _spatial = False
-    def __init__(self, ds, field_parameters):
+    _dimensionality = 1
+    def __init__(self, ds, field_parameters = None, data_source = None):
         super(YTSelectionContainer1D, self).__init__(
-            ds, field_parameters)
+            ds, field_parameters, data_source)
         self._grids = None
         self._sortkey = None
         self._sorted = {}
 
 class YTSelectionContainer2D(YTSelectionContainer):
     _key_fields = ['px','py','pdx','pdy']
+    _dimensionality = 2
     """
     Prepares the YTSelectionContainer2D, normal to *axis*.  If *axis* is 4, we are not
     aligned with any axis.
     """
     _spatial = False
-    def __init__(self, axis, ds, field_parameters):
-        ParallelAnalysisInterface.__init__(self)
+    def __init__(self, axis, ds, field_parameters = None, data_source = None):
         super(YTSelectionContainer2D, self).__init__(
-            ds, field_parameters)
+            ds, field_parameters, data_source)
         # We need the ds, which will exist by now, for fix_axis.
         self.axis = fix_axis(axis, self.ds)
         self.set_field_parameter("axis", axis)
@@ -910,9 +937,9 @@
     _key_fields = ['x','y','z','dx','dy','dz']
     _spatial = False
     _num_ghost_zones = 0
-    def __init__(self, center, ds = None, field_parameters = None):
-        ParallelAnalysisInterface.__init__(self)
-        super(YTSelectionContainer3D, self).__init__(ds, field_parameters)
+    _dimensionality = 3
+    def __init__(self, center, ds, field_parameters = None, data_source = None):
+        super(YTSelectionContainer3D, self).__init__(ds, field_parameters, data_source)
         self._set_center(center)
         self.coords = None
         self._grids = None
@@ -1273,9 +1300,9 @@
     """
     _type_name = "boolean"
     _con_args = ("regions",)
-    def __init__(self, regions, fields = None, ds = None, **kwargs):
+    def __init__(self, regions, fields = None, ds = None, field_parameters = None, data_source = None):
         # Center is meaningless, but we'll define it all the same.
-        YTSelectionContainer3D.__init__(self, [0.5]*3, fields, ds, **kwargs)
+        YTSelectionContainer3D.__init__(self, [0.5]*3, ds, field_parameters, data_source)
         self.regions = regions
         self._all_regions = []
         self._some_overlap = []

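The dimensionality guard added above rejects composing a higher-dimensional
container from a lower-dimensional source. A sketch of the failure mode,
using the fake-dataset helper from yt.testing:

    from yt.testing import fake_random_ds

    ds = fake_random_ds(64)
    sl = ds.slice(0, 0.5)                           # a 2D container
    # Raises RuntimeError: a 3D sphere cannot draw from a 2D source
    sph = ds.sphere([0.5]*3, 0.25, data_source=sl)
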
diff -r 1e0faa5cb0fc8a94a358a7d90fd672176db42eeb -r b4349fc571eab65546c041ae955591ca3688aac4 yt/data_objects/selection_data_containers.py
--- a/yt/data_objects/selection_data_containers.py
+++ b/yt/data_objects/selection_data_containers.py
@@ -51,8 +51,11 @@
     ds: Dataset, optional
         An optional dataset to use rather than self.ds
     field_parameters : dictionary
-         A dictionary of field parameters than can be accessed by derived
-         fields.
+        A dictionary of field parameters that can be accessed by derived
+        fields.
+    data_source: optional
+        Draw the selection from the provided data source rather than
+        all data associated with the dataset.
 
     Examples
     --------
@@ -64,8 +67,8 @@
     """
     _type_name = "point"
     _con_args = ('p',)
-    def __init__(self, p, ds = None, field_parameters = None):
-        super(YTPointBase, self).__init__(ds, field_parameters)
+    def __init__(self, p, ds=None, field_parameters=None, data_source=None):
+        super(YTPointBase, self).__init__(ds, field_parameters, data_source)
         self.p = p
 
 class YTOrthoRayBase(YTSelectionContainer1D):
@@ -92,6 +95,9 @@
     field_parameters : dictionary
          A dictionary of field parameters than can be accessed by derived
          fields.
+    data_source: optional
+        Draw the selection from the provided data source rather than
+        all data associated with the dataset.
 
     Examples
     --------
@@ -104,8 +110,9 @@
     _key_fields = ['x','y','z','dx','dy','dz']
     _type_name = "ortho_ray"
     _con_args = ('axis', 'coords')
-    def __init__(self, axis, coords, ds=None, field_parameters=None):
-        super(YTOrthoRayBase, self).__init__(ds, field_parameters)
+    def __init__(self, axis, coords, ds=None, 
+                 field_parameters=None, data_source=None):
+        super(YTOrthoRayBase, self).__init__(ds, field_parameters, data_source)
         self.axis = axis
         xax = self.ds.coordinates.x_axis[self.axis]
         yax = self.ds.coordinates.y_axis[self.axis]
@@ -144,6 +151,9 @@
     field_parameters : dictionary
          A dictionary of field parameters than can be accessed by derived
          fields.
+    data_source: optional
+        Draw the selection from the provided data source rather than
+        all data associated with the dataset.
 
     Examples
     --------
@@ -156,8 +166,9 @@
     _type_name = "ray"
     _con_args = ('start_point', 'end_point')
     _container_fields = ("t", "dts")
-    def __init__(self, start_point, end_point, ds=None, field_parameters=None):
-        super(YTRayBase, self).__init__(ds, field_parameters)
+    def __init__(self, start_point, end_point, ds=None,
+                 field_parameters=None, data_source=None):
+        super(YTRayBase, self).__init__(ds, field_parameters, data_source)
         self.start_point = self.ds.arr(start_point,
                             'code_length', dtype='float64')
         self.end_point = self.ds.arr(end_point,
@@ -204,6 +215,9 @@
     field_parameters : dictionary
          A dictionary of field parameters than can be accessed by derived
          fields.
+    data_source: optional
+        Draw the selection from the provided data source rather than
+        all data associated with the dataset.
 
     Examples
     --------
@@ -217,10 +231,10 @@
     _type_name = "slice"
     _con_args = ('axis', 'coord')
     _container_fields = ("px", "py", "pdx", "pdy")
-
     def __init__(self, axis, coord, center=None, ds=None,
-                 field_parameters = None):
-        YTSelectionContainer2D.__init__(self, axis, ds, field_parameters)
+                 field_parameters=None, data_source=None):
+        YTSelectionContainer2D.__init__(self, axis, ds,
+                                        field_parameters, data_source)
         self._set_center(center)
         self.coord = coord
 
@@ -285,6 +299,9 @@
     field_parameters : dictionary
          A dictionary of field parameters than can be accessed by derived
          fields.
+    data_source: optional
+        Draw the selection from the provided data source rather than
+        all data associated with the dataset.
 
     Notes
     -----
@@ -308,10 +325,10 @@
     _type_name = "cutting"
     _con_args = ('normal', 'center')
     _container_fields = ("px", "py", "pz", "pdx", "pdy", "pdz")
-
-    def __init__(self, normal, center, north_vector = None, 
-                 ds = None, field_parameters = None):
-        YTSelectionContainer2D.__init__(self, 4, ds, field_parameters)
+    def __init__(self, normal, center, north_vector=None,
+                 ds=None, field_parameters=None, data_source=None):
+        YTSelectionContainer2D.__init__(self, 4, ds,
+                                        field_parameters, data_source)
         self._set_center(center)
         self.set_field_parameter('center',center)
         # Let's set up our plane equation
@@ -465,7 +482,7 @@
 
     Parameters
     ----------
-    center : array_like 
+    center : array_like
         coordinate to which the normal, radius, and height all reference
     normal : array_like
         the normal vector defining the direction of lengthwise part of the 
@@ -482,6 +499,9 @@
     field_parameters : dictionary
          A dictionary of field parameters than can be accessed by derived
          fields.
+    data_source: optional
+        Draw the selection from the provided data source rather than
+        all data associated with the dataset.
 
     Examples
     --------
@@ -494,8 +514,9 @@
     _type_name = "disk"
     _con_args = ('center', '_norm_vec', 'radius', 'height')
     def __init__(self, center, normal, radius, height, fields=None,
-                 ds=None, **kwargs):
-        YTSelectionContainer3D.__init__(self, center, fields, ds, **kwargs)
+                 ds=None, field_parameters=None, data_source=None):
+        YTSelectionContainer3D.__init__(self, center, ds,
+                                        field_parameters, data_source)
         self._norm_vec = np.array(normal)/np.sqrt(np.dot(normal,normal))
         self.set_field_parameter("normal", self._norm_vec)
         self.set_field_parameter("center", self.center)
@@ -523,9 +544,10 @@
     """
     _type_name = "region"
     _con_args = ('center', 'left_edge', 'right_edge')
-    def __init__(self, center, left_edge, right_edge, fields = None,
-                 ds = None, **kwargs):
-        YTSelectionContainer3D.__init__(self, center, ds, **kwargs)
+    def __init__(self, center, left_edge, right_edge, fields=None,
+                 ds=None, field_parameters=None, data_source=None):
+        YTSelectionContainer3D.__init__(self, center, ds,
+                                        field_parameters, data_source)
         if not isinstance(left_edge, YTArray):
             self.left_edge = self.ds.arr(left_edge, 'code_length')
         else:
@@ -542,8 +564,10 @@
     """
     _type_name = "data_collection"
     _con_args = ("_obj_list",)
-    def __init__(self, obj_list, ds=None, field_parameters=None, center=None):
-        YTSelectionContainer3D.__init__(self, center, ds, field_parameters)
+    def __init__(self, obj_list, ds=None, field_parameters=None,
+                 data_source=None, center=None):
+        YTSelectionContainer3D.__init__(self, center, ds,
+                                        field_parameters, data_source)
         self._obj_ids = np.array([o.id - o._id_offset for o in obj_list],
                                 dtype="int64")
         self._obj_list = obj_list
@@ -569,8 +593,10 @@
     """
     _type_name = "sphere"
     _con_args = ('center', 'radius')
-    def __init__(self, center, radius, ds = None, field_parameters = None):
-        super(YTSphereBase, self).__init__(center, ds, field_parameters)
+    def __init__(self, center, radius, ds=None,
+                 field_parameters=None, data_source=None):
+        super(YTSphereBase, self).__init__(center, ds,
+                                           field_parameters, data_source)
         # Unpack the radius, if necessary
         radius = fix_length(radius, self.ds)
         if radius < self.index.get_smallest_dx():
@@ -615,8 +641,9 @@
     _type_name = "ellipsoid"
     _con_args = ('center', '_A', '_B', '_C', '_e0', '_tilt')
     def __init__(self, center, A, B, C, e0, tilt, fields=None,
-                 ds=None, field_parameters = None):
-        YTSelectionContainer3D.__init__(self, center, ds, field_parameters)
+                 ds=None, field_parameters=None, data_source=None):
+        YTSelectionContainer3D.__init__(self, center, ds,
+                                        field_parameters, data_source)
         # make sure the magnitudes of semi-major axes are in order
         if A<B or B<C:
             raise YTEllipsoidOrdering(ds, A, B, C)
@@ -625,10 +652,10 @@
         self._B = self.ds.quan(B, 'code_length')
         self._C = self.ds.quan(C, 'code_length')
         if self._C < self.index.get_smallest_dx():
-            raise YTSphereTooSmall(ds, self._C, self.index.get_smallest_dx())
+            raise YTSphereTooSmall(self.ds, self._C, self.index.get_smallest_dx())
         self._e0 = e0 = e0 / (e0**2.0).sum()**0.5
         self._tilt = tilt
-        
+ 
         # find the t1 angle needed to rotate about z axis to align e0 to x
         t1 = np.arctan(e0[1] / e0[0])
         # rotate e0 by -t1
@@ -684,9 +711,10 @@
     """
     _type_name = "cut_region"
     _con_args = ("base_object", "conditionals")
-    def __init__(self, base_object, conditionals, ds = None,
-                 field_parameters = None):
-        super(YTCutRegionBase, self).__init__(base_object.center, ds, field_parameters)
+    def __init__(self, base_object, conditionals, ds=None,
+                 field_parameters=None, data_source=None):
+        super(YTCutRegionBase, self).__init__(base_object.center, ds,
+                                              field_parameters, data_source)
         self.conditionals = ensure_list(conditionals)
         self.base_object = base_object
         self._selector = None
@@ -762,4 +790,3 @@
     @property
     def fwidth(self):
         return self.base_object.fwidth[self._cond_ind,:]
-

diff -r 1e0faa5cb0fc8a94a358a7d90fd672176db42eeb -r b4349fc571eab65546c041ae955591ca3688aac4 yt/data_objects/tests/test_compose.py
--- /dev/null
+++ b/yt/data_objects/tests/test_compose.py
@@ -0,0 +1,146 @@
+from yt.testing import *
+from yt.fields.local_fields import add_field
+from yt.units.yt_array import YTArray, uintersect1d
+
+def setup():
+    from yt.config import ytcfg
+    ytcfg["yt","__withintesting"] = "True"
+
+# Copied from test_boolean for computing a unique identifier for
+# each cell from cell positions
+def _IDFIELD(field, data):
+    width = data.ds.domain_right_edge - data.ds.domain_left_edge
+    min_dx = YTArray(1.0/8192, input_units='code_length',
+                     registry=data.ds.unit_registry)
+    delta = width / min_dx
+    x = data['x'] - min_dx / 2.
+    y = data['y'] - min_dx / 2.
+    z = data['z'] - min_dx / 2.
+    xi = x / min_dx
+    yi = y / min_dx
+    zi = z / min_dx
+    index = xi + delta[0] * (yi + delta[1] * zi)
+    index = index.astype('int64')
+    return index
+
+def test_compose_no_overlap():
+    r"""Test to make sure that composed data objects that don't
+    overlap behave the way we expect (return empty collections)
+    """
+    empty = np.array([])
+    for n in [1, 2, 4, 8]:
+        ds = fake_random_ds(64, nprocs=n)
+        ds.add_field("ID", function=_IDFIELD)
+
+        # position parameters for initial region
+        center = [0.25]*3
+        left_edge = [0.1]*3
+        right_edge = [0.4]*3
+        normal = [1, 0, 0]
+        radius = height = 0.15
+
+        # initial 3D regions
+        sources = [ds.sphere(center, radius),
+                   ds.region(center, left_edge, right_edge),
+                   ds.disk(center, normal, radius, height)]
+
+        # position parameters for non-overlapping regions
+        center = [0.75]*3
+        left_edge = [0.6]*3
+        right_edge = [0.9]*3
+
+        # subselect non-overlapping 0, 1, 2, 3D regions
+        for data1 in sources:
+            data2 = ds.sphere(center, radius, data_source=data1)
+            yield assert_array_equal, data2['ID'], empty
+
+            data2 = ds.region(center, left_edge, right_edge, data_source=data1)
+            yield assert_array_equal, data2['ID'], empty  
+
+            data2 = ds.disk(center, normal, radius, height, data_source=data1)
+            yield assert_array_equal, data2['ID'], empty
+
+            for d in range(3):
+                data2 = ds.slice(d, center[d], data_source=data1)
+                yield assert_array_equal, data2['ID'], empty
+
+            for d in range(3):
+                data2 = ds.ortho_ray(d, center[0:d] + center[d+1:], data_source=data1)
+                yield assert_array_equal, data2['ID'], empty
+
+            data2 = ds.point(center, data_source=data1)
+            yield assert_array_equal, data2['ID'], empty
+
+def test_compose_overlap():
+    r"""Test to make sure that composed data objects that do
+    overlap behave the way we expect 
+    """
+    empty = np.array([])
+    for n in [1, 2, 4, 8]:
+        ds = fake_random_ds(64, nprocs=n)
+        ds.add_field("ID", function=_IDFIELD)
+
+        # position parameters for initial region
+        center = [0.4, 0.5, 0.5]
+        left_edge = [0.1]*3
+        right_edge = [0.7]*3
+        normal = [1, 0, 0]
+        radius = height = 0.15
+
+        # initial 3D regions
+        sources = [ds.sphere(center, radius),
+                   ds.region(center, left_edge, right_edge),
+                   ds.disk(center, normal, radius, height)]
+
+        # position parameters for overlapping regions
+        center = [0.6, 0.5, 0.5]
+        left_edge = [0.3]*3
+        right_edge = [0.9]*3
+
+        # subselect overlapping 0, 1, 2, 3D regions
+        for data1 in sources:
+            id1 = data1['ID']
+
+            data2 = ds.sphere(center, radius)
+            data3 = ds.sphere(center, radius, data_source=data1)
+            id2 = data2['ID']
+            id3 = data3['ID']
+            id3.sort()
+            yield assert_array_equal, uintersect1d(id1, id2), id3
+
+            data2 = ds.region(center, left_edge, right_edge)
+            data3 = ds.region(center, left_edge, right_edge, data_source=data1)
+            id2 = data2['ID']
+            id3 = data3['ID']
+            id3.sort()
+            yield assert_array_equal, uintersect1d(id1, id2), id3
+
+            data2 = ds.disk(center, normal, radius, height)
+            data3 = ds.disk(center, normal, radius, height, data_source=data1)
+            id2 = data2['ID']
+            id3 = data3['ID']
+            id3.sort()
+            yield assert_array_equal, uintersect1d(id1, id2), id3
+
+            for d in range(3):
+                data2 = ds.slice(d, center[d])
+                data3 = ds.slice(d, center[d], data_source=data1)
+                id2 = data2['ID']
+                id3 = data3['ID']
+                id3.sort()
+                yield assert_array_equal, uintersect1d(id1, id2), id3
+
+            for d in range(3):
+                data2 = ds.ortho_ray(d, center[0:d] + center[d+1:])
+                data3 = ds.ortho_ray(d, center[0:d] + center[d+1:], data_source=data1)
+                id2 = data2['ID']
+                id3 = data3['ID']
+                id3.sort()
+                yield assert_array_equal, uintersect1d(id1, id2), id3
+
+            data2 = ds.point(center)
+            data3 = ds.point(center, data_source=data1)
+            id2 = data2['ID']
+            id3 = data3['ID']
+            id3.sort()
+            yield assert_array_equal, uintersect1d(id1, id2), id3

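For intuition, _IDFIELD linearizes each cell's grid coordinates into a single
integer, much like row-major array indexing. A toy version with min_dx = 1/8
on the unit cube:

    # cell centered at (0.0625, 0.1875, 0.3125) has grid indices (0, 1, 2)
    delta = 8                      # cells per dimension at min_dx
    xi, yi, zi = 0, 1, 2
    index = xi + delta * (yi + delta * zi)
    print(index)   # 136
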
diff -r 1e0faa5cb0fc8a94a358a7d90fd672176db42eeb -r b4349fc571eab65546c041ae955591ca3688aac4 yt/geometry/selection_routines.pyx
--- a/yt/geometry/selection_routines.pyx
+++ b/yt/geometry/selection_routines.pyx
@@ -112,7 +112,7 @@
 
 cdef class SelectorObject:
 
-    def __cinit__(self, dobj):
+    def __cinit__(self, dobj, *args):
         self.min_level = getattr(dobj, "min_level", 0)
         self.max_level = getattr(dobj, "max_level", 99)
         self.overlap_cells = 0
@@ -1726,6 +1726,65 @@
 
 always_selector = AlwaysSelector
 
+cdef class ComposeSelector(SelectorObject):
+    cdef SelectorObject selector1
+    cdef SelectorObject selector2
+
+    def __init__(self, dobj, selector1, selector2):
+        self.selector1 = selector1
+        self.selector2 = selector2
+
+    def select_grids(self,
+                     np.ndarray[np.float64_t, ndim=2] left_edges,
+                     np.ndarray[np.float64_t, ndim=2] right_edges,
+                     np.ndarray[np.int32_t, ndim=2] levels):
+        return np.logical_or(
+                    self.selector1.select_grids(left_edges, right_edges, levels),
+                    self.selector2.select_grids(left_edges, right_edges, levels))
+
+    cdef int select_cell(self, np.float64_t pos[3], np.float64_t dds[3]) nogil:
+        if self.selector1.select_cell(pos, dds) and \
+                self.selector2.select_cell(pos, dds):
+            return 1
+        else:
+            return 0
+
+    cdef int select_grid(self, np.float64_t left_edge[3],
+                         np.float64_t right_edge[3], np.int32_t level,
+                         Oct *o = NULL) nogil:
+        if self.selector1.select_grid(left_edge, right_edge, level, o) or \
+                self.selector2.select_grid(left_edge, right_edge, level, o):
+            return 1
+        else:
+            return 0
+        
+    cdef int select_point(self, np.float64_t pos[3]) nogil:
+        if self.selector1.select_point(pos) and \
+                self.selector2.select_point(pos):
+            return 1
+        else:
+            return 0
+
+    cdef int select_sphere(self, np.float64_t pos[3], np.float64_t radius) nogil:
+        if self.selector1.select_sphere(pos, radius) and \
+                self.selector2.select_sphere(pos, radius):
+            return 1
+        else:
+            return 0
+
+    cdef int select_bbox(self, np.float64_t left_edge[3],
+                               np.float64_t right_edge[3]) nogil:
+        if self.selector1.select_bbox(left_edge, right_edge) and \
+                self.selector2.select_bbox(left_edge, right_edge):
+            return 1
+        else:
+            return 0
+
+    def _hash_vals(self):
+        return (hash(self.selector1), hash(self.selector2))
+
+compose_selector = ComposeSelector
+
 cdef class HaloParticlesSelector(SelectorObject):
     cdef public object base_source
     cdef SelectorObject base_selector

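At the selector level, composition means intersection: the fine-grained tests
(select_cell, select_point, select_sphere, select_bbox) require both selectors
to accept, while select_grids combines the two with a logical OR, a
conservative choice that defers the real filtering to the cell-level AND
tests. A minimal sketch of how the composed selector is reached from Python
(names per the diffs above; field values from fake_random_ds are random, so
only the selection shape matters):

    from yt.testing import fake_random_ds

    ds = fake_random_ds(64)
    box = ds.region([0.5]*3, [0.25]*3, [0.75]*3)
    # The sphere's own selector is wrapped in a ComposeSelector together
    # with the box's selector, so a cell must satisfy both.
    sph = ds.sphere([0.5]*3, 0.35, data_source=box)
    print(sph["density"].size)
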
Repository URL: https://bitbucket.org/yt_analysis/yt/

--

This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.


