[yt-svn] commit/yt: 7 new changesets

commits-noreply at bitbucket.org
Tue Apr 11 08:36:03 PDT 2017


7 new commits in yt:

https://bitbucket.org/yt_analysis/yt/commits/be98d3bf2e19/
Changeset:   be98d3bf2e19
Branch:      yt
User:        brittonsmith
Date:        2017-03-03 23:55:39+00:00
Summary:     Don't use filename template unless there is more than one file.
Affected #:  1 file

diff -r 3eca2ae80ab14a48b643d3055d7d3c0933fa77ae -r be98d3bf2e1925c2dff98c83e9f239ebe081e973 yt/geometry/particle_geometry_handler.py
--- a/yt/geometry/particle_geometry_handler.py
+++ b/yt/geometry/particle_geometry_handler.py
@@ -74,8 +74,15 @@
         template = self.dataset.filename_template
         ndoms = self.dataset.file_count
         cls = self.dataset._file_class
-        self.data_files = [cls(self.dataset, self.io, template % {'num':i}, i)
-                           for i in range(ndoms)]
+        if ndoms > 1:
+            self.data_files = \
+              [cls(self.dataset, self.io, template % {'num':i}, i)
+               for i in range(ndoms)]
+        else:
+            self.data_files = \
+              [cls(self.dataset, self.io,
+                   self.dataset.parameter_filename, 0)]
+
         index_ptype = self.index_ptype
         if index_ptype == "all":
             self.total_particles = sum(

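In effect, single-file outputs now load by the dataset's own parameter_filename instead of by expanding filename_template. A minimal sketch of the two branches (the template string here is hypothetical):

    # Multi-file outputs still expand the "%(num)" template once per file.
    template = "halos_64.%(num)s.h5"   # hypothetical filename_template
    file_count = 3
    print([template % {"num": i} for i in range(file_count)])
    # -> ['halos_64.0.h5', 'halos_64.1.h5', 'halos_64.2.h5']
    # With file_count == 1, parameter_filename is used directly, so a
    # single-file dataset loads even if the template would not reproduce
    # the name that was passed to yt.load.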

https://bitbucket.org/yt_analysis/yt/commits/4984b4655b75/
Changeset:   4984b4655b75
Branch:      yt
User:        brittonsmith
Date:        2017-03-04 00:09:06+00:00
Summary:     Check that field exists before adding to default quantities.
Affected #:  1 file

diff -r be98d3bf2e1925c2dff98c83e9f239ebe081e973 -r 4984b4655b75bdba2bcbad64de950358176986ba yt/analysis_modules/halo_analysis/halo_catalog.py
--- a/yt/analysis_modules/halo_analysis/halo_catalog.py
+++ b/yt/analysis_modules/halo_analysis/halo_catalog.py
@@ -478,10 +478,12 @@
                         field_types=ftypes, extra_attrs=extra_attrs)
 
     def add_default_quantities(self, field_type='halos'):
-        self.add_quantity("particle_identifier", field_type=field_type,prepend=True)
-        self.add_quantity("particle_mass", field_type=field_type,prepend=True)
-        self.add_quantity("particle_position_x", field_type=field_type,prepend=True)
-        self.add_quantity("particle_position_y", field_type=field_type,prepend=True)
-        self.add_quantity("particle_position_z", field_type=field_type,prepend=True)
-        self.add_quantity("virial_radius", field_type=field_type,prepend=True)
-
+        for field in ["particle_identifier", "particle_mass",
+                      "particle_position_x", "particle_position_y",
+                      "particle_position_z", "virial_radius"]:
+            field_name = (field_type, field)
+            if field_name not in self.halos_ds.field_list:
+                mylog.warn("Halo dataset %s has no field %s." %
+                           (self.halos_ds, str(field_name)))
+                continue
+            self.add_quantity(field, field_type=field_type, prepend=True)

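The guard keys on (field_type, field_name) tuples in the halo dataset's field_list, warning and skipping rather than failing on absent fields. A sketch of the same membership test (the catalog path is hypothetical, borrowed from the HaloCatalog docstring further down):

    import yt

    halos_ds = yt.load("halo_catalogs/catalog_0064/catalog_0064.0.h5")
    for field in ["particle_mass", "virial_radius"]:
        field_name = ("halos", field)
        if field_name not in halos_ds.field_list:
            # add_default_quantities now logs a warning and skips the field
            print("catalog has no field %s" % str(field_name))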

https://bitbucket.org/yt_analysis/yt/commits/8359b88e684e/
Changeset:   8359b88e684e
Branch:      yt
User:        brittonsmith
Date:        2017-03-17 18:24:17+00:00
Summary:     Moving filename setup into a class method so I can subclass it.
Affected #:  2 files

diff -r 4984b4655b75bdba2bcbad64de950358176986ba -r 8359b88e684ee3ba5faa32e32632ead3b07644a5 yt/frontends/halo_catalog/data_structures.py
--- a/yt/frontends/halo_catalog/data_structures.py
+++ b/yt/frontends/halo_catalog/data_structures.py
@@ -30,6 +30,20 @@
 from yt.data_objects.static_output import \
     ParticleFile
 
+class HaloCatalogParticleIndex(ParticleIndex):
+    def _setup_filenames(self):
+        template = self.dataset.filename_template
+        ndoms = self.dataset.file_count
+        cls = self.dataset._file_class
+        if ndoms > 1:
+            self.data_files = \
+              [cls(self.dataset, self.io, template % {'num':i}, i)
+               for i in range(ndoms)]
+        else:
+            self.data_files = \
+              [cls(self.dataset, self.io,
+                   self.dataset.parameter_filename, 0)]
+
 class HaloCatalogHDF5File(ParticleFile):
     def __init__(self, ds, io, filename, file_id):
         with h5py.File(filename, "r") as f:

diff -r 4984b4655b75bdba2bcbad64de950358176986ba -r 8359b88e684ee3ba5faa32e32632ead3b07644a5 yt/geometry/particle_geometry_handler.py
--- a/yt/geometry/particle_geometry_handler.py
+++ b/yt/geometry/particle_geometry_handler.py
@@ -69,19 +69,17 @@
     def convert(self, unit):
         return self.dataset.conversion_factors[unit]
 
-    def _initialize_particle_handler(self):
-        self._setup_data_io()
+    def _setup_filenames(self):
         template = self.dataset.filename_template
         ndoms = self.dataset.file_count
         cls = self.dataset._file_class
-        if ndoms > 1:
-            self.data_files = \
-              [cls(self.dataset, self.io, template % {'num':i}, i)
-               for i in range(ndoms)]
-        else:
-            self.data_files = \
-              [cls(self.dataset, self.io,
-                   self.dataset.parameter_filename, 0)]
+        self.data_files = \
+          [cls(self.dataset, self.io, template % {'num':i}, i)
+           for i in range(ndoms)]
+
+    def _initialize_particle_handler(self):
+        self._setup_data_io()
+        self._setup_filenames()
 
         index_ptype = self.index_ptype
         if index_ptype == "all":

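The refactor is a small template-method hook: the base index builds data_files in _setup_filenames, and a frontend index such as HaloCatalogParticleIndex overrides only that step. A stripped-down sketch of the pattern (not the actual yt classes):

    class BaseIndex(object):
        def _setup_filenames(self):
            print("default: expand filename_template for every file")

        def _initialize_particle_handler(self):
            self._setup_filenames()  # the only piece a subclass must replace

    class SingleFileIndex(BaseIndex):
        def _setup_filenames(self):
            print("override: use parameter_filename for the single file")

    SingleFileIndex()._initialize_particle_handler()
    # -> override: use parameter_filename for the single file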

https://bitbucket.org/yt_analysis/yt/commits/c4e8a5ae1ca2/
Changeset:   c4e8a5ae1ca2
Branch:      yt
User:        brittonsmith
Date:        2017-03-13 19:40:26+00:00
Summary:     Merging.
Affected #:  2 files

diff -r f3015d6048eb7cd636f07151284643db38cec6fc -r c4e8a5ae1ca2f3ec860ae1ec13eef70a535aca42 yt/analysis_modules/halo_analysis/halo_catalog.py
--- a/yt/analysis_modules/halo_analysis/halo_catalog.py
+++ b/yt/analysis_modules/halo_analysis/halo_catalog.py
@@ -478,10 +478,12 @@
                         field_types=ftypes, extra_attrs=extra_attrs)
 
     def add_default_quantities(self, field_type='halos'):
-        self.add_quantity("particle_identifier", field_type=field_type,prepend=True)
-        self.add_quantity("particle_mass", field_type=field_type,prepend=True)
-        self.add_quantity("particle_position_x", field_type=field_type,prepend=True)
-        self.add_quantity("particle_position_y", field_type=field_type,prepend=True)
-        self.add_quantity("particle_position_z", field_type=field_type,prepend=True)
-        self.add_quantity("virial_radius", field_type=field_type,prepend=True)
-
+        for field in ["particle_identifier", "particle_mass",
+                      "particle_position_x", "particle_position_y",
+                      "particle_position_z", "virial_radius"]:
+            field_name = (field_type, field)
+            if field_name not in self.halos_ds.field_list:
+                mylog.warn("Halo dataset %s has no field %s." %
+                           (self.halos_ds, str(field_name)))
+                continue
+            self.add_quantity(field, field_type=field_type, prepend=True)

diff -r f3015d6048eb7cd636f07151284643db38cec6fc -r c4e8a5ae1ca2f3ec860ae1ec13eef70a535aca42 yt/geometry/particle_geometry_handler.py
--- a/yt/geometry/particle_geometry_handler.py
+++ b/yt/geometry/particle_geometry_handler.py
@@ -74,8 +74,15 @@
         template = self.dataset.filename_template
         ndoms = self.dataset.file_count
         cls = self.dataset._file_class
-        self.data_files = [cls(self.dataset, self.io, template % {'num':i}, i)
-                           for i in range(ndoms)]
+        if ndoms > 1:
+            self.data_files = \
+              [cls(self.dataset, self.io, template % {'num':i}, i)
+               for i in range(ndoms)]
+        else:
+            self.data_files = \
+              [cls(self.dataset, self.io,
+                   self.dataset.parameter_filename, 0)]
+
         index_ptype = self.index_ptype
         if index_ptype == "all":
             self.total_particles = sum(


https://bitbucket.org/yt_analysis/yt/commits/5f4fc6ce7089/
Changeset:   5f4fc6ce7089
Branch:      yt
User:        brittonsmith
Date:        2017-03-17 18:26:11+00:00
Summary:     Merging.
Affected #:  40 files

diff -r 8359b88e684ee3ba5faa32e32632ead3b07644a5 -r 5f4fc6ce70895d0ee68726c091c194d5a1dc2ecc doc/Makefile
--- a/doc/Makefile
+++ b/doc/Makefile
@@ -47,6 +47,9 @@
 	-rm -rf _temp/*.done source/cookbook/_static/*
 
 html:
+	sphinx-apidoc -o source/reference/api/ -e ../yt ../yt/extern/* \
+		$(shell find ../yt -name "*tests*" -type d) ../yt/utilities/voropp*
+	sed -e '/show-inheritance/ a\ \ \ \ :inherited-members:' -i source/reference/api/yt*.rst
 	$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
 	@echo
 	@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."

diff -r 8359b88e684ee3ba5faa32e32632ead3b07644a5 -r 5f4fc6ce70895d0ee68726c091c194d5a1dc2ecc doc/extensions/pythonscript_sphinxext.py
--- a/doc/extensions/pythonscript_sphinxext.py
+++ b/doc/extensions/pythonscript_sphinxext.py
@@ -1,4 +1,5 @@
 import tempfile
+import time
 import os
 import glob
 import shutil
@@ -37,12 +38,16 @@
             f.write(content)
 
         # Use sphinx logger?
+        uid = uuid.uuid4().hex[:8]
         print("")
+        print(">> Contents of the script: %s" % uid)
         print(content)
         print("")
 
+        start = time.time()
         subprocess.call(['python', 'temp.py'])
-
+        print(">> The execution of the script %s took %f s" %
+              (uid, time.time() - start))
         text = ''
         for im in sorted(glob.glob("*.png")):
             text += get_image_tag(im, image_dir, image_rel_dir)

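The change tags each executed script with a short uuid and reports its wall-clock time. A self-contained sketch of the pattern (note that uuid.uuid4() also needs an "import uuid", presumably present elsewhere in the extension):

    import subprocess
    import time
    import uuid

    uid = uuid.uuid4().hex[:8]          # short tag pairing script and timing lines
    start = time.time()
    subprocess.call(["python", "-c", "print('hello')"])
    print(">> The execution of the script %s took %f s" %
          (uid, time.time() - start))
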
diff -r 8359b88e684ee3ba5faa32e32632ead3b07644a5 -r 5f4fc6ce70895d0ee68726c091c194d5a1dc2ecc doc/source/analyzing/Particle_Trajectories.ipynb
--- a/doc/source/analyzing/Particle_Trajectories.ipynb
+++ b/doc/source/analyzing/Particle_Trajectories.ipynb
@@ -279,9 +279,9 @@
    "source": [
     "fig = plt.figure(figsize=(8.0, 8.0))\n",
     "ax = fig.add_subplot(111, projection='3d')\n",
-    "ax.plot(trajs[\"particle_position_x\"][100], trajs[\"particle_position_z\"][100], trajs[\"particle_position_z\"][100])\n",
-    "ax.plot(trajs[\"particle_position_x\"][8], trajs[\"particle_position_z\"][8], trajs[\"particle_position_z\"][8])\n",
-    "ax.plot(trajs[\"particle_position_x\"][25], trajs[\"particle_position_z\"][25], trajs[\"particle_position_z\"][25])"
+    "ax.plot(trajs[\"particle_position_x\"][100], trajs[\"particle_position_y\"][100], trajs[\"particle_position_z\"][100])\n",
+    "ax.plot(trajs[\"particle_position_x\"][8], trajs[\"particle_position_y\"][8], trajs[\"particle_position_z\"][8])\n",
+    "ax.plot(trajs[\"particle_position_x\"][25], trajs[\"particle_position_y\"][25], trajs[\"particle_position_z\"][25])"
    ]
   },
   {

diff -r 8359b88e684ee3ba5faa32e32632ead3b07644a5 -r 5f4fc6ce70895d0ee68726c091c194d5a1dc2ecc doc/source/analyzing/analysis_modules/halo_catalogs.rst
--- a/doc/source/analyzing/analysis_modules/halo_catalogs.rst
+++ b/doc/source/analyzing/analysis_modules/halo_catalogs.rst
@@ -481,7 +481,9 @@
 
 A :class:`~yt.analysis_modules.halo_analysis.halo_catalog.HaloCatalog`
 saved to disk can be reloaded as a yt dataset with the
-standard call to ``yt.load``. Any side data, such as profiles, can be reloaded
+standard call to ``yt.load``.  See :ref:`halocatalog` for a demonstration
+of loading and working only with the catalog.
+Any side data, such as profiles, can be reloaded
 with a ``load_profiles`` callback and a call to
 :func:`~yt.analysis_modules.halo_analysis.halo_catalog.HaloCatalog.load`.
 

diff -r 8359b88e684ee3ba5faa32e32632ead3b07644a5 -r 5f4fc6ce70895d0ee68726c091c194d5a1dc2ecc doc/source/analyzing/analysis_modules/xray_emission_fields.rst
--- a/doc/source/analyzing/analysis_modules/xray_emission_fields.rst
+++ b/doc/source/analyzing/analysis_modules/xray_emission_fields.rst
@@ -1,3 +1,6 @@
 .. _xray_emission_fields:
 
+X-ray Emission Fields
+=====================
+
 .. notebook:: XrayEmissionFields.ipynb

diff -r 8359b88e684ee3ba5faa32e32632ead3b07644a5 -r 5f4fc6ce70895d0ee68726c091c194d5a1dc2ecc doc/source/examining/loading_data.rst
--- a/doc/source/examining/loading_data.rst
+++ b/doc/source/examining/loading_data.rst
@@ -1458,7 +1458,8 @@
 
 If you have access to both the halo catalog and the simulation snapshot from
 the same redshift, additional analysis can be performed for each halo using
-:ref:`halo_catalog`.
+:ref:`halo_catalog`.  The resulting product can be reloaded in a similar manner
+to the other halo catalogs shown here.
 
 .. _rockstar:
 
@@ -1600,6 +1601,39 @@
    # The halo mass
    print(ad["FOF", "particle_mass"])
 
+.. _halocatalog:
+
+HaloCatalog
+^^^^^^^^^^^
+
+These are catalogs produced by the analysis discussed in :ref:`halo_catalog`.
+In the case where multiple files were produced, one need only provide the path
+to a single one of them.  The field type for all fields is "halos".  The fields
+available here are similar to other catalogs.  Any additional
+:ref:`halo_catalog_quantities` will also be accessible as fields.
+
++-------------------+---------------------------+
+| HaloCatalog field | yt field name             |
++===================+===========================+
+| halo id           | particle_identifier       |
++-------------------+---------------------------+
+| virial mass       | particle_mass             |
++-------------------+---------------------------+
+| virial radius     | virial_radius             |
++-------------------+---------------------------+
+| halo position     | particle_position_(x,y,z) |
++-------------------+---------------------------+
+| halo velocity     | particle_velocity_(x,y,z) |
++-------------------+---------------------------+
+
+.. code-block:: python
+
+   import yt
+   ds = yt.load("catalogs/catalog.0.h5")
+   ad = ds.all_data()
+   # The halo mass
+   print(ad["halos", "particle_mass"])
+
 .. _loading-openpmd-data:
 
 openPMD Data

diff -r 8359b88e684ee3ba5faa32e32632ead3b07644a5 -r 5f4fc6ce70895d0ee68726c091c194d5a1dc2ecc doc/source/reference/api/api.rst
--- a/doc/source/reference/api/api.rst
+++ b/doc/source/reference/api/api.rst
@@ -10,7 +10,6 @@
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
 .. autosummary::
-   :toctree: generated/
 
    ~yt.visualization.plot_window.SlicePlot
    ~yt.visualization.plot_window.AxisAlignedSlicePlot
@@ -24,7 +23,6 @@
 ^^^^^^^^^^^^^^^^^^^^^^^^^
 
 .. autosummary::
-   :toctree: generated/
 
    ~yt.visualization.profile_plotter.ProfilePlot
    ~yt.visualization.profile_plotter.PhasePlot
@@ -34,7 +32,6 @@
 ^^^^^^^^^^^^^^
 
 .. autosummary::
-   :toctree: generated/
 
    ~yt.visualization.particle_plots.ParticleProjectionPlot
    ~yt.visualization.particle_plots.ParticlePhasePlot
@@ -44,7 +41,6 @@
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
 .. autosummary::
-   :toctree: generated/
 
    ~yt.visualization.fixed_resolution.FixedResolutionBuffer
    ~yt.visualization.fixed_resolution.ParticleImageBuffer
@@ -69,7 +65,6 @@
 These will almost never need to be instantiated on their own.
 
 .. autosummary::
-   :toctree: generated/
 
    ~yt.data_objects.data_containers.YTDataContainer
    ~yt.data_objects.data_containers.YTSelectionContainer
@@ -85,7 +80,6 @@
 geometric.
 
 .. autosummary::
-   :toctree: generated/
 
    ~yt.data_objects.selection_data_containers.YTPoint
    ~yt.data_objects.selection_data_containers.YTOrthoRay
@@ -108,7 +102,6 @@
 expensive set of intermediate data.
 
 .. autosummary::
-   :toctree: generated/
 
    ~yt.data_objects.construction_data_containers.YTStreamline
    ~yt.data_objects.construction_data_containers.YTQuadTreeProj
@@ -124,7 +117,6 @@
 datasets.
 
 .. autosummary::
-   :toctree: generated/
 
    ~yt.data_objects.time_series.DatasetSeries
    ~yt.data_objects.time_series.DatasetSeriesObject
@@ -138,7 +130,6 @@
 These objects generate an "index" into multiresolution data.
 
 .. autosummary::
-   :toctree: generated/
 
    ~yt.geometry.geometry_handler.Index
    ~yt.geometry.grid_geometry_handler.GridIndex
@@ -152,7 +143,6 @@
 These classes and functions enable yt's symbolic unit handling system.
 
 .. autosummary::
-   :toctree: generated/
 
    yt.data_objects.static_output.Dataset.arr
    yt.data_objects.static_output.Dataset.quan
@@ -173,13 +163,11 @@
 ---------
 
 .. autosummary::
-   :toctree: generated/
 
 ARTIO
 ^^^^^
 
 .. autosummary::
-   :toctree: generated/
 
    ~yt.frontends.artio.data_structures.ARTIOIndex
    ~yt.frontends.artio.data_structures.ARTIOOctreeSubset
@@ -194,7 +182,6 @@
 ^^^^^^
 
 .. autosummary::
-   :toctree: generated/
 
    ~yt.frontends.athena.data_structures.AthenaGrid
    ~yt.frontends.athena.data_structures.AthenaHierarchy
@@ -206,7 +193,6 @@
 ^^^^^^
 
 .. autosummary::
-   :toctree: generated/
 
    ~yt.frontends.boxlib.data_structures.BoxlibGrid
    ~yt.frontends.boxlib.data_structures.BoxlibHierarchy
@@ -225,7 +211,6 @@
 ^^^^^^
 
 .. autosummary::
-   :toctree: generated/
 
    ~yt.frontends.chombo.data_structures.ChomboGrid
    ~yt.frontends.chombo.data_structures.ChomboHierarchy
@@ -239,7 +224,6 @@
 ^^^^
 
 .. autosummary::
-   :toctree: generated/
 
    ~yt.frontends.enzo.answer_testing_support.ShockTubeTest
    ~yt.frontends.enzo.data_structures.EnzoGrid
@@ -264,7 +248,6 @@
 ^^^^
 
 .. autosummary::
-   :toctree: generated/
 
    ~yt.frontends.fits.data_structures.FITSGrid
    ~yt.frontends.fits.data_structures.FITSHierarchy
@@ -276,7 +259,6 @@
 ^^^^^
 
 .. autosummary::
-   :toctree: generated/
 
    ~yt.frontends.flash.data_structures.FLASHGrid
    ~yt.frontends.flash.data_structures.FLASHHierarchy
@@ -288,7 +270,6 @@
 ^^^
 
 .. autosummary::
-   :toctree: generated/
 
    ~yt.frontends.gdf.data_structures.GDFGrid
    ~yt.frontends.gdf.data_structures.GDFHierarchy
@@ -299,7 +280,6 @@
 ^^^^^^^^^^^^^
 
 .. autosummary::
-   :toctree: generated/
 
    ~yt.frontends.halo_catalog.data_structures.HaloCatalogHDF5File
    ~yt.frontends.halo_catalog.data_structures.HaloCatalogDataset
@@ -319,7 +299,6 @@
 ^^^^
 
 .. autosummary::
-   :toctree: generated/
 
    ~yt.frontends.moab.data_structures.MoabHex8Hierarchy
    ~yt.frontends.moab.data_structures.MoabHex8Mesh
@@ -334,7 +313,6 @@
 ^^^^^^^
 
 .. autosummary::
-   :toctree: generated/
 
    ~yt.frontends.open_pmd.data_structures.OpenPMDGrid
    ~yt.frontends.open_pmd.data_structures.OpenPMDHierarchy
@@ -349,7 +327,6 @@
 ^^^^^^
 
 .. autosummary::
-   :toctree: generated/
 
    ~yt.frontends.ramses.data_structures.RAMSESDomainFile
    ~yt.frontends.ramses.data_structures.RAMSESDomainSubset
@@ -362,7 +339,6 @@
 ^^^^^^^^^^^^^^^^^^^^^^
 
 .. autosummary::
-   :toctree: generated/
 
    ~yt.frontends.gadget.data_structures.GadgetBinaryFile
    ~yt.frontends.gadget.data_structures.GadgetHDF5Dataset
@@ -384,7 +360,6 @@
 ^^^^^^
 
 .. autosummary::
-   :toctree: generated/
 
    ~yt.frontends.stream.data_structures.StreamDictFieldHandler
    ~yt.frontends.stream.data_structures.StreamGrid
@@ -410,7 +385,6 @@
 ^^^^^^
 
 .. autosummary::
-   :toctree: generated/
 
    ~yt.frontends.ytdata.data_structures.YTDataContainerDataset
    ~yt.frontends.ytdata.data_structures.YTSpatialPlotDataset
@@ -434,7 +408,6 @@
 ------------
 
 .. autosummary::
-   :toctree: generated/
 
    ~yt.convenience.load
    ~yt.convenience.simulation
@@ -457,7 +430,6 @@
 
 
 .. autosummary::
-   :toctree: generated/
 
    ~yt.data_objects.profiles.ProfileND
    ~yt.data_objects.profiles.Profile1D
@@ -475,7 +447,6 @@
 of topologically disconnected structures, i.e., clump finding.
 
 .. autosummary::
-   :toctree: generated/
 
    ~yt.analysis_modules.level_sets.clump_handling.Clump
    ~yt.analysis_modules.level_sets.clump_handling.Clump.add_info_item
@@ -495,7 +466,6 @@
 on cosmological halos.  It is also the primary interface for halo finding.
 
 .. autosummary::
-   :toctree: generated/
 
    ~yt.analysis_modules.halo_analysis.halo_catalog.HaloCatalog
    ~yt.analysis_modules.halo_analysis.halo_finding_methods.HaloFindingMethod
@@ -526,7 +496,6 @@
 to use the ``HaloCatalog``.
 
 .. autosummary::
-   :toctree: generated/
 
    ~yt.analysis_modules.halo_finding.halo_objects.FOFHaloFinder
    ~yt.analysis_modules.halo_finding.halo_objects.HOPHaloFinder
@@ -541,7 +510,6 @@
 
 
 .. autosummary::
-   :toctree: generated/
 
    ~yt.analysis_modules.two_point_functions.two_point_functions.TwoPointFunctions
    ~yt.analysis_modules.two_point_functions.two_point_functions.FcnSet
@@ -550,7 +518,6 @@
 -----------
 
 .. autosummary::
-   :toctree: generated/
 
    ~yt.fields.field_info_container.FieldInfoContainer
    ~yt.fields.derived_field.DerivedField
@@ -564,7 +531,6 @@
 ---------------
 
 .. autosummary::
-   :toctree: generated/
 
    ~yt.fields.field_info_container.FieldInfoContainer.add_field
    ~yt.data_objects.static_output.Dataset.add_field
@@ -574,7 +540,6 @@
 ----------------
 
 .. autosummary::
-   :toctree: generated/
 
    ~yt.data_objects.particle_filters.add_particle_filter
    ~yt.data_objects.particle_filters.particle_filter
@@ -587,7 +552,6 @@
 writing to bitmaps.
 
 .. autosummary::
-   :toctree: generated/
 
    ~yt.data_objects.image_array.ImageArray
 
@@ -601,7 +565,6 @@
 
 
 .. autosummary::
-   :toctree: generated/
 
    ~yt.analysis_modules.star_analysis.sfr_spectrum.StarFormationRate
    ~yt.analysis_modules.star_analysis.sfr_spectrum.SpectrumBuilder
@@ -611,7 +574,6 @@
 
 
 .. autosummary::
-   :toctree: generated/
 
    ~yt.analysis_modules.cosmological_observation.light_cone.light_cone.LightCone
    ~yt.analysis_modules.cosmological_observation.light_ray.light_ray.LightRay
@@ -619,7 +581,6 @@
 Absorption and X-ray spectra and spectral lines:
 
 .. autosummary::
-   :toctree: generated/
 
    ~yt.analysis_modules.absorption_spectrum.absorption_spectrum.AbsorptionSpectrum
    ~yt.fields.xray_emission_fields.XrayEmissivityIntegrator
@@ -628,14 +589,12 @@
 Absorption spectra fitting:
 
 .. autosummary::
-   :toctree: generated/
 
    ~yt.analysis_modules.absorption_spectrum.absorption_spectrum_fit.generate_total_fit
 
 Sunrise exporting:
 
 .. autosummary::
-   :toctree: generated/
 
    ~yt.analysis_modules.sunrise_export.sunrise_exporter.export_to_sunrise
    ~yt.analysis_modules.sunrise_export.sunrise_exporter.export_to_sunrise_from_halolist
@@ -643,7 +602,6 @@
 RADMC-3D exporting:
 
 .. autosummary::
-   :toctree: generated/
 
    ~yt.analysis_modules.radmc3d_export.RadMC3DInterface.RadMC3DLayer
    ~yt.analysis_modules.radmc3d_export.RadMC3DInterface.RadMC3DWriter
@@ -657,7 +615,6 @@
 Scene infrastructure:
 
 .. autosummary::
-   :toctree: generated/
 
    ~yt.visualization.volume_rendering.volume_rendering.volume_render
    ~yt.visualization.volume_rendering.volume_rendering.create_scene
@@ -669,7 +626,6 @@
 The different kinds of sources:
 
 .. autosummary::
-   :toctree: generated/
 
    ~yt.visualization.volume_rendering.render_source.RenderSource
    ~yt.visualization.volume_rendering.render_source.VolumeSource
@@ -683,7 +639,6 @@
 The different kinds of transfer functions:
 
 .. autosummary::
-   :toctree: generated/
 
    ~yt.visualization.volume_rendering.transfer_functions.TransferFunction
    ~yt.visualization.volume_rendering.transfer_functions.ColorTransferFunction
@@ -695,7 +650,6 @@
 The different kinds of lenses:
 
 .. autosummary::
-   :toctree: generated/
 
    ~yt.visualization.volume_rendering.lens.Lens
    ~yt.visualization.volume_rendering.lens.PlaneParallelLens
@@ -712,7 +666,6 @@
 
 
 .. autosummary::
-   :toctree: generated/
 
    ~yt.visualization.streamlines.Streamlines
 
@@ -725,7 +678,6 @@
 
 
 .. autosummary::
-   :toctree: generated/
 
    ~yt.visualization.image_writer.multi_image_composite
    ~yt.visualization.image_writer.write_bitmap
@@ -740,7 +692,6 @@
 particularly with complicated layouts.
 
 .. autosummary::
-   :toctree: generated/
 
    ~yt.visualization.eps_writer.DualEPS
    ~yt.visualization.eps_writer.single_plot
@@ -757,7 +708,6 @@
 
 
 .. autosummary::
-   :toctree: generated/
 
    ~yt.data_objects.derived_quantities.DerivedQuantity
    ~yt.data_objects.derived_quantities.DerivedQuantityCollection
@@ -783,7 +733,6 @@
 See also :ref:`callbacks`.
 
 .. autosummary::
-   :toctree: generated/
 
    ~yt.visualization.plot_window.PWViewerMPL.annotate_clear
    ~yt.visualization.plot_modifications.ArrowCallback
@@ -817,7 +766,6 @@
 See also :ref:`colormaps`.
 
 .. autosummary::
-   :toctree: generated/
 
    ~yt.visualization.color_maps.add_cmap
    ~yt.visualization.color_maps.make_colormap
@@ -828,7 +776,6 @@
 
 
 .. autosummary::
-   :toctree: generated/
 
    ~yt.convenience.load
    ~yt.frontends.ytdata.utilities.save_as_dataset
@@ -864,7 +811,6 @@
 
 
 .. autosummary::
-   :toctree: generated/
 
    ~yt.utilities.math_utils.periodic_position
    ~yt.utilities.math_utils.periodic_dist
@@ -899,7 +845,6 @@
 
 
 .. autosummary::
-   :toctree: generated/
 
    ~yt.config.YTConfigParser
    ~yt.utilities.parameter_file_storage.ParameterFileStore
@@ -913,7 +858,6 @@
 --------------------
 
 .. autosummary::
-   :toctree: generated/
 
    ~yt.utilities.cosmology.Cosmology
    ~yt.utilities.cosmology.Cosmology.hubble_distance
@@ -937,7 +881,6 @@
 The first set of functions are all provided by NumPy.
 
 .. autosummary::
-   :toctree: generated/
 
    ~yt.testing.assert_array_equal
    ~yt.testing.assert_almost_equal
@@ -953,7 +896,6 @@
 These are yt-provided functions:
 
 .. autosummary::
-   :toctree: generated/
 
    ~yt.testing.assert_rel_equal
    ~yt.testing.amrspace

diff -r 8359b88e684ee3ba5faa32e32632ead3b07644a5 -r 5f4fc6ce70895d0ee68726c091c194d5a1dc2ecc yt/analysis_modules/halo_analysis/halo_catalog.py
--- a/yt/analysis_modules/halo_analysis/halo_catalog.py
+++ b/yt/analysis_modules/halo_analysis/halo_catalog.py
@@ -72,11 +72,11 @@
     --------
 
     >>> # create profiles or overdensity vs. radius for each halo and save to disk
-    >>> from yt.mods import *
+    >>> import yt
     >>> from yt.analysis_modules.halo_analysis.api import *
-    >>> data_ds = load("DD0064/DD0064")
-    >>> halos_ds = load("rockstar_halos/halos_64.0.bin",
-    ...                 output_dir="halo_catalogs/catalog_0064")
+    >>> data_ds = yt.load("DD0064/DD0064")
+    >>> halos_ds = yt.load("rockstar_halos/halos_64.0.bin",
+    ...                    output_dir="halo_catalogs/catalog_0064")
     >>> hc = HaloCatalog(data_ds=data_ds, halos_ds=halos_ds)
     >>> # filter out halos with mass < 1e13 Msun
     >>> hc.add_filter("quantity_value", "particle_mass", ">", 1e13, "Msun")
@@ -91,7 +91,7 @@
     >>> hc.create()
 
     >>> # load in the saved halo catalog and all the profile data
-    >>> halos_ds = load("halo_catalogs/catalog_0064/catalog_0064.0.h5")
+    >>> halos_ds = yt.load("halo_catalogs/catalog_0064/catalog_0064.0.h5")
     >>> hc = HaloCatalog(halos_ds=halos_ds,
                          output_dir="halo_catalogs/catalog_0064")
     >>> hc.add_callback("load_profiles", output_dir="profiles")

diff -r 8359b88e684ee3ba5faa32e32632ead3b07644a5 -r 5f4fc6ce70895d0ee68726c091c194d5a1dc2ecc yt/analysis_modules/halo_finding/halo_objects.py
--- a/yt/analysis_modules/halo_finding/halo_objects.py
+++ b/yt/analysis_modules/halo_finding/halo_objects.py
@@ -708,12 +708,12 @@
         -------
         tuple : (cm, mag_A, mag_B, mag_C, e0_vector, tilt)
             The 6-tuple has in order:
-              #. The center of mass as an array.
-              #. mag_A as a float.
-              #. mag_B as a float.
-              #. mag_C as a float.
-              #. e0_vector as an array.
-              #. tilt as a float.
+            #. The center of mass as an array.
+            #. mag_A as a float.
+            #. mag_B as a float.
+            #. mag_C as a float.
+            #. e0_vector as an array.
+            #. tilt as a float.
 
         Examples
         --------

diff -r 8359b88e684ee3ba5faa32e32632ead3b07644a5 -r 5f4fc6ce70895d0ee68726c091c194d5a1dc2ecc yt/analysis_modules/halo_mass_function/halo_mass_function.py
--- a/yt/analysis_modules/halo_mass_function/halo_mass_function.py
+++ b/yt/analysis_modules/halo_mass_function/halo_mass_function.py
@@ -327,22 +327,22 @@
 
     def sigmaM(self):
         """
-         Written by BWO, 2006 (updated 25 January 2007).
-         Converted to Python by Stephen Skory December 2009.
+        Written by BWO, 2006 (updated 25 January 2007).
+        Converted to Python by Stephen Skory December 2009.
 
-         This routine takes in cosmological parameters and creates a file (array) with
-         sigma(M) in it, which is necessary for various press-schechter type
-         stuff.  In principle one can calculate it ahead of time, but it's far,
-         far faster in the long run to calculate your sigma(M) ahead of time.
+        This routine takes in cosmological parameters and creates a file (array) with
+        sigma(M) in it, which is necessary for various press-schechter type
+        stuff.  In principle one can calculate it ahead of time, but it's far,
+        far faster in the long run to calculate your sigma(M) ahead of time.
         
-         Inputs: cosmology, user must set parameters
+        Inputs: cosmology, user must set parameters
         
-         Outputs: four columns of data containing the following information:
+        Outputs: four columns of data containing the following information:
 
-         1) mass (Msolar/h)
-         2) sigma (normalized) using Msun/h as the input
-         
-         The arrays output are used later.
+        1) mass (Msolar/h)
+        2) sigma (normalized) using Msun/h as the input
+        
+        The arrays output are used later.
         """
         
         # Set up the transfer function object.
@@ -446,13 +446,12 @@
 
     def sigma_squared_of_R(self, R):
         """
-        /* calculates sigma^2(R).  This is the routine where the magic happens (or
-           whatever it is that we do here).  Integrates the sigma_squared_integrand
-           parameter from R to infinity.  Calls GSL (gnu scientific library) to do
-           the actual integration.  
+        calculates sigma^2(R).  This is the routine where the magic happens (or
+        whatever it is that we do here).  Integrates the sigma_squared_integrand
+        parameter from R to infinity.  Calls GSL (gnu scientific library) to do
+        the actual integration.  
         
-           Note that R is in h^-1 Mpc (comoving)
-        */
+        Note that R is in h^-1 Mpc (comoving)
         """
         self.R = R
         result = integrate_inf(self.sigma_squared_integrand)
@@ -463,7 +462,7 @@
 
     def sigma_squared_integrand(self, k):
         """
-        /* integrand for integral to get sigma^2(R). */
+        integrand for integral to get sigma^2(R).
         """
 
         Rcom = self.R;  # this is R in comoving Mpc/h
@@ -474,7 +473,7 @@
 
     def PofK(self, k):
         """
-        /* returns power spectrum as a function of wavenumber k */
+        returns power spectrum as a function of wavenumber k
         """
 
         thisPofK = np.power(k, self.primordial_index) * np.power( self.TofK(k), 2.0);
@@ -483,7 +482,7 @@
 
     def TofK(self, k):
         """
-        /* returns transfer function as a function of wavenumber k. */
+        returns transfer function as a function of wavenumber k.
         """
         
         thisTofK = self.TF.TFmdm_onek_hmpc(k);
@@ -503,9 +502,9 @@
 
     def multiplicityfunction(self, sigma):
         """
-        /* Multiplicity function - this is where the various fitting functions/analytic 
+        Multiplicity function - this is where the various fitting functions/analytic 
         theories are different.  The various places where I found these fitting functions
-        are listed below.  */
+        are listed below.
         """
         
         nu = self.delta_c0 / sigma;
@@ -552,7 +551,7 @@
 
     def sigmaof_M_z(self, sigmabin, redshift):
         """
-        /* sigma(M, z) */
+        sigma(M, z)
         """
         
         thissigma = self.Dofz(redshift) * self.sigmaarray[sigmabin];
@@ -561,7 +560,7 @@
 
     def Dofz(self, redshift):
         """
-        /* Growth function */
+        Growth function
         """
 
         thisDofz = self.gofz(redshift) / self.gofz(0.0) / (1.0+redshift);
@@ -571,7 +570,7 @@
 
     def gofz(self, redshift):
         """
-        /* g(z) - I don't think this has any other name*/
+        g(z) - I don't think this has any other name
         """
 
         thisgofz = 2.5 * self.omega_matter_of_z(redshift) / \
@@ -585,7 +584,7 @@
 
     def omega_matter_of_z(self,redshift):
         """
-        /* Omega matter as a function of redshift */
+        Omega matter as a function of redshift
         """
         
         thisomofz = self.omega_matter0 * math.pow( 1.0+redshift, 3.0) / \
@@ -595,7 +594,7 @@
 
     def omega_lambda_of_z(self,redshift):
         """
-        /* Omega lambda as a function of redshift */
+        Omega lambda as a function of redshift
         """
 
         thisolofz = self.omega_lambda0 / math.pow( self.Eofz(redshift), 2.0 )
@@ -604,7 +603,7 @@
 
     def Eofz(self, redshift):
         """
-        /* E(z) - I don't think this has any other name */
+        E(z) - I don't think this has any other name
         """
         thiseofz = math.sqrt( self.omega_lambda0 \
             + (1.0 - self.omega_lambda0 - self.omega_matter0)*math.pow( 1.0+redshift, 2.0) \
@@ -614,15 +613,15 @@
 
 
 """ 
-/* Fitting Formulae for CDM + Baryon + Massive Neutrino (MDM) cosmologies. */
-/* Daniel J. Eisenstein & Wayne Hu, Institute for Advanced Study */
+Fitting Formulae for CDM + Baryon + Massive Neutrino (MDM) cosmologies.
+Daniel J. Eisenstein & Wayne Hu, Institute for Advanced Study
 
-/* There are two primary routines here, one to set the cosmology, the
+There are two primary routines here, one to set the cosmology, the
 other to construct the transfer function for a single wavenumber k. 
 You should call the former once (per cosmology) and the latter as 
-many times as you want. */
+many times as you want. 
 
-/* TFmdm_set_cosm() -- User passes all the cosmological parameters as
+   TFmdm_set_cosm() -- User passes all the cosmological parameters as
    arguments; the routine sets up all of the scalar quantities needed for
    computation of the fitting formula.  The input parameters are: 
    1) omega_matter -- Density of CDM, baryons, and massive neutrinos,
@@ -634,7 +633,7 @@
    6) hubble       -- Hubble constant, in units of 100 km/s/Mpc 
    7) redshift     -- The redshift at which to evaluate */
 
-/* TFmdm_onek_mpc() -- User passes a single wavenumber, in units of Mpc^-1.
+   TFmdm_onek_mpc() -- User passes a single wavenumber, in units of Mpc^-1.
    Routine returns the transfer function from the Eisenstein & Hu
    fitting formula, based on the cosmology currently held in the 
    internal variables.  The routine returns T_cb (the CDM+Baryon
@@ -642,29 +641,40 @@
    Baryon+Neutrino density-weighted transfer function) is stored
    in the global variable tf_cbnu. */
 
-/* We also supply TFmdm_onek_hmpc(), which is identical to the previous
-   routine, but takes the wavenumber in units of h Mpc^-1. */
+We also supply TFmdm_onek_hmpc(), which is identical to the previous
+routine, but takes the wavenumber in units of h Mpc^-1.
 
-/* We hold the internal scalar quantities in global variables, so that
-the user may access them in an external program, via "extern" declarations. */
+We hold the internal scalar quantities in global variables, so that
+the user may access them in an external program, via "extern" declarations.
 
-/* Please note that all internal length scales are in Mpc, not h^-1 Mpc! */
+Please note that all internal length scales are in Mpc, not h^-1 Mpc!
 """
 
 class TransferFunction(object):
     """
-    /* This routine takes cosmological parameters and a redshift and sets up
-    all the internal scalar quantities needed to compute the transfer function. */
-    /* INPUT: omega_matter -- Density of CDM, baryons, and massive neutrinos,
-                    in units of the critical density. */
-    /* 	  omega_baryon -- Density of baryons, in units of critical. */
-    /* 	  omega_hdm    -- Density of massive neutrinos, in units of critical */
-    /* 	  degen_hdm    -- (Int) Number of degenerate massive neutrino species */
-    /*        omega_lambda -- Cosmological constant */
-    /* 	  hubble       -- Hubble constant, in units of 100 km/s/Mpc */
-    /*        redshift     -- The redshift at which to evaluate */
-    /* OUTPUT: Returns 0 if all is well, 1 if a warning was issued.  Otherwise,
-        sets many global variables for use in TFmdm_onek_mpc() */
+    This routine takes cosmological parameters and a redshift and sets up
+    all the internal scalar quantities needed to compute the transfer function.
+
+    Parameters
+    ----------
+    omega_matter : float
+        Density of CDM, baryons, and massive neutrinos, in units 
+        of the critical density.
+    omega_baryon : float
+        Density of baryons, in units of critical.
+    omega_hdm : float
+        Density of massive neutrinos, in units of critical
+    degen_hdm : integer
+        Number of degenerate massive neutrino species
+    omega_lambda : float
+        Cosmological constant
+    hubble : float
+        Hubble constant, in units of 100 km/s/Mpc
+    redshift : float
+        The redshift at which to evaluate
+
+    Returns 0 if all is well, 1 if a warning was issued. Otherwise,
+    sets many global variables for use in TFmdm_onek_mpc()
     """
     def __init__(self, omega_matter, omega_baryon, omega_hdm,
                  degen_hdm, omega_lambda, hubble, redshift):
@@ -751,15 +761,23 @@
 
     def TFmdm_onek_mpc(self,  kk):
         """
-        /* Given a wavenumber in Mpc^-1, return the transfer function for the
-        cosmology held in the global variables. */
-        /* Input: kk -- Wavenumber in Mpc^-1 */
-        /* Output: The following are set as global variables:
-            growth_cb -- the transfer function for density-weighted
-                    CDM + Baryon perturbations. 
-            growth_cbnu -- the transfer function for density-weighted
-                    CDM + Baryon + Massive Neutrino perturbations. */
-        /* The function returns growth_cb */
+        Given a wavenumber in Mpc^-1, return the transfer function for the
+        cosmology held in the global variables.
+
+        Parameters
+        ----------
+        kk : float
+            Wavenumber in Mpc^-1
+
+        Returns
+        -------
+        growth_cb : float
+            the transfer function for density-weighted
+            CDM + Baryon perturbations. (returned and set as a global var)
+        growth_cbnu : float
+            the transfer function for density-weighted
+            CDM + Baryon + Massive Neutrino perturbations.
+            (set as a global var)
         """
     
         self.qq = kk/self.omhh*SQR(self.theta_cmb);
@@ -794,15 +812,22 @@
 
     def TFmdm_onek_hmpc(self, kk):
         """
-        /* Given a wavenumber in h Mpc^-1, return the transfer function for the
-        cosmology held in the global variables. */
-        /* Input: kk -- Wavenumber in h Mpc^-1 */
-        /* Output: The following are set as global variables:
-            growth_cb -- the transfer function for density-weighted
-                    CDM + Baryon perturbations. 
-            growth_cbnu -- the transfer function for density-weighted
-                    CDM + Baryon + Massive Neutrino perturbations. */
-        /* The function returns growth_cb */
+        Given a wavenumber in h Mpc^-1, return the transfer function for the
+        cosmology held in the global variables.
+
+        Parameters
+        ----------
+        kk : float
+            Wavenumber in h Mpc^-1
+
+        Returns
+        -------
+        growth_cb : float
+            the transfer function for density-weighted
+            CDM + Baryon perturbations. (return and set as a global var) 
+        growth_cbnu : float
+            the transfer function for density-weighted
+            CDM + Baryon + Massive Neutrino perturbations.
         """
         return self.TFmdm_onek_mpc(kk*self.hhubble);
 
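For reference, a hypothetical instantiation following the parameter list documented above (the cosmological values are illustrative only):

    from yt.analysis_modules.halo_mass_function.halo_mass_function import \
        TransferFunction

    tf = TransferFunction(omega_matter=0.3, omega_baryon=0.04, omega_hdm=0.0,
                          degen_hdm=1, omega_lambda=0.7, hubble=0.7,
                          redshift=0.0)
    print(tf.TFmdm_onek_hmpc(1.0))  # T_cb at k = 1 h Mpc^-1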

diff -r 8359b88e684ee3ba5faa32e32632ead3b07644a5 -r 5f4fc6ce70895d0ee68726c091c194d5a1dc2ecc yt/analysis_modules/sunyaev_zeldovich/projection.py
--- a/yt/analysis_modules/sunyaev_zeldovich/projection.py
+++ b/yt/analysis_modules/sunyaev_zeldovich/projection.py
@@ -78,7 +78,7 @@
 
     Parameters
     ----------
-    ds : Dataset
+    ds : ~yt.data_objects.static_output.Dataset 
         The dataset
     freqs : array_like
         The frequencies (in GHz) at which to compute the SZ spectral distortion.

diff -r 8359b88e684ee3ba5faa32e32632ead3b07644a5 -r 5f4fc6ce70895d0ee68726c091c194d5a1dc2ecc yt/data_objects/data_containers.py
--- a/yt/data_objects/data_containers.py
+++ b/yt/data_objects/data_containers.py
@@ -1957,7 +1957,7 @@
     multiple data objects.
 
     This object is not designed to be created directly; it is designed to be
-    created implicitly by using one of the bitwise operations (&, |, ^, ~) on
+    created implicitly by using one of the bitwise operations (&, \|, ^, \~) on
     one or two other data objects.  These correspond to the appropriate boolean
     operations, and the resultant object can be nested.
 

diff -r 8359b88e684ee3ba5faa32e32632ead3b07644a5 -r 5f4fc6ce70895d0ee68726c091c194d5a1dc2ecc yt/data_objects/static_output.py
--- a/yt/data_objects/static_output.py
+++ b/yt/data_objects/static_output.py
@@ -194,6 +194,8 @@
         self.data = weakref.WeakKeyDictionary()
 
     def __get__(self, instance, owner):
+        if not instance:
+            return None
         ret = self.data.get(instance, None)
         try:
             ret = ret.copy()
@@ -1056,7 +1058,7 @@
         Parameters
         ----------
 
-        input_array : iterable
+        input_array : Iterable
             A tuple, list, or array to attach units to
         input_units : String unit specification, unit symbol or astropy object
             The units of the array. Powers must be specified using python syntax

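The two added lines guard against attribute access on the class itself, where the descriptor protocol passes instance=None and the WeakKeyDictionary lookup would fail (None cannot be weak-referenced). A minimal sketch of the pattern, with a hypothetical name for the descriptor shown in the hunk:

    import weakref

    class CachedAttribute(object):       # hypothetical stand-in for the descriptor
        def __init__(self):
            self.data = weakref.WeakKeyDictionary()

        def __get__(self, instance, owner):
            if not instance:             # class-level access, e.g. during doc builds
                return None
            return self.data.get(instance, None)

    class Thing(object):
        attr = CachedAttribute()

    print(Thing.attr)    # None, rather than TypeError from weak-referencing None
    print(Thing().attr)  # None until a value is cached
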
diff -r 8359b88e684ee3ba5faa32e32632ead3b07644a5 -r 5f4fc6ce70895d0ee68726c091c194d5a1dc2ecc yt/fields/species_fields.py
--- a/yt/fields/species_fields.py
+++ b/yt/fields/species_fields.py
@@ -126,8 +126,8 @@
     """
     This takes a field registry, a fluid type, and two species names.  
     The first species name is one you wish to alias to an existing species
-    name.  For instance you might alias all "H_p0" fields to "H_" fields
-    to indicate that "H_" fields are really just neutral hydrogen fields.
+    name.  For instance you might alias all "H_p0" fields to "H\_" fields
+    to indicate that "H\_" fields are really just neutral hydrogen fields.
     This function registers field aliases for the density, number_density,
     mass, and fraction fields between the two species given in the arguments.
     """

diff -r 8359b88e684ee3ba5faa32e32632ead3b07644a5 -r 5f4fc6ce70895d0ee68726c091c194d5a1dc2ecc yt/fields/xray_emission_fields.py
--- a/yt/fields/xray_emission_fields.py
+++ b/yt/fields/xray_emission_fields.py
@@ -72,7 +72,7 @@
 
     Parameters
     ----------
-    table_type: string
+    table_type : string
         The type of data to use when computing the emissivity values. If "cloudy",
         a file called "cloudy_emissivity.h5" is used, for photoionized
         plasmas. If, "apec", a file called "apec_emissivity.h5" is used for 

diff -r 8359b88e684ee3ba5faa32e32632ead3b07644a5 -r 5f4fc6ce70895d0ee68726c091c194d5a1dc2ecc yt/frontends/chombo/io.py
--- a/yt/frontends/chombo/io.py
+++ b/yt/frontends/chombo/io.py
@@ -199,11 +199,9 @@
 
 def parse_orion_sinks(fn):
     '''
-
     Orion sink particles are stored in text files. This function
     is for figuring what particle fields are present based on the
-    number of entries per line in the *.sink file.
-
+    number of entries per line in the \*.sink file.
     '''
 
     # Figure out the format of the particle file

diff -r 8359b88e684ee3ba5faa32e32632ead3b07644a5 -r 5f4fc6ce70895d0ee68726c091c194d5a1dc2ecc yt/frontends/fits/misc.py
--- a/yt/frontends/fits/misc.py
+++ b/yt/frontends/fits/misc.py
@@ -49,7 +49,7 @@
 
     Parameters
     ----------
-    ds : Dataset
+    ds : `~yt.data_objects.static_output.Dataset`
         The FITS events file dataset to add the counts fields to.
     ebounds : list of tuples
         A list of tuples, one for each field, with (emin, emax) as the

diff -r 8359b88e684ee3ba5faa32e32632ead3b07644a5 -r 5f4fc6ce70895d0ee68726c091c194d5a1dc2ecc yt/frontends/stream/data_structures.py
--- a/yt/frontends/stream/data_structures.py
+++ b/yt/frontends/stream/data_structures.py
@@ -903,7 +903,7 @@
 
     Parameters
     ----------
-    base_ds : Dataset
+    base_ds : `~yt.data_objects.static_output.Dataset`
         This is any static output.  It can also be a stream static output, for
         instance as returned by load_uniform_data.
     refinement_criteria : list of :class:`~yt.utilities.flagging_methods.FlaggingMethod`
@@ -1167,7 +1167,7 @@
     r"""Define the cell coordinates and cell neighbors of a hexahedral mesh
     for a semistructured grid. Used to specify the connectivity and
     coordinates parameters used in
-    :function:`~yt.frontends.stream.data_structures.load_hexahedral_mesh`.
+    :func:`~yt.frontends.stream.data_structures.load_hexahedral_mesh`.
 
     Parameters
     ----------

diff -r 8359b88e684ee3ba5faa32e32632ead3b07644a5 -r 5f4fc6ce70895d0ee68726c091c194d5a1dc2ecc yt/frontends/ytdata/data_structures.py
--- a/yt/frontends/ytdata/data_structures.py
+++ b/yt/frontends/ytdata/data_structures.py
@@ -275,7 +275,7 @@
             # since this is now particle-like data.
             data_type = self.parameters.get("data_type")
             container_type = self.parameters.get("container_type")
-            ex_container_type = ["cutting", "proj", "ray", "slice"]
+            ex_container_type = ["cutting", "proj", "ray", "slice", "cut_region"]
             if data_type == "yt_light_ray" or container_type in ex_container_type:
                 mylog.info("Returning an all_data data container.")
                 return self.all_data()

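With "cut_region" in that list, a reloaded cut region is treated as particle-like data and handed back as an all_data container. A sketch mirroring the new test below (dataset path hypothetical):

    import yt

    ds = yt.load("DD0064/DD0064")        # hypothetical dataset
    sp = ds.sphere("max", (1.0, "Mpc"))
    cr = ds.cut_region(sp, ['obj["temperature"] > 1e4'])
    fn = cr.save_as_dataset(fields=["temperature"])
    cr_ds = yt.load(fn)
    ad = cr_ds.all_data()                # returned automatically for cut_region data
    assert (cr["temperature"] == ad["temperature"]).all()
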
diff -r 8359b88e684ee3ba5faa32e32632ead3b07644a5 -r 5f4fc6ce70895d0ee68726c091c194d5a1dc2ecc yt/frontends/ytdata/tests/test_outputs.py
--- a/yt/frontends/ytdata/tests/test_outputs.py
+++ b/yt/frontends/ytdata/tests/test_outputs.py
@@ -100,6 +100,12 @@
     assert isinstance(sphere_ds, YTDataContainerDataset)
     yield YTDataFieldTest(full_fn, ("grid", "density"))
     yield YTDataFieldTest(full_fn, ("all", "particle_mass"))
+    cr = ds.cut_region(sphere, ['obj["temperature"] > 1e4'])
+    fn = cr.save_as_dataset(fields=["temperature"])
+    full_fn = os.path.join(tmpdir, fn)
+    cr_ds = load(full_fn)
+    assert isinstance(cr_ds, YTDataContainerDataset)
+    assert (cr["temperature"] == cr_ds.data["temperature"]).all()
     os.chdir(curdir)
     shutil.rmtree(tmpdir)
 

diff -r 8359b88e684ee3ba5faa32e32632ead3b07644a5 -r 5f4fc6ce70895d0ee68726c091c194d5a1dc2ecc yt/frontends/ytdata/utilities.py
--- a/yt/frontends/ytdata/utilities.py
+++ b/yt/frontends/ytdata/utilities.py
@@ -239,4 +239,9 @@
         val = np.array(val)
         if val.dtype.kind == 'U':
             val = val.astype('|S')
-    fh.attrs[str(attr)] = val
+    try:
+        fh.attrs[str(attr)] = val
+    # This is raised if no HDF5 equivalent exists.
+    # In that case, save its string representation.
+    except TypeError:
+        fh.attrs[str(attr)] = str(val)

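The fallback covers values h5py cannot map onto an HDF5 type, storing their string representation instead of raising. A sketch (filename hypothetical):

    import h5py

    with h5py.File("attr_demo.h5", "w") as fh:
        val = {"left_edge": [0.0, 0.0, 0.0]}   # dict: no native HDF5 equivalent
        try:
            fh.attrs["container_info"] = val
        except TypeError:
            fh.attrs["container_info"] = str(val)
        print(fh.attrs["container_info"])      # the dict's string form
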
diff -r 8359b88e684ee3ba5faa32e32632ead3b07644a5 -r 5f4fc6ce70895d0ee68726c091c194d5a1dc2ecc yt/funcs.py
--- a/yt/funcs.py
+++ b/yt/funcs.py
@@ -747,6 +747,7 @@
 
     With a name provided by the user, this will decide how to 
     appropriately name the output file by the following rules:
+
     1. if name is None, the filename will be the keyword plus 
        the suffix.
     2. if name ends with "/", assume name is a directory and 

diff -r 8359b88e684ee3ba5faa32e32632ead3b07644a5 -r 5f4fc6ce70895d0ee68726c091c194d5a1dc2ecc yt/pmods.py
--- a/yt/pmods.py
+++ b/yt/pmods.py
@@ -79,13 +79,13 @@
 # LLC, and shall not be used for advertising or product endorsement
 # purposes.
 
-"""MPI_Import defines an mpi-aware import hook. The standard use of
-this module is as follows:
+"""``MPI_Import`` defines an mpi-aware import hook. The standard use of
+this module is as follows::
 
-   from MPI_Import import mpi_import
-   with mpi_import():
-      import foo
-      import bar
+    from MPI_Import import mpi_import
+    with mpi_import():
+        import foo
+        import bar
 
 Within the with block, the standard import statement is replaced by an
 MPI-aware import statement. The rank 0 process finds the location of
@@ -94,35 +94,35 @@
 
 One CRITICAL detail: any code inside the mpi_import block must be
 executed exactly the same on all of the MPI ranks. For example,
-consider this:
+consider this::
 
-def foo():
-   import mpi
-   if mpi.rank == 0:
-      bar = someFunction()
-   bar = mpi.bcast(bar,root=0)
+    def foo():
+        import mpi
+        if mpi.rank == 0:
+            bar = someFunction()
+        bar = mpi.bcast(bar,root=0)
+    
+    def someFunction():
+        import os
+        return os.name
 
-def someFunction():
-   import os
-   return os.name
-
-If foo() is called during the import process, then things may go very
+If ``foo()`` is called during the import process, then things may go very
 wrong. If the os module hasn't been loaded, then the rank 0 process
 will find os and broadcast its location. Since there's no
-corresponding bcast for rank > 0, the other processes will receive
+corresponding bcast for ``rank > 0``, the other processes will receive
 that broadcast instead of the broadcast for bar, resulting in
-undefined behavior. Similarly, if rank >0 process encounters an import
-that rank 0 does not encounter, that process will either hang waiting
+undefined behavior. Similarly, if ``rank > 0`` process encounters an import
+that ``rank`` 0 does not encounter, that process will either hang waiting
 for the bcast, or it will receive an out-of-order bcast.
 
 The import hook provides a way to test whether we're using this
 importer, which can be used to disable rank-asymmetric behavior in a
-module import:
+module import::
 
-from yt.extern.six.moves import builtins
-hasattr(builtins.__import__,"mpi_import")
+    from yt.extern.six.moves import builtins
+    hasattr(builtins.__import__,"mpi_import")
 
-This evaluates to True only when we're in an mpi_import() context
+This evaluates to True only when we're in an ``mpi_import()`` context
 manager.
 
 There are some situations where rank-dependent code may be necessary.
@@ -130,66 +130,66 @@
 tends to cause deadlocks when it is executed inside an mpi_imported
 module. In that case, we provide a hook to execute a function after
 the mpi_import hook has been replaced by the standard import hook.
-Here is an example showing the use of this feature:
+Here is an example showing the use of this feature::
 
-# encapsulate the rank-asymmetric code in a function
-def f():
-    if mpi.rank == 0:
-        doOneThing()
-    else:
-        doSomethingElse()
+    # encapsulate the rank-asymmetric code in a function
+    def f():
+        if mpi.rank == 0:
+            doOneThing()
+        else:
+            doSomethingElse()
 
-# Either importer is None (standard import) or it's a reference to
-# the mpi_import object that owns the current importer.
-from yt.extern.six.moves import builtins
-importer = getattr(builtins.__import__,"mpi_import",None)
-if importer:
-    importer.callAfterImport(f)
-else:
-    # If we're using the standard import, then we'll execute the
-    # code in f immediately
-    f()
+    # Either importer is None (standard import) or it's a reference to
+    # the mpi_import object that owns the current importer.
+    from yt.extern.six.moves import builtins
+    importer = getattr(builtins.__import__,"mpi_import",None)
+    if importer:
+        importer.callAfterImport(f)
+    else:
+        # If we're using the standard import, then we'll execute the
+        # code in f immediately
+        f()
 
-WARNING: the callAfterImport feature is not intended for casual use.
+WARNING: the ``callAfterImport`` feature is not intended for casual use.
 Usually it will be sufficient (and preferable) to either remove the
 rank-asymmetric code or explicitly move it outside of the 'with
 mpi_import' block. callAfterImport is provided for the (hopefully
 rare!) cases where this does not suffice.
 
-
-Some implementation details:
+Some implementation details
+---------------------------
 
--This code is based on knee.py, which is an example of a pure Python
- hierarchical import that was included with Python 2.6 distributions.
+* This code is based on knee.py, which is an example of a pure Python
+  hierarchical import that was included with Python 2.6 distributions.
 
--Python PEP 302 defines another way to override import by using finder
- and loader objects, which behave similarly to the imp.find_module and
- imp.load_module functions in __import_module__ below. Unfortunately,
- the implementation of PEP 302 is such that the path for the module
- has already been found by the time that the "finder" object is
- constructed, so it's not suitable for our purposes.
+* Python PEP 302 defines another way to override import by using finder
+  and loader objects, which behave similarly to the imp.find_module and
+  imp.load_module functions in __import_module__ below. Unfortunately,
+  the implementation of PEP 302 is such that the path for the module
+  has already been found by the time that the "finder" object is
+  constructed, so it's not suitable for our purposes.
 
--This module uses pyMPI. It was originally designed with mpi4py, and
- switching back to mpi4py requires only minor modifications. To
- quickly substitute mpi4py for pyMPI, the 'import mpi' line below can
- be replaced with the following wrapper:
+* This module uses pyMPI. It was originally designed with mpi4py, and
+  switching back to mpi4py requires only minor modifications. To
+  quickly substitute mpi4py for pyMPI, the 'import mpi' line below can
+  be replaced with the following wrapper::
 
-from mpi4py import MPI
-class mpi(object):
-    rank = MPI.COMM_WORLD.Get_rank()
-    @staticmethod
-    def bcast(obj=None,root=0):
-        return MPI.COMM_WORLD.bcast(obj,root)
+    from mpi4py import MPI
+    class mpi(object):
+        rank = MPI.COMM_WORLD.Get_rank()
+        @staticmethod
+        def bcast(obj=None,root=0):
+            return MPI.COMM_WORLD.bcast(obj,root)
 
--An alternate version of this module had rank 0 perform all of the
- lookups, and then broadcast the locations all-at-once when that
- process reached the end of the context manager. This was somewhat
- faster than the current implementation, but was prone to deadlock
- when loading modules containing MPI synchronization points.
-
--The 'level' parameter to the import hook is not handled correctly; we
- treat it as if it were -1 (try relative and absolute imports). For
- more information about the level parameter, run 'help(__import__)'.
+* An alternate version of this module had rank 0 perform all of the
+  lookups, and then broadcast the locations all-at-once when that
+  process reached the end of the context manager. This was somewhat
+  faster than the current implementation, but was prone to deadlock
+  when loading modules containing MPI synchronization points.
+  
+* The ``level`` parameter to the import hook is not handled correctly; we
+  treat it as if it were -1 (try relative and absolute imports). For
+  more information about the level parameter, run ``help(__import__)``.
 """
 from __future__ import print_function
 

diff -r 8359b88e684ee3ba5faa32e32632ead3b07644a5 -r 5f4fc6ce70895d0ee68726c091c194d5a1dc2ecc yt/testing.py
--- a/yt/testing.py
+++ b/yt/testing.py
@@ -353,13 +353,13 @@
     This dataset allows you to easily explore orientations and
     handiness in VR and other renderings
 
-    Parameters:
-    -----------
+    Parameters
+    ----------
 
-    N: integer
+    N : integer
        The number of cells along each direction
 
-    scale: float
+    scale : float
        A spatial scale, the domain boundaries will be multiplied by scale to
        test datasets that have spatially different scales (e.g. data in CGS units)
 
@@ -811,17 +811,17 @@
     Examples
     --------
 
-    @check_results
-    def my_func(ds):
-        return ds.domain_width
+    >>> @check_results
+    ... def my_func(ds):
+    ...     return ds.domain_width
 
-    my_func(ds)
+    >>> my_func(ds)
 
-    @check_results
-    def field_checker(dd, field_name):
-        return dd[field_name]
+    >>> @check_results
+    ... def field_checker(dd, field_name):
+    ...     return dd[field_name]
 
-    field_cheker(ds.all_data(), 'density', result_basename='density')
+    >>> field_checker(ds.all_data(), 'density', result_basename='density')
 
     """
     def compute_results(func):
@@ -952,9 +952,12 @@
         with the units of ``actual`` and ``desired``. If no units are attached,
         assumes the same units as ``desired``. Defaults to zero.
 
+    Notes
+    -----
     Also accepts additional keyword arguments accepted by
     :func:`numpy.testing.assert_allclose`, see the documentation of that
     function for details.
+
     """
     # Create a copy to ensure this function does not alter input arrays
     act = YTArray(actual)
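
For reference, a small usage sketch of ``assert_allclose_units`` as
documented above (the values are illustrative; only yt itself is assumed)::

    from yt.testing import assert_allclose_units
    from yt.units.yt_array import YTArray

    actual = YTArray([1.0, 2.0], "km")
    desired = YTArray([1000.0, 2000.0], "m")
    # Extra keyword arguments such as rtol are forwarded to
    # numpy.testing.assert_allclose, per the Notes section above.
    assert_allclose_units(actual, desired, rtol=1e-12)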

diff -r 8359b88e684ee3ba5faa32e32632ead3b07644a5 -r 5f4fc6ce70895d0ee68726c091c194d5a1dc2ecc yt/units/yt_array.py
--- a/yt/units/yt_array.py
+++ b/yt/units/yt_array.py
@@ -228,7 +228,7 @@
     Parameters
     ----------
 
-    input_array : iterable
+    input_array : Iterable
         A tuple, list, or array to attach units to
     input_units : String unit specification, unit symbol object, or astropy units
         The units of the array. Powers must be specified using python
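
A one-line illustration of the ``input_array``/``input_units`` pair
documented above (the values are made up)::

    from yt.units.yt_array import YTArray

    rho = YTArray([1.0, 2.0, 3.0], "g/cm**3")  # iterable plus a unit string
    print(rho.in_units("kg/m**3"))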

diff -r 8359b88e684ee3ba5faa32e32632ead3b07644a5 -r 5f4fc6ce70895d0ee68726c091c194d5a1dc2ecc yt/utilities/grid_data_format/writer.py
--- a/yt/utilities/grid_data_format/writer.py
+++ b/yt/utilities/grid_data_format/writer.py
@@ -50,11 +50,13 @@
     dataset_units : dictionary, optional
         A dictionary of (value, unit) tuples to set the default units
         of the dataset. Keys can be:
-            "length_unit"
-            "time_unit"
-            "mass_unit"
-            "velocity_unit"
-            "magnetic_unit"
+
+        * "length_unit"
+        * "time_unit"
+        * "mass_unit"
+        * "velocity_unit"
+        * "magnetic_unit"
+
         If not specified, these will carry over from the parent
         dataset.
     particle_type_name : string, optional
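
A hedged sketch of passing ``dataset_units`` to the GDF writer documented
above; the dataset path, units, and output filename are illustrative only::

    import yt
    from yt.utilities.grid_data_format.writer import write_to_gdf

    ds = yt.load("MyDataset/data0001")  # hypothetical dataset
    write_to_gdf(ds, "output.gdf",
                 dataset_units={"length_unit": (1.0, "Mpc"),
                                "time_unit": (1.0, "Myr")})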

diff -r 8359b88e684ee3ba5faa32e32632ead3b07644a5 -r 5f4fc6ce70895d0ee68726c091c194d5a1dc2ecc yt/utilities/hierarchy_inspection.py
--- a/yt/utilities/hierarchy_inspection.py
+++ b/yt/utilities/hierarchy_inspection.py
@@ -12,7 +12,7 @@
 
     Parameters
     ----------
-    candidates : iterable
+    candidates : Iterable
         An iterable object that is a collection of classes to find the lowest
         subclass of.
 

diff -r 8359b88e684ee3ba5faa32e32632ead3b07644a5 -r 5f4fc6ce70895d0ee68726c091c194d5a1dc2ecc yt/utilities/math_utils.py
--- a/yt/utilities/math_utils.py
+++ b/yt/utilities/math_utils.py
@@ -61,7 +61,7 @@
     pos : array
         An array of floats.
 
-    ds : Dataset
+    ds : ~yt.data_objects.static_output.Dataset 
         A simulation static output.
 
     Examples
@@ -852,7 +852,7 @@
     Parameters
     ----------
     maxr : scalar
-        should be max(|x|, |y|)
+        should be ``max(|x|, |y|)``
 
     aspect : scalar
         The aspect ratio of width / height for the projection.

diff -r 8359b88e684ee3ba5faa32e32632ead3b07644a5 -r 5f4fc6ce70895d0ee68726c091c194d5a1dc2ecc yt/utilities/parallel_tools/io_runner.py
--- a/yt/utilities/parallel_tools/io_runner.py
+++ b/yt/utilities/parallel_tools/io_runner.py
@@ -13,7 +13,7 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
-import np
+import numpy as np
 from yt.utilities.logger import ytLogger as mylog
 from .parallel_analysis_interface import \
     ProcessorPool, parallel_objects

diff -r 8359b88e684ee3ba5faa32e32632ead3b07644a5 -r 5f4fc6ce70895d0ee68726c091c194d5a1dc2ecc yt/utilities/parallel_tools/parallel_analysis_interface.py
--- a/yt/utilities/parallel_tools/parallel_analysis_interface.py
+++ b/yt/utilities/parallel_tools/parallel_analysis_interface.py
@@ -434,7 +434,7 @@
 
     Parameters
     ----------
-    objects : iterable
+    objects : Iterable
         The list of objects to dispatch to different processors.
     njobs : int
         How many jobs to spawn.  By default, one job will be dispatched for
@@ -540,7 +540,7 @@
 
     Parameters
     ----------
-    objects : iterable
+    objects : Iterable
         The list of objects to operate on.
     generator_func : callable
         This function will be called on each object, and the results yielded.
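
A short, hedged sketch of ``parallel_objects`` dispatching work, per the
docstring above (filenames are illustrative; run under e.g. ``mpirun -np 4``)::

    import yt
    yt.enable_parallelism()

    fns = ["data0001", "data0002", "data0003", "data0004"]
    # njobs=2 splits the communicator into two job groups.
    for fn in yt.parallel_objects(fns, njobs=2):
        ds = yt.load(fn)
        print(ds.domain_width)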

diff -r 8359b88e684ee3ba5faa32e32632ead3b07644a5 -r 5f4fc6ce70895d0ee68726c091c194d5a1dc2ecc yt/utilities/sdf.py
--- a/yt/utilities/sdf.py
+++ b/yt/utilities/sdf.py
@@ -431,17 +431,16 @@
 
     Parameters
     ----------
-    filename: string
-    The filename associated with the data to be loaded.
-    header: string, optional
-    If separate from the data file, a file containing the
-    header can be specified. Default: None.
+    filename : string
+        The filename associated with the data to be loaded.
+    header : string, optional
+        If separate from the data file, a file containing the
+        header can be specified. Default: None.
 
     Returns
     -------
     self : SDFRead object
-    Dict-like container of parameters and data.
-
+        Dict-like container of parameters and data.
 
     References
     ----------
@@ -1270,11 +1269,15 @@
         Return list of data chunks for a cell on the given level
         plus a padding around the cell, for a list of fields.
 
-        Returns:
-            data: A list of dictionaries of data.
+        Returns
+        -------
+        data : list
+            A list of dictionaries of data.
 
-        chunks = midx.get_padded_bbox_data(6, np.array([128]*3),
-                                             8.0, ['x','y','z','ident'])
+        Examples
+        --------
+        >>> chunks = midx.get_padded_bbox_data(6, np.array([128]*3),
+        ...                                    8.0, ['x','y','z','ident'])
 
         """
         _ensure_xyz_fields(fields)
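
For reference, a minimal sketch of the ``SDFRead`` interface documented in
the hunk above (the filename and header path are illustrative)::

    from yt.utilities.sdf import SDFRead

    sdf = SDFRead("halos.sdf", header="halos.hdr")  # hypothetical files
    print(sdf.parameters)    # dict-like container of parameters...
    print(list(sdf.keys()))  # ...and data arrays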

diff -r 8359b88e684ee3ba5faa32e32632ead3b07644a5 -r 5f4fc6ce70895d0ee68726c091c194d5a1dc2ecc yt/utilities/spatial/__init__.py
--- a/yt/utilities/spatial/__init__.py
+++ b/yt/utilities/spatial/__init__.py
@@ -1,12 +1,9 @@
 """
-=============================================================
 Spatial algorithms and data structures (:mod:`scipy.spatial`)
-=============================================================
 
 Nearest-neighbor queries:
 
 .. autosummary::
-   :toctree: generated/
 
    KDTree      -- class for efficient nearest-neighbor queries
    cKDTree     -- class for efficient nearest-neighbor queries (faster impl.)
@@ -15,7 +12,6 @@
 Delaunay triangulation:
 
 .. autosummary::
-   :toctree: generated/
 
    Delaunay
    tsearch

diff -r 8359b88e684ee3ba5faa32e32632ead3b07644a5 -r 5f4fc6ce70895d0ee68726c091c194d5a1dc2ecc yt/utilities/spatial/ckdtree.pyx
--- a/yt/utilities/spatial/ckdtree.pyx
+++ b/yt/utilities/spatial/ckdtree.pyx
@@ -639,25 +639,31 @@
         """ query the tree for the nearest neighbors, to get the density
             of particles for chainHOP.
 
-        Parameters:
-        ===========
+        Parameters
+        ----------
 
-        mass: A array-like list of the masses of the particles, in the same
+        mass : array
+            An array-like list of the masses of the particles, in the same
             order as the data that went into building the kd tree.
 
-        num_neighbors: Optional, the number of neighbors to search for and to
+        num_neighbors : integer, optional 
+            The number of neighbors to search for and to
             use in the density calculation. Default is 65, and is probably what
             one should stick with.
 
-        nMerge: The number of nearest neighbor tags to return for each particle.
+        nMerge : integer, optional
+            The number of nearest neighbor tags to return for each particle.
+            Defaults to 6.
 
-        Returns:
-        ========
+        Returns
+        -------
 
-        dens: An array of the densities for each particle, in the same order
+        dens : array 
+            An array of the densities for each particle, in the same order
             as the input data.
 
-        tags: A two-dimensional array of the indexes, nMerge nearest neighbors
+        tags : array 
+            A two-dimensional array of the indexes, nMerge nearest neighbors
             for each particle.
 
         """
@@ -723,20 +729,23 @@
         """ query the tree in chunks, between start and finish, recording the
             nearest neighbors.
 
-        Parameters:
-        ===========
+        Parameters
+        ----------
 
-        start: The starting point in the dataset for this search.
+        start : integer
+            The starting point in the dataset for this search.
 
-        finish: The ending point in the dataset for this search.
+        finish : integer
+            The ending point in the dataset for this search.
 
-        num_neighbors: Optional, the number of neighbors to search for.
-            The default is 65.
+        num_neighbors : integer, optional
+            The number of neighbors to search for. The default is 65.
 
-        Returns:
-        ========
+        Returns
+        -------
 
-        chunk_tags: A two-dimensional array of the nearest neighbor tags for the
+        chunk_tags : array
+            A two-dimensional array of the nearest neighbor tags for the
             points in this search.
 
         """

diff -r 8359b88e684ee3ba5faa32e32632ead3b07644a5 -r 5f4fc6ce70895d0ee68726c091c194d5a1dc2ecc yt/utilities/spatial/distance.py
--- a/yt/utilities/spatial/distance.py
+++ b/yt/utilities/spatial/distance.py
@@ -12,7 +12,6 @@
 stored in a rectangular array.
 
 .. autosummary::
-   :toctree: generated/
 
    pdist   -- pairwise distances between observation vectors.
    cdist   -- distances between two collections of observation vectors
@@ -23,7 +22,6 @@
 for computing the number of observations in a distance matrix.
 
 .. autosummary::
-   :toctree: generated/
 
    is_valid_dm -- checks for a valid distance matrix
    is_valid_y  -- checks for a valid condensed distance matrix
@@ -35,7 +33,6 @@
 functions. Use ``pdist`` for this purpose.
 
 .. autosummary::
-   :toctree: generated/
 
    braycurtis       -- the Bray-Curtis distance.
    canberra         -- the Canberra distance.
@@ -383,9 +380,9 @@
 
 
 def jaccard(u, v):
-    """
+    r"""
     Computes the Jaccard-Needham dissimilarity between two boolean
-    n-vectors u and v, which is
+    ``n``-vectors ``u`` and ``v``, which is
 
     .. math::
 
@@ -399,9 +396,9 @@
     Parameters
     ----------
     u : ndarray
-        An :math:`n`-dimensional vector.
+        An ``n``-dimensional vector.
     v : ndarray
-        An :math:`n`-dimensional vector.
+        An ``n``-dimensional vector.
 
     Returns
     -------
@@ -417,9 +414,9 @@
 
 
 def kulsinski(u, v):
-    """
-    Computes the Kulsinski dissimilarity between two boolean n-vectors
-    u and v, which is defined as
+    r"""
+    Computes the Kulsinski dissimilarity between two boolean ``n``-vectors
+    ``u`` and ``v``, which is defined as
 
     .. math::
 
@@ -433,9 +430,9 @@
     Parameters
     ----------
     u : ndarray
-        An :math:`n`-dimensional vector.
+        An ``n``-dimensional vector.
     v : ndarray
-        An :math:`n`-dimensional vector.
+        An ``n``-dimensional vector.
 
     Returns
     -------
@@ -918,9 +915,9 @@
     r"""
     Computes the pairwise distances between m original observations in
     n-dimensional space. Returns a condensed distance matrix Y.  For
-    each :math:`i` and :math:`j` (where :math:`i<j<n`), the
+    each ``i`` and ``j`` (where ``i < j < n``), the
     metric ``dist(u=X[i], v=X[j])`` is computed and stored in the
-    :math:`ij`th entry.
+    ``ij``-th entry.
 
     See ``squareform`` for information on how to calculate the index of
     this entry or to convert the condensed distance matrix to a
@@ -972,7 +969,7 @@
           1 - \frac{uv^T}
                    {{|u|}_2 {|v|}_2}
 
-       where |*|_2 is the 2 norm of its argument *.
+       where :math:`|*|_2` is the 2-norm of its argument \*.
 
     7. ``Y = pdist(X, 'correlation')``
 
@@ -1361,49 +1358,45 @@
 
     Parameters
     ----------
-       X : ndarray
-           Either a condensed or redundant distance matrix.
+    X : ndarray
+        Either a condensed or redundant distance matrix.
 
     Returns
     -------
-       Y : ndarray
-           If a condensed distance matrix is passed, a redundant
-           one is returned, or if a redundant one is passed, a
-           condensed distance matrix is returned.
-
-       force : string
-           As with MATLAB(TM), if force is equal to 'tovector' or
-           'tomatrix', the input will be treated as a distance matrix
-           or distance vector respectively.
+    Y : ndarray
+        If a condensed distance matrix is passed, a redundant
+        one is returned, or if a redundant one is passed, a
+        condensed distance matrix is returned.
+    force : string
+        As with MATLAB(TM), if force is equal to 'tovector' or
+        'tomatrix', the input will be treated as a distance matrix
+        or distance vector respectively.
 
-       checks : bool
-           If ``checks`` is set to ``False``, no checks will be made
-           for matrix symmetry nor zero diagonals. This is useful if
-           it is known that ``X - X.T1`` is small and ``diag(X)`` is
-           close to zero. These values are ignored any way so they do
-           not disrupt the squareform transformation.
+    checks : bool
+        If ``checks`` is set to ``False``, no checks will be made
+        for matrix symmetry nor zero diagonals. This is useful if
+        it is known that ``X - X.T1`` is small and ``diag(X)`` is
+        close to zero. These values are ignored any way so they do
+        not disrupt the squareform transformation.
 
-
-    Calling Conventions
-    -------------------
+    Notes
+    -----
 
     1. v = squareform(X)
 
-       Given a square d by d symmetric distance matrix ``X``,
-       ``v=squareform(X)`` returns a :math:`d*(d-1)/2` (or
-       `${n \choose 2}$`) sized vector v.
-
-      v[{n \choose 2}-{n-i \choose 2} + (j-i-1)] is the distance
-      between points i and j. If X is non-square or asymmetric, an error
-      is returned.
+      Given a square d by d symmetric distance matrix ``X``, ``v=squareform(X)``
+      returns a :math:`d*(d-1)/2` (or :math:`{n \choose 2}`) sized vector ``v``.
+      :math:`v[{n \choose 2}-{n-i \choose 2} + (j-i-1)]` is the distance between
+      points ``i`` and ``j``. If ``X`` is non-square or asymmetric, an error is
+      returned.
 
-    X = squareform(v)
+    2. X = squareform(v)
 
-      Given a d*d(-1)/2 sized v for some integer d>=2 encoding distances
-      as described, X=squareform(v) returns a d by d distance matrix X. The
-      X[i, j] and X[j, i] values are set to
-      v[{n \choose 2}-{n-i \choose 2} + (j-u-1)] and all
-      diagonal elements are zero.
+      Given a ``d*(d-1)/2`` sized ``v`` for some integer ``d>=2`` encoding
+      distances as described, ``X=squareform(v)`` returns a ``d`` by ``d``
+      distance matrix ``X``. The ``X[i, j]`` and ``X[j, i]`` values are set to
+      :math:`v[{n \choose 2}-{n-i \choose 2} + (j-i-1)]` and all diagonal
+      elements are zero.
 
     """
 
@@ -1489,7 +1482,7 @@
     ----------
     D : ndarray
         The candidate object to test for validity.
-    tol : double
+    tol : float
         The distance matrix should be symmetric. tol is the maximum
         difference between the :math:`ij`th entry and the
         :math:`ji`th entry for the distance metric to be
@@ -1750,7 +1743,7 @@
           \frac{1 - uv^T}
                {{|u|}_2 {|v|}_2}
 
-       where :math:`|*|_2` is the 2-norm of its argument *.
+       where :math:`|*|_2` is the 2-norm of its argument \*.
 
     7. ``Y = cdist(XA, XB, 'correlation')``
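
The conventions in the rewritten ``squareform`` docstring can be checked
directly against upstream SciPy, which this vendored module mirrors::

    import numpy as np
    from scipy.spatial.distance import pdist, squareform

    X = np.random.rand(5, 3)
    v = pdist(X)       # condensed form: d*(d-1)/2 = 10 entries for d = 5
    M = squareform(v)  # redundant form: 5x5, symmetric, zero diagonal
    assert np.allclose(squareform(M), v)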
 

diff -r 8359b88e684ee3ba5faa32e32632ead3b07644a5 -r 5f4fc6ce70895d0ee68726c091c194d5a1dc2ecc yt/utilities/spatial/kdtree.py
--- a/yt/utilities/spatial/kdtree.py
+++ b/yt/utilities/spatial/kdtree.py
@@ -547,7 +547,7 @@
         Parameters
         ==========
 
-        other : KDTree
+        other : `~yt.utilities.spatial.kdtree.KDTree`
             The tree containing points to search against
         r : positive float
             The maximum distance
@@ -719,7 +719,7 @@
         Parameters
         ==========
 
-        other : KDTree
+        other : `~yt.utilities.spatial.kdtree.KDTree`
 
         r : float or one-dimensional array of floats
             The radius to produce a count for. Multiple radii are searched with a single
@@ -790,14 +790,14 @@
         any distance greater than max_distance.
 
         Parameters
-        ==========
+        ----------
 
-        other : KDTree
+        other : `~yt.utilities.spatial.kdtree.KDTree`
 
         max_distance : positive float
 
         Returns
-        =======
+        -------
 
         result : dok_matrix
             Sparse matrix representing the results in "dictionary of keys" format.

This diff is so big that we needed to truncate the remainder.
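
Likewise, the cross-tree queries whose docstrings are cleaned up above
behave as in upstream SciPy; a small sketch::

    import numpy as np
    from scipy.spatial import KDTree

    a = KDTree(np.random.rand(20, 3))
    b = KDTree(np.random.rand(15, 3))
    pairs = a.query_ball_tree(b, r=0.25)                # list of index lists
    D = a.sparse_distance_matrix(b, max_distance=0.25)  # dok_matrix result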

https://bitbucket.org/yt_analysis/yt/commits/6fef7faaec95/
Changeset:   6fef7faaec95
Branch:      yt
User:        brittonsmith
Date:        2017-03-17 18:26:24+00:00
Summary:     Merging with tip.
Affected #:  20 files

diff -r 5f4fc6ce70895d0ee68726c091c194d5a1dc2ecc -r 6fef7faaec95455acdaf432aae4585a767f83145 doc/Makefile
--- a/doc/Makefile
+++ b/doc/Makefile
@@ -33,23 +33,24 @@
 	@echo "  linkcheck   to check all external links for integrity"
 	@echo "  doctest     to run all doctests embedded in the documentation (if enabled)"
 	@echo "  clean 	     to remove the build directory"
-	@echo "  fullclean   to remove the build directory and autogenerated api docs"
 	@echo "  recipeclean to remove files produced by running the cookbook scripts"
 
 clean:
 	-rm -rf $(BUILDDIR)/*
+	-rm -rf source/reference/api/yt.*
+	-rm -rf source/reference/api/modules.rst
 
-fullclean:
-	-rm -rf $(BUILDDIR)/*
-	-rm -rf source/reference/api/generated
+fullclean: clean
 
 recipeclean:
 	-rm -rf _temp/*.done source/cookbook/_static/*
 
 html:
-	sphinx-apidoc -o source/reference/api/ -e ../yt ../yt/extern/* \
-		$(shell find ../yt -name "*tests*" -type d) ../yt/utilities/voropp*
-	sed -e '/show-inheritance/ a\ \ \ \ :inherited-members:' -i source/reference/api/yt*.rst
+ifneq ($(READTHEDOCS),True)
+	SPHINX_APIDOC_OPTIONS=members,undoc-members,inherited-members,show-inheritance sphinx-apidoc \
+        -o source/reference/api/ \
+        -e ../yt ../yt/extern/* $(shell find ../yt -name "*tests*" -type d) ../yt/utilities/voropp*
+endif
 	$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
 	@echo
 	@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."

diff -r 5f4fc6ce70895d0ee68726c091c194d5a1dc2ecc -r 6fef7faaec95455acdaf432aae4585a767f83145 doc/source/analyzing/analysis_modules/star_analysis.rst
--- a/doc/source/analyzing/analysis_modules/star_analysis.rst
+++ b/doc/source/analyzing/analysis_modules/star_analysis.rst
@@ -209,8 +209,8 @@
 There are two ways to write out the data once the spectrum has been calculated.
 The command ``write_out`` outputs two columns of data:
 
-  1. Wavelength :math:`(\text{\AA})`
-  2. Flux (Luminosity per unit wavelength :math:`(\mathrm{\rm{L}_\odot} / \text{\AA})` , where
+  1. Wavelength (:math:`\text{Angstroms}`)
+  2. Flux (Luminosity per unit wavelength :math:`(\mathrm{\rm{L}_\odot} / \text{Angstrom})` , where
        :math:`\mathrm{\rm{L}_\odot} = 3.826 \cdot 10^{33}\, \mathrm{ergs / s}` ).
 
 and can be called simply, specifying the output file:
@@ -225,7 +225,7 @@
 distribution to. The default is 5200 Angstroms. This command outputs the data
 in two columns:
 
-  1. Wavelength :math:`(\text{\AA})`
+  1. Wavelength :math:`(\text{Angstroms})`
   2. Relative flux normalized to the flux at *flux_norm*.
 
 .. code-block:: python

diff -r 5f4fc6ce70895d0ee68726c091c194d5a1dc2ecc -r 6fef7faaec95455acdaf432aae4585a767f83145 doc/source/conf.py
--- a/doc/source/conf.py
+++ b/doc/source/conf.py
@@ -11,9 +11,9 @@
 # All configuration values have a default; values that are commented out
 # serve to show the default.
 
-import sys, os, glob, re
-from sphinx.search import WordCollector
-from docutils.nodes import comment, title, Text, SkipNode
+import sys
+import os
+import glob
 
 on_rtd = os.environ.get("READTHEDOCS", None) == "True"
 
@@ -30,7 +30,7 @@
 # Add any Sphinx extension module names here, as strings. They can be extensions
 # coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
 extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx',
-              'sphinx.ext.pngmath', 'sphinx.ext.viewcode',
+              'sphinx.ext.mathjax', 'sphinx.ext.viewcode',
               'sphinx.ext.napoleon', 'yt_cookbook', 'yt_colormaps',
               'config_help']
 
@@ -228,13 +228,6 @@
 # If true, show URL addresses after external links.
 #latex_show_urls = False
 
-# Additional stuff for the LaTeX preamble.
-latex_preamble = r"""
-\renewcommand{\AA}{\text{\r{A}}} % Allow \AA in math mode
-\usepackage[utf8]{inputenc}      % Allow unicode symbols in text
-\DeclareUnicodeCharacter {212B} {\AA}                  % Angstrom
-"""
-
 # Documents to append as an appendix to all manuals.
 #latex_appendices = []
 

diff -r 5f4fc6ce70895d0ee68726c091c194d5a1dc2ecc -r 6fef7faaec95455acdaf432aae4585a767f83145 setup.cfg
--- a/setup.cfg
+++ b/setup.cfg
@@ -15,4 +15,4 @@
 #      vendored libraries
 exclude = doc,benchmarks,*/api.py,*/__init__.py,*/__config__.py,yt/visualization/_mpl_imports.py,yt/utilities/lodgeit.py,yt/utilities/lru_cache.py,yt/utilities/poster/*,yt/extern/*,yt/mods.py,yt/utilities/fits_image.py
 max-line-length=999
-ignore = E111,E121,E122,E123,E124,E125,E126,E127,E128,E129,E131,E201,E202,E211,E221,E222,E227,E228,E241,E301,E203,E225,E226,E231,E251,E261,E262,E265,E266,E302,E303,E402,E502,E701,E703,E731,W291,W292,W293,W391,W503
\ No newline at end of file
+ignore = E111,E121,E122,E123,E124,E125,E126,E127,E128,E129,E131,E201,E202,E211,E221,E222,E227,E228,E241,E301,E203,E225,E226,E231,E251,E261,E262,E265,E266,E302,E303,E305,E306,E402,E502,E701,E703,E731,W291,W292,W293,W391,W503
\ No newline at end of file

diff -r 5f4fc6ce70895d0ee68726c091c194d5a1dc2ecc -r 6fef7faaec95455acdaf432aae4585a767f83145 yt/frontends/chombo/data_structures.py
--- a/yt/frontends/chombo/data_structures.py
+++ b/yt/frontends/chombo/data_structures.py
@@ -34,7 +34,8 @@
 from yt.data_objects.static_output import \
     Dataset
 from yt.utilities.file_handler import \
-    HDF5FileHandler
+    HDF5FileHandler, \
+    warn_h5py
 from yt.utilities.parallel_tools.parallel_analysis_interface import \
     parallel_root_only
 from yt.utilities.lib.misc_utilities import \
@@ -730,6 +731,8 @@
     @classmethod
     def _is_valid(self, *args, **kwargs):
 
+        warn_h5py(args[0])
+
         if not is_chombo_hdf5(args[0]):
             return False
 

diff -r 5f4fc6ce70895d0ee68726c091c194d5a1dc2ecc -r 6fef7faaec95455acdaf432aae4585a767f83145 yt/frontends/exodus_ii/data_structures.py
--- a/yt/frontends/exodus_ii/data_structures.py
+++ b/yt/frontends/exodus_ii/data_structures.py
@@ -23,8 +23,9 @@
 from yt.data_objects.static_output import \
     Dataset
 from yt.data_objects.unions import MeshUnion
-from .io import \
-    NetCDF4FileHandler
+from yt.utilities.file_handler import \
+    NetCDF4FileHandler, \
+    warn_netcdf
 from yt.utilities.logger import ytLogger as mylog
 from .fields import \
     ExodusIIFieldInfo
@@ -180,18 +181,18 @@
 
     def _parse_parameter_file(self):
         self._handle = NetCDF4FileHandler(self.parameter_filename)
-        self._vars = self._handle.dataset.variables
-        self._read_glo_var()
-        self.dimensionality = self._vars['coor_names'].shape[0]
-        self.parameters['info_records'] = self._load_info_records()
-        self.unique_identifier = self._get_unique_identifier()
-        self.num_steps = len(self._vars['time_whole'])
-        self.current_time = self._get_current_time()
-        self.parameters['num_meshes'] = self._vars['eb_status'].shape[0]
-        self.parameters['elem_names'] = self._get_elem_names()
-        self.parameters['nod_names'] = self._get_nod_names()
-        self.domain_left_edge, self.domain_right_edge = self._load_domain_edge()
-        self.periodicity = (False, False, False)
+        with self._handle.open_ds() as ds:
+            self._read_glo_var()
+            self.dimensionality = ds.variables['coor_names'].shape[0]
+            self.parameters['info_records'] = self._load_info_records()
+            self.unique_identifier = self._get_unique_identifier()
+            self.num_steps = len(ds.variables['time_whole'])
+            self.current_time = self._get_current_time()
+            self.parameters['num_meshes'] = ds.variables['eb_status'].shape[0]
+            self.parameters['elem_names'] = self._get_elem_names()
+            self.parameters['nod_names'] = self._get_nod_names()
+            self.domain_left_edge, self.domain_right_edge = self._load_domain_edge()
+            self.periodicity = (False, False, False)
 
         # These attributes don't really make sense for unstructured
         # mesh data, but yt warns if they are not present, so we set
@@ -205,18 +206,18 @@
         self.refine_by = 0
 
     def _get_fluid_types(self):
-        handle = NetCDF4FileHandler(self.parameter_filename).dataset
-        fluid_types = ()
-        i = 1
-        while True:
-            ftype = 'connect%d' % i
-            if ftype in handle.variables:
-                fluid_types += (ftype,)
-                i += 1
-            else:
-                break
-        fluid_types += ('all',)
-        return fluid_types
+        with NetCDF4FileHandler(self.parameter_filename).open_ds() as ds:
+            fluid_types = ()
+            i = 1
+            while True:
+                ftype = 'connect%d' % i
+                if ftype in ds.variables:
+                    fluid_types += (ftype,)
+                    i += 1
+                else:
+                    break
+            fluid_types += ('all',)
+            return fluid_types
 
     def _read_glo_var(self):
         """
@@ -226,31 +227,34 @@
         names = self._get_glo_names()
         if not names:
             return
-        values = self._vars['vals_glo_var'][:].transpose()
-        for name, value in zip(names, values):
-            self.parameters[name] = value
+        with self._handle.open_ds() as ds:
+            values = ds.variables['vals_glo_var'][:].transpose()
+            for name, value in zip(names, values):
+                self.parameters[name] = value
 
     def _load_info_records(self):
         """
         Returns parsed version of the info_records.
         """
-        try:
-            return load_info_records(self._vars['info_records'])
-        except (KeyError, TypeError):
-            mylog.warning("No info_records found")
-            return []
+        with self._handle.open_ds() as ds:
+            try:
+                return load_info_records(ds.variables['info_records'])
+            except (KeyError, TypeError):
+                mylog.warning("No info_records found")
+                return []
 
     def _get_unique_identifier(self):
         return self.parameter_filename
 
     def _get_current_time(self):
-        try:
-            return self._vars['time_whole'][self.step]
-        except IndexError:
-            raise RuntimeError("Invalid step number, max is %d" \
-                               % (self.num_steps - 1))
-        except (KeyError, TypeError):
-            return 0.0
+        with self._handle.open_ds() as ds:
+            try:
+                return ds.variables['time_whole'][self.step]
+            except IndexError:
+                raise RuntimeError("Invalid step number, max is %d" \
+                                   % (self.num_steps - 1))
+            except (KeyError, TypeError):
+                return 0.0
 
     def _get_glo_names(self):
         """
@@ -259,12 +263,13 @@
 
         """
 
-        if "name_glo_var" not in self._vars:
-            mylog.warning("name_glo_var not found")
-            return []
-        else:
-            return [sanitize_string(v.tostring()) for v in
-                    self._vars["name_glo_var"]]
+        with self._handle.open_ds() as ds:
+            if "name_glo_var" not in ds.variables:
+                mylog.warning("name_glo_var not found")
+                return []
+            else:
+                return [sanitize_string(v.tostring()) for v in
+                        ds.variables["name_glo_var"]]
 
     def _get_elem_names(self):
         """
@@ -273,12 +278,13 @@
 
         """
 
-        if "name_elem_var" not in self._vars:
-            mylog.warning("name_elem_var not found")
-            return []
-        else:
-            return [sanitize_string(v.tostring()) for v in
-                    self._vars["name_elem_var"]]
+        with self._handle.open_ds() as ds:
+            if "name_elem_var" not in ds.variables:
+                mylog.warning("name_elem_var not found")
+                return []
+            else:
+                return [sanitize_string(v.tostring()) for v in
+                        ds.variables["name_elem_var"]]
 
     def _get_nod_names(self):
         """
@@ -287,12 +293,13 @@
 
         """
 
-        if "name_nod_var" not in self._vars:
-            mylog.warning("name_nod_var not found")
-            return []
-        else:
-            return [sanitize_string(v.tostring()) for v in
-                    self._vars["name_nod_var"]]
+        with self._handle.open_ds() as ds:
+            if "name_nod_var" not in ds.variables:
+                mylog.warning("name_nod_var not found")
+                return []
+            else:
+                return [sanitize_string(v.tostring()) for v in
+                        ds.variables["name_nod_var"]]
 
     def _read_coordinates(self):
         """
@@ -304,13 +311,14 @@
         coord_axes = 'xyz'[:self.dimensionality]
 
         mylog.info("Loading coordinates")
-        if "coord" not in self._vars:
-            coords = np.array([self._vars["coord%s" % ax][:]
-                               for ax in coord_axes]).transpose().copy()
-        else:
-            coords = np.array([coord for coord in
-                               self._vars["coord"][:]]).transpose().copy()
-        return coords
+        with self._handle.open_ds() as ds:
+            if "coord" not in ds.variables:
+                coords = np.array([ds.variables["coord%s" % ax][:]
+                                   for ax in coord_axes]).transpose().copy()
+            else:
+                coords = np.array([coord for coord in
+                                   ds.variables["coord"][:]]).transpose().copy()
+            return coords
 
     def _apply_displacement(self, coords, mesh_id):
 
@@ -324,13 +332,14 @@
         offset = self.displacements[mesh_name][1]
 
         coord_axes = 'xyz'[:self.dimensionality]
-        for i, ax in enumerate(coord_axes):
-            if "disp_%s" % ax in self.parameters['nod_names']:
-                ind = self.parameters['nod_names'].index("disp_%s" % ax)
-                disp = self._vars['vals_nod_var%d' % (ind + 1)][self.step]
-                new_coords[:, i] = coords[:, i] + fac*disp + offset[i]
+        with self._handle.open_ds() as ds:
+            for i, ax in enumerate(coord_axes):
+                if "disp_%s" % ax in self.parameters['nod_names']:
+                    ind = self.parameters['nod_names'].index("disp_%s" % ax)
+                    disp = ds.variables['vals_nod_var%d' % (ind + 1)][self.step]
+                    new_coords[:, i] = coords[:, i] + fac*disp + offset[i]
 
-        return new_coords
+            return new_coords
 
     def _read_connectivity(self):
         """
@@ -338,9 +347,10 @@
         """
         mylog.info("Loading connectivity")
         connectivity = []
-        for i in range(self.parameters['num_meshes']):
-            connectivity.append(self._vars["connect%d" % (i+1)][:].astype("i8"))
-        return connectivity
+        with self._handle.open_ds() as ds:
+            for i in range(self.parameters['num_meshes']):
+                connectivity.append(ds.variables["connect%d" % (i+1)][:].astype("i8"))
+            return connectivity
 
     def _load_domain_edge(self):
         """
@@ -373,11 +383,12 @@
         for i in range(self.dimensionality, 3):
             mi[i] = 0.0
             ma[i] = 1.0
-        
+
         return mi, ma
 
     @classmethod
     def _is_valid(self, *args, **kwargs):
+        warn_netcdf(args[0])
         try:
             from netCDF4 import Dataset
             filename = args[0]
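
A hedged sketch of loading an Exodus II file through the reworked,
contextmanager-based handle (the sample filename is illustrative)::

    import yt

    ds = yt.load("out.e", step=-1)  # step selects the time step
    print(ds.current_time)
    print(ds.parameters['nod_names'])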

diff -r 5f4fc6ce70895d0ee68726c091c194d5a1dc2ecc -r 6fef7faaec95455acdaf432aae4585a767f83145 yt/frontends/exodus_ii/io.py
--- a/yt/frontends/exodus_ii/io.py
+++ b/yt/frontends/exodus_ii/io.py
@@ -28,7 +28,7 @@
     def __init__(self, ds):
         self.filename = ds.index_filename
         exodus_ii_handler = NetCDF4FileHandler(self.filename)
-        self.handler = exodus_ii_handler.dataset
+        self.handler = exodus_ii_handler
         super(IOHandlerExodusII, self).__init__(ds)
         self.node_fields = ds._get_nod_names()
         self.elem_fields = ds._get_elem_names()
@@ -46,46 +46,47 @@
         # dict gets returned at the end and it should be flat, with selected
         # data.  Note that if you're reading grid data, you might need to
         # special-case a grid selector object.
-        chunks = list(chunks)
-        rv = {}
-        for field in fields:
-            ftype, fname = field
-            if ftype == "all":
-                ci = np.concatenate([mesh.connectivity_indices - self._INDEX_OFFSET \
-                                     for mesh in self.ds.index.mesh_union])
-            else:
-                ci = self.handler.variables[ftype][:] - self._INDEX_OFFSET
-            num_elem = ci.shape[0]
-            if fname in self.node_fields:
-                nodes_per_element = ci.shape[1]
-                rv[field] = np.zeros((num_elem, nodes_per_element), dtype="float64")
-            elif fname in self.elem_fields:
-                rv[field] = np.zeros(num_elem, dtype="float64")
-        for field in fields:
-            ind = 0
-            ftype, fname = field
-            if ftype == "all":
-                mesh_ids = [mesh.mesh_id + 1 for mesh in self.ds.index.mesh_union]
-                objs = [mesh for mesh in self.ds.index.mesh_union]
-            else:
-                mesh_ids = [int(ftype[-1])]
-                chunk = chunks[mesh_ids[0] - 1]
-                objs = chunk.objs
-            if fname in self.node_fields:
-                field_ind = self.node_fields.index(fname)
-                fdata = self.handler.variables['vals_nod_var%d' % (field_ind + 1)]
-                for g in objs:
-                    ci = g.connectivity_indices - self._INDEX_OFFSET
-                    data = fdata[self.ds.step][ci]
-                    ind += g.select(selector, data, rv[field], ind)  # caches
-            if fname in self.elem_fields:
-                field_ind = self.elem_fields.index(fname)
-                for g, mesh_id in zip(objs, mesh_ids):
-                    fdata = self.handler.variables['vals_elem_var%deb%s' %
-                                                   (field_ind + 1, mesh_id)][:]
-                    data = fdata[self.ds.step, :]
-                    ind += g.select(selector, data, rv[field], ind)  # caches
-        return rv
+        with self.handler.open_ds() as ds:
+            chunks = list(chunks)
+            rv = {}
+            for field in fields:
+                ftype, fname = field
+                if ftype == "all":
+                    ci = np.concatenate([mesh.connectivity_indices - self._INDEX_OFFSET \
+                                         for mesh in self.ds.index.mesh_union])
+                else:
+                    ci = ds.variables[ftype][:] - self._INDEX_OFFSET
+                num_elem = ci.shape[0]
+                if fname in self.node_fields:
+                    nodes_per_element = ci.shape[1]
+                    rv[field] = np.zeros((num_elem, nodes_per_element), dtype="float64")
+                elif fname in self.elem_fields:
+                    rv[field] = np.zeros(num_elem, dtype="float64")
+            for field in fields:
+                ind = 0
+                ftype, fname = field
+                if ftype == "all":
+                    mesh_ids = [mesh.mesh_id + 1 for mesh in self.ds.index.mesh_union]
+                    objs = [mesh for mesh in self.ds.index.mesh_union]
+                else:
+                    mesh_ids = [int(ftype[-1])]
+                    chunk = chunks[mesh_ids[0] - 1]
+                    objs = chunk.objs
+                if fname in self.node_fields:
+                    field_ind = self.node_fields.index(fname)
+                    fdata = ds.variables['vals_nod_var%d' % (field_ind + 1)]
+                    for g in objs:
+                        ci = g.connectivity_indices - self._INDEX_OFFSET
+                        data = fdata[self.ds.step][ci]
+                        ind += g.select(selector, data, rv[field], ind)  # caches
+                if fname in self.elem_fields:
+                    field_ind = self.elem_fields.index(fname)
+                    for g, mesh_id in zip(objs, mesh_ids):
+                        fdata = ds.variables['vals_elem_var%deb%s' %
+                                             (field_ind + 1, mesh_id)][:]
+                        data = fdata[self.ds.step, :]
+                        ind += g.select(selector, data, rv[field], ind)  # caches
+            return rv
 
     def _read_chunk_data(self, chunk, fields):
         # This reads the data from a single chunk, and is only used for

diff -r 5f4fc6ce70895d0ee68726c091c194d5a1dc2ecc -r 6fef7faaec95455acdaf432aae4585a767f83145 yt/frontends/exodus_ii/simulation_handling.py
--- a/yt/frontends/exodus_ii/simulation_handling.py
+++ b/yt/frontends/exodus_ii/simulation_handling.py
@@ -14,7 +14,6 @@
 from yt.data_objects.time_series import \
     DatasetSeries, \
     RegisteredSimulationTimeSeries
-from yt.frontends.exodus_ii.api import ExodusIIDataset
 
 
 @add_metaclass(RegisteredSimulationTimeSeries)
@@ -45,6 +44,25 @@
         self.all_outputs = self._check_for_outputs(potential_outputs)
         self.all_outputs.sort(key=lambda obj: obj["filename"])
 
+    def __iter__(self):
+        for o in self._pre_outputs:
+            fn, step = o
+            ds = load(fn, step=step)
+            self._setup_function(ds)
+            yield ds
+
+    def __getitem__(self, key):
+        if isinstance(key, slice):
+            if isinstance(key.start, float):
+                return self.get_range(key.start, key.stop)
+            # This will return a sliced up object!
+            return DatasetSeries(self._pre_outputs[key], self.parallel)
+        o = self._pre_outputs[key]
+        fn, step = o
+        o = load(fn, step=step)
+        self._setup_function(o)
+        return o
+
     def get_time_series(self, parallel=False, setup_function=None):
         r"""
         Instantiate a DatasetSeries object for a set of outputs.
@@ -55,15 +73,14 @@
         Fine-level filtering is currently not implemented.
         
         """
-        
+
         all_outputs = self.all_outputs
         ds_list = []
         for output in all_outputs:
             num_steps = output['num_steps']
             fn = output['filename']
             for step in range(num_steps):
-                ds = ExodusIIDataset(fn, step=step)
-                ds_list.append(ds)
+                ds_list.append((fn, step))
         super(ExodusIISimulation, self).__init__(ds_list, 
                                                  parallel=parallel, 
                                                  setup_function=setup_function)

diff -r 5f4fc6ce70895d0ee68726c091c194d5a1dc2ecc -r 6fef7faaec95455acdaf432aae4585a767f83145 yt/frontends/flash/data_structures.py
--- a/yt/frontends/flash/data_structures.py
+++ b/yt/frontends/flash/data_structures.py
@@ -30,7 +30,8 @@
 from yt.geometry.particle_geometry_handler import \
     ParticleIndex
 from yt.utilities.file_handler import \
-    HDF5FileHandler
+    HDF5FileHandler, \
+    warn_h5py
 from yt.utilities.physical_ratios import cm_per_mpc
 from .fields import FLASHFieldInfo
 
@@ -439,7 +440,7 @@
             fileh = HDF5FileHandler(args[0])
             if "bounding box" in fileh["/"].keys():
                 return True
-        except:
+        except (IOError, OSError, ImportError):
             pass
         return False
 
@@ -489,12 +490,13 @@
 
     @classmethod
     def _is_valid(self, *args, **kwargs):
+        warn_h5py(args[0])
         try:
             fileh = HDF5FileHandler(args[0])
             if "bounding box" not in fileh["/"].keys() \
                 and "localnp" in fileh["/"].keys():
                 return True
-        except IOError:
+        except (IOError, OSError, ImportError):
             pass
         return False
 

diff -r 5f4fc6ce70895d0ee68726c091c194d5a1dc2ecc -r 6fef7faaec95455acdaf432aae4585a767f83145 yt/frontends/open_pmd/data_structures.py
--- a/yt/frontends/open_pmd/data_structures.py
+++ b/yt/frontends/open_pmd/data_structures.py
@@ -31,7 +31,8 @@
     get_component
 from yt.funcs import setdefaultattr
 from yt.geometry.grid_geometry_handler import GridIndex
-from yt.utilities.file_handler import HDF5FileHandler
+from yt.utilities.file_handler import HDF5FileHandler, \
+    warn_h5py
 from yt.utilities.logger import ytLogger as mylog
 from yt.utilities.on_demand_imports import _h5py as h5
 
@@ -495,9 +496,10 @@
     def _is_valid(self, *args, **kwargs):
         """Checks whether the supplied file can be read by this frontend.
         """
+        warn_h5py(args[0])
         try:
             f = h5.File(args[0], "r")
-        except (IOError, OSError):
+        except (IOError, OSError, ImportError):
             return False
 
         requirements = ["openPMD", "basePath", "meshesPath", "particlesPath"]

diff -r 5f4fc6ce70895d0ee68726c091c194d5a1dc2ecc -r 6fef7faaec95455acdaf432aae4585a767f83145 yt/geometry/grid_container.pyx
--- a/yt/geometry/grid_container.pyx
+++ b/yt/geometry/grid_container.pyx
@@ -63,9 +63,9 @@
                   np.ndarray[np.int64_t, ndim=1] num_children):
 
         cdef int i, j, k
-        cdef np.ndarray[np.int64_t, ndim=1] child_ptr
+        cdef np.ndarray[np.int_t, ndim=1] child_ptr
 
-        child_ptr = np.zeros(num_grids, dtype='int64')
+        child_ptr = np.zeros(num_grids, dtype='int')
 
         self.num_grids = num_grids
         self.num_root_grids = 0

diff -r 5f4fc6ce70895d0ee68726c091c194d5a1dc2ecc -r 6fef7faaec95455acdaf432aae4585a767f83145 yt/geometry/grid_visitors.pxd
--- a/yt/geometry/grid_visitors.pxd
+++ b/yt/geometry/grid_visitors.pxd
@@ -37,7 +37,7 @@
     np.float64_t right_edge_x
     np.float64_t right_edge_y
     np.float64_t right_edge_z
-    np.int64_t children_pointers
+    np.int_t children_pointers
     np.int64_t start_index_x
     np.int64_t start_index_y
     np.int64_t start_index_z

diff -r 5f4fc6ce70895d0ee68726c091c194d5a1dc2ecc -r 6fef7faaec95455acdaf432aae4585a767f83145 yt/geometry/particle_smooth.pyx
--- a/yt/geometry/particle_smooth.pyx
+++ b/yt/geometry/particle_smooth.pyx
@@ -314,8 +314,7 @@
         # Note that what we will be providing to our processing functions will
         # actually be indirectly-sorted fields.  This preserves memory at the
         # expense of additional pointer lookups.
-        pind = np.argsort(pdoms)
-        pind = np.asarray(pind, dtype='int64', order='C')
+        pind = np.asarray(np.argsort(pdoms), dtype='int64', order='C')
         # So what this means is that we now have all the oct-0 particle indices
         # in order, then the oct-1, etc etc.
         # This now gives us the indices to the particles for each domain.

diff -r 5f4fc6ce70895d0ee68726c091c194d5a1dc2ecc -r 6fef7faaec95455acdaf432aae4585a767f83145 yt/utilities/exodusII_reader.py
--- a/yt/utilities/exodusII_reader.py
+++ /dev/null
@@ -1,51 +0,0 @@
-import string
-from itertools import takewhile
-from netCDF4 import Dataset
-import numpy as np
-from yt.config import ytcfg
-import os
-import warnings
-
-
-def sanitize_string(s):
-    s = "".join(_ for _ in takewhile(lambda a: a in string.printable, s))
-    return s
-
-
-def get_data(fn):
-    warnings.warn("The yt.utilities.exodusII_reader module is deprecated "
-                  "and will be removed in a future release. "
-                  "Please use the normal yt.load() command to access "
-                  "your data instead.")
-    try:
-        f = Dataset(fn)
-    except RuntimeError:
-        f = Dataset(os.path.join(ytcfg.get("yt", "test_data_dir"), fn))
-    fvars = f.variables
-    # Is this correct?
-    etypes = fvars["eb_status"][:]
-    nelem = etypes.shape[0]
-    varnames = [sanitize_string(v.tostring()) for v in
-                fvars["name_elem_var"][:]]
-    nodnames = [sanitize_string(v.tostring()) for v in
-                fvars["name_nod_var"][:]]
-    coord = np.array([fvars["coord%s" % ax][:]
-                     for ax in 'xyz']).transpose().copy()
-    coords = []
-    connects = []
-    data = []
-    for i in range(nelem):
-        connects.append(fvars["connect%s" % (i+1)][:].astype("i8"))
-        ci = connects[-1]
-        coords.append(coord)  # Same for all
-        vals = {}
-        for j, v in enumerate(varnames):
-            values = fvars["vals_elem_var%seb%s" % (j+1, i+1)][:]
-            vals['gas', v] = values.astype("f8")[-1, :]
-        for j, v in enumerate(nodnames):
-            # We want just for this set of nodes all the node variables
-            # Use (ci - 1) to get these values
-            values = fvars["vals_nod_var%s" % (j+1)][:]
-            vals['gas', v] = values.astype("f8")[-1, ci - 1, ...]
-        data.append(vals)
-    return coords, connects, data

diff -r 5f4fc6ce70895d0ee68726c091c194d5a1dc2ecc -r 6fef7faaec95455acdaf432aae4585a767f83145 yt/utilities/file_handler.py
--- a/yt/utilities/file_handler.py
+++ b/yt/utilities/file_handler.py
@@ -14,6 +14,25 @@
 #-----------------------------------------------------------------------------
 
 from yt.utilities.on_demand_imports import _h5py as h5py
+from yt.utilities.on_demand_imports import NotAModule
+from contextlib import contextmanager
+
+def valid_hdf5_signature(fn):
+    signature = b'\x89HDF\r\n\x1a\n'
+    try:
+        with open(fn, 'rb') as f:
+            header = f.read(8)
+            return header == signature
+    except:
+        return False
+
+
+def warn_h5py(fn):
+    needs_h5py = valid_hdf5_signature(fn)
+    if needs_h5py and isinstance(h5py.File, NotAModule):
+        raise RuntimeError("This appears to be an HDF5 file, "
+                           "but h5py is not installed.")
+
 
 class HDF5FileHandler(object):
     handle = None
@@ -67,8 +86,33 @@
     def close(self):
         self.handle.close()
 
+
+def valid_netcdf_classic_signature(filename):
+    signature_v1 = b'CDF\x01'
+    signature_v2 = b'CDF\x02'
+    try:
+        with open(filename, 'rb') as f:
+            header = f.read(4)
+            return (header == signature_v1 or header == signature_v2)
+    except:
+        return False
+
+
+def warn_netcdf(fn):
+    needs_netcdf = valid_netcdf_classic_signature(fn)
+    from yt.utilities.on_demand_imports import _netCDF4 as netCDF4
+    if needs_netcdf and isinstance(netCDF4.Dataset, NotAModule):
+        raise RuntimeError("This appears to be a netCDF file, but the "
+                           "python bindings for netCDF4 are not installed.")
+
+
 class NetCDF4FileHandler(object):
     def __init__(self, filename):
-        from netCDF4 import Dataset
-        ds = Dataset(filename)
-        self.dataset = ds
+        self.filename = filename
+
+    @contextmanager
+    def open_ds(self):
+        from yt.utilities.on_demand_imports import _netCDF4 as netCDF4
+        ds = netCDF4.Dataset(self.filename)
+        yield ds
+        ds.close()
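
A minimal sketch of the new contextmanager-based handler above (the
filename is illustrative and netCDF4 is assumed to be installed)::

    from yt.utilities.file_handler import NetCDF4FileHandler

    handler = NetCDF4FileHandler("out.e")  # hypothetical Exodus II file
    with handler.open_ds() as ds:
        print(sorted(ds.variables))        # the dataset is closed on exit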

diff -r 5f4fc6ce70895d0ee68726c091c194d5a1dc2ecc -r 6fef7faaec95455acdaf432aae4585a767f83145 yt/utilities/lib/alt_ray_tracers.pyx
--- a/yt/utilities/lib/alt_ray_tracers.pyx
+++ b/yt/utilities/lib/alt_ray_tracers.pyx
@@ -101,7 +101,7 @@
                                           rleft, rright, zleft, zright, \
                                           cleft, cright, thetaleft, thetaright, \
                                           tmleft, tpleft, tmright, tpright, tsect
-    cdef np.ndarray[np.int64_t, ndim=1, cast=True] inds, tinds, sinds
+    cdef np.ndarray[np.int_t, ndim=1, cast=True] inds, tinds, sinds
     cdef np.ndarray[np.float64_t, ndim=2] xyz, rztheta, ptemp, b1, b2, dsect
 
     # set up  points

diff -r 5f4fc6ce70895d0ee68726c091c194d5a1dc2ecc -r 6fef7faaec95455acdaf432aae4585a767f83145 yt/utilities/lib/pixelization_routines.pyx
--- a/yt/utilities/lib/pixelization_routines.pyx
+++ b/yt/utilities/lib/pixelization_routines.pyx
@@ -244,7 +244,7 @@
                        np.float64_t[:] pdz,
                        np.float64_t[:] center,
                        np.float64_t[:,:] inv_mat,
-                       np.int64_t[:] indices,
+                       np.int_t[:] indices,
                        np.float64_t[:] data,
                        bounds):
     cdef np.float64_t x_min, x_max, y_min, y_max

diff -r 5f4fc6ce70895d0ee68726c091c194d5a1dc2ecc -r 6fef7faaec95455acdaf432aae4585a767f83145 yt/utilities/on_demand_imports.py
--- a/yt/utilities/on_demand_imports.py
+++ b/yt/utilities/on_demand_imports.py
@@ -30,6 +30,23 @@
     def __call__(self, *args, **kwargs):
         raise self.error
 
+class netCDF4_imports(object):
+    _name = "netCDF4"
+    _Dataset = None
+    @property
+    def Dataset(self):
+        if self._Dataset is None:
+            try:
+                from netCDF4 import Dataset
+            except ImportError:
+                Dataset = NotAModule(self._name)
+            self._Dataset = Dataset
+        return self._Dataset
+
+
+_netCDF4 = netCDF4_imports()
+
+
 class astropy_imports(object):
     _name = "astropy"
     _pyfits = None
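
The lazy-import pattern used by ``netCDF4_imports`` generalizes; a hedged,
self-contained sketch with a hypothetical package name::

    class NotAModule(object):
        """Stand-in that raises the saved ImportError when used."""
        def __init__(self, pkg_name):
            self.error = ImportError(
                "The %s package is required for this functionality." % pkg_name)
        def __getattr__(self, attr):
            raise self.error

    class somepkg_imports(object):  # hypothetical analogue of netCDF4_imports
        _name = "somepkg"
        _mod = None
        @property
        def mod(self):
            # Defer the import until first use; fall back to NotAModule.
            if self._mod is None:
                try:
                    import somepkg as mod
                except ImportError:
                    mod = NotAModule(self._name)
                self._mod = mod
            return self._mod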

diff -r 5f4fc6ce70895d0ee68726c091c194d5a1dc2ecc -r 6fef7faaec95455acdaf432aae4585a767f83145 yt/visualization/plot_modifications.py
--- a/yt/visualization/plot_modifications.py
+++ b/yt/visualization/plot_modifications.py
@@ -2336,12 +2336,12 @@
                                                  plot.data,
                                                  self.field_x,
                                                  bounds,
-                                                 (nx,ny))
+                                                 (ny,nx))
         pixY = plot.data.ds.coordinates.pixelize(plot.data.axis,
                                                  plot.data,
                                                  self.field_y,
                                                  bounds,
-                                                 (nx,ny))
+                                                 (ny,nx))
 
         vectors = np.concatenate((pixX[...,np.newaxis],
                                   pixY[...,np.newaxis]),axis=2)

diff -r 5f4fc6ce70895d0ee68726c091c194d5a1dc2ecc -r 6fef7faaec95455acdaf432aae4585a767f83145 yt/visualization/profile_plotter.py
--- a/yt/visualization/profile_plotter.py
+++ b/yt/visualization/profile_plotter.py
@@ -84,6 +84,7 @@
     def __init__(self, plots):
         self.plots = plots
         self.ylim = {}
+        self.xlim = (None, None)
         super(AxesContainer, self).__init__()
 
     def __missing__(self, key):
@@ -357,6 +358,7 @@
             axes.set_xlabel(xtitle)
             axes.set_ylabel(ytitle)
             axes.set_ylim(*self.axes.ylim[fname])
+            axes.set_xlim(*self.axes.xlim)
             if any(self.label):
                 axes.legend(loc="best")
         self._set_font_properties()
@@ -539,6 +541,7 @@
         >>> pp.save()
 
         """
+        self.axes.xlim = (xmin, xmax)
         for i, p in enumerate(self.profiles):
             if xmin is None:
                 xmi = p.x_bins.min()
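
A hedged usage sketch of the x-limit caching added above (the dataset path
and limits are illustrative)::

    import yt

    ds = yt.load("IsolatedGalaxy/galaxy0030/galaxy0030")
    pp = yt.ProfilePlot(ds.all_data(), "density", "temperature")
    pp.set_xlim(1e-31, 1e-24)  # now stored on the AxesContainer and reapplied
    pp.save()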


https://bitbucket.org/yt_analysis/yt/commits/2fd33a133cc3/
Changeset:   2fd33a133cc3
Branch:      yt
User:        ngoldbaum
Date:        2017-04-11 15:35:57+00:00
Summary:     Merged in brittonsmith/yt (pull request #2536)

Minor HaloCatalog upgrades

Approved-by: chummels <chummels at gmail.com>
Approved-by: Nathan Goldbaum <ngoldbau at illinois.edu>
Affected #:  3 files

diff -r 2441e1324406c79c6fe366223f8b5ee69b2f1b54 -r 2fd33a133cc39a889a72a6285a47cca8f01ebf3c yt/analysis_modules/halo_analysis/halo_catalog.py
--- a/yt/analysis_modules/halo_analysis/halo_catalog.py
+++ b/yt/analysis_modules/halo_analysis/halo_catalog.py
@@ -478,10 +478,12 @@
                         field_types=ftypes, extra_attrs=extra_attrs)
 
     def add_default_quantities(self, field_type='halos'):
-        self.add_quantity("particle_identifier", field_type=field_type,prepend=True)
-        self.add_quantity("particle_mass", field_type=field_type,prepend=True)
-        self.add_quantity("particle_position_x", field_type=field_type,prepend=True)
-        self.add_quantity("particle_position_y", field_type=field_type,prepend=True)
-        self.add_quantity("particle_position_z", field_type=field_type,prepend=True)
-        self.add_quantity("virial_radius", field_type=field_type,prepend=True)
-
+        for field in ["particle_identifier", "particle_mass",
+                      "particle_position_x", "particle_position_y",
+                      "particle_position_z", "virial_radius"]:
+            field_name = (field_type, field)
+            if field_name not in self.halos_ds.field_list:
+                mylog.warn("Halo dataset %s has no field %s." %
+                           (self.halos_ds, str(field_name)))
+                continue
+            self.add_quantity(field, field_type=field_type, prepend=True)
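
A hedged sketch of the guarded ``add_default_quantities`` behavior merged
above (the catalog path is illustrative)::

    import yt
    from yt.analysis_modules.halo_analysis.api import HaloCatalog

    halos_ds = yt.load("halo_catalogs/catalog_0046/catalog_0046.0.h5")
    # Default quantities are added on construction; with this change, any
    # field missing from the catalog is warned about and skipped.
    hc = HaloCatalog(halos_ds=halos_ds)
    hc.create()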

diff -r 2441e1324406c79c6fe366223f8b5ee69b2f1b54 -r 2fd33a133cc39a889a72a6285a47cca8f01ebf3c yt/frontends/halo_catalog/data_structures.py
--- a/yt/frontends/halo_catalog/data_structures.py
+++ b/yt/frontends/halo_catalog/data_structures.py
@@ -30,6 +30,20 @@
 from yt.data_objects.static_output import \
     ParticleFile
 
+class HaloCatalogParticleIndex(ParticleIndex):
+    def _setup_filenames(self):
+        template = self.dataset.filename_template
+        ndoms = self.dataset.file_count
+        cls = self.dataset._file_class
+        if ndoms > 1:
+            self.data_files = \
+              [cls(self.dataset, self.io, template % {'num':i}, i)
+               for i in range(ndoms)]
+        else:
+            self.data_files = \
+              [cls(self.dataset, self.io,
+                   self.dataset.parameter_filename, 0)]
+
 class HaloCatalogHDF5File(ParticleFile):
     def __init__(self, ds, io, filename, file_id):
         with h5py.File(filename, "r") as f:

diff -r 2441e1324406c79c6fe366223f8b5ee69b2f1b54 -r 2fd33a133cc39a889a72a6285a47cca8f01ebf3c yt/geometry/particle_geometry_handler.py
--- a/yt/geometry/particle_geometry_handler.py
+++ b/yt/geometry/particle_geometry_handler.py
@@ -69,13 +69,18 @@
     def convert(self, unit):
         return self.dataset.conversion_factors[unit]
 
-    def _initialize_particle_handler(self):
-        self._setup_data_io()
+    def _setup_filenames(self):
         template = self.dataset.filename_template
         ndoms = self.dataset.file_count
         cls = self.dataset._file_class
-        self.data_files = [cls(self.dataset, self.io, template % {'num':i}, i)
-                           for i in range(ndoms)]
+        self.data_files = \
+          [cls(self.dataset, self.io, template % {'num':i}, i)
+           for i in range(ndoms)]
+
+    def _initialize_particle_handler(self):
+        self._setup_data_io()
+        self._setup_filenames()
+
         index_ptype = self.index_ptype
         if index_ptype == "all":
             self.total_particles = sum(

Repository URL: https://bitbucket.org/yt_analysis/yt/

--

This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.


