[yt-svn] commit/yt: 9 new changesets

commits-noreply at bitbucket.org
Mon Nov 23 11:13:44 PST 2015


9 new commits in yt:

https://bitbucket.org/yt_analysis/yt/commits/b2ef757577fa/
Changeset:   b2ef757577fa
Branch:      yt
User:        MatthewTurk
Date:        2015-09-18 21:06:33+00:00
Summary:     A simple implementation of a fields attribute.

This implements ds.fields, which exposes the field types at its top level and
the field names at the next level down.  For instance,
ds.fields.deposit.all_density returns the field info for the
("deposit", "all_density") field.
Affected #:  1 file
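
As a hedged usage sketch of the new attribute (the dataset path is
illustrative and not part of this changeset):

    import yt
    ds = yt.load("IsolatedGalaxy/galaxy0030/galaxy0030")
    # Field types hang off ds.fields; field names hang off each type.
    fi = ds.fields.deposit.all_density
    print(fi)   # the field info object for ("deposit", "all_density")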

diff -r f818f29712491ce9f597decaea69297a06603393 -r b2ef757577fac9c830782db19fee1b1d7d82f6c1 yt/data_objects/static_output.py
--- a/yt/data_objects/static_output.py
+++ b/yt/data_objects/static_output.py
@@ -76,6 +76,36 @@
         output_type_registry[name] = cls
         mylog.debug("Registering: %s as %s", name, cls)
 
+class FieldTypeContainer(object):
+    def __init__(self, ds):
+        self.ds = weakref.proxy(ds)
+
+    def __getattr__(self, attr):
+        ds = self.__getattribute__('ds')
+        fnc = FieldNameContainer(ds, attr)
+        if len(dir(fnc)) == 0:
+            return self.__getattribute__(attr)
+        return fnc
+
+    def __dir__(self):
+        return list(set(t for t, n in self.ds.field_info))
+
+class FieldNameContainer(object):
+    def __init__(self, ds, field_type):
+        self.ds = ds
+        self.field_type = field_type
+
+    def __getattr__(self, attr):
+        ft = self.__getattribute__("field_type")
+        ds = self.__getattribute__("ds")
+        if (ft, attr) not in ds.field_info:
+            return self.__getattribute__(attr)
+        return ds.field_info[ft, attr]
+
+    def __dir__(self):
+        return [n for t, n in self.ds.field_info
+                if t == self.field_type]
+
 class IndexProxy(object):
     # This is a simple proxy for Index objects.  It enables backwards
     # compatibility so that operations like .h.sphere, .h.print_stats and
@@ -124,6 +154,7 @@
     _index_class = None
     field_units = None
     derived_field_list = requires_index("derived_field_list")
+    fields = requires_index("fields")
     _instantiated = False
 
     def __new__(cls, filename=None, *args, **kwargs):
@@ -380,6 +411,7 @@
         self.field_info.load_all_plugins()
         deps, unloaded = self.field_info.check_derived_fields()
         self.field_dependencies.update(deps)
+        self.fields = FieldTypeContainer(self)
 
     def setup_deprecated_fields(self):
         from yt.fields.field_aliases import _field_name_aliases
@@ -911,6 +943,7 @@
         deps, _ = self.field_info.check_derived_fields([name])
         self.field_dependencies.update(deps)
 
+
     def add_deposited_particle_field(self, deposit_field, method):
         """Add a new deposited particle field
 


https://bitbucket.org/yt_analysis/yt/commits/35efc778ef31/
Changeset:   35efc778ef31
Branch:      yt
User:        MatthewTurk
Date:        2015-09-18 21:33:28+00:00
Summary:     Adding a nice __repr__ for derived fields
Affected #:  1 file

diff -r b2ef757577fac9c830782db19fee1b1d7d82f6c1 -r 35efc778ef31ea5ef1794848b94263c88c770460 yt/fields/derived_field.py
--- a/yt/fields/derived_field.py
+++ b/yt/fields/derived_field.py
@@ -211,6 +211,20 @@
         data_label += r"$"
         return data_label
 
+    def __repr__(self):
+        s = "Derived Field "
+        if isinstance(self.name, tuple):
+            s += "(%s, %s): " % self.name
+        else:
+            s += "%s: " % (self.name)
+        s += "(units: %s" % self.units
+        if self.display_name is not None:
+            s += ", display_name: '%s'" % (self.display_name)
+        if self.particle_type:
+            s += ", particle field"
+        s += ")"
+        return s
+
 class FieldValidator(object):
     pass
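
A hedged sketch of the resulting output (the field name and units are
illustrative):

    >>> ds.fields.gas.density
    Derived Field (gas, density): (units: g/cm**3)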
 


https://bitbucket.org/yt_analysis/yt/commits/e0af77a45c5b/
Changeset:   e0af77a45c5b
Branch:      yt
User:        MatthewTurk
Date:        2015-09-19 00:57:04+00:00
Summary:     Adding tests for field_lists.
Affected #:  1 file

diff -r 35efc778ef31ea5ef1794848b94263c88c770460 -r e0af77a45c5b5961080555762d11c80789761be0 yt/fields/tests/test_field_name_container.py
--- /dev/null
+++ b/yt/fields/tests/test_field_name_container.py
@@ -0,0 +1,17 @@
+from yt.utilities.answer_testing.framework import \
+    requires_ds, \
+    data_dir_load
+
+enzotiny = "enzo_tiny_cosmology/DD0046/DD0046"
+
+@requires_ds(enzotiny)
+def test_field_name_container():
+    # Walk every field type exposed by ds.fields, then every field name
+    # under it, and check that attribute access resolves to the matching
+    # field info object.
+    ds = data_dir_load(enzotiny)
+    for field_type in dir(ds.fields):
+        ft = getattr(ds.fields, field_type)
+        for field_name in dir(ft):
+            f = getattr(ft, field_name)
+            assert (field_type, field_name) == f.name


https://bitbucket.org/yt_analysis/yt/commits/7e150e1b959f/
Changeset:   7e150e1b959f
Branch:      yt
User:        MatthewTurk
Date:        2015-09-21 18:29:08+00:00
Summary:     Add some more useful __repr__ outputs
Affected #:  1 file

diff -r e0af77a45c5b5961080555762d11c80789761be0 -r 7e150e1b959f73601beb86b50e819086f50a3126 yt/fields/derived_field.py
--- a/yt/fields/derived_field.py
+++ b/yt/fields/derived_field.py
@@ -35,6 +35,7 @@
     def _TranslationFunc(field, data):
         # We do a bunch of in-place modifications, so we will copy this.
         return data[field_name].copy()
+    _TranslationFunc.alias_name = field_name
     return _TranslationFunc
 
 def NullFunc(field, data):
@@ -212,7 +213,12 @@
         return data_label
 
     def __repr__(self):
-        s = "Derived Field "
+        if self._function == NullFunc:
+            s = "On-Disk Field "
+        elif self._function.func_name == "_TranslationFunc":
+            s = "Alias Field for \"%s\" " % (self._function.alias_name,)
+        else:
+            s = "Derived Field "
         if isinstance(self.name, tuple):
             s += "(%s, %s): " % self.name
         else:
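
A hedged sketch of the on-disk and alias cases the updated __repr__
distinguishes (the Enzo field names and units are illustrative):

    >>> ds.fields.enzo.Density
    On-Disk Field (enzo, Density): (units: code_mass/code_length**3)
    >>> ds.fields.gas.density
    Alias Field for "('enzo', 'Density')" (gas, density): (units: g/cm**3)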


https://bitbucket.org/yt_analysis/yt/commits/5ea9009614a9/
Changeset:   5ea9009614a9
Branch:      yt
User:        MatthewTurk
Date:        2015-11-18 04:40:04+00:00
Summary:     Merging from upstream
Affected #:  426 files

diff -r 7e150e1b959f73601beb86b50e819086f50a3126 -r 5ea9009614a9d1e890c19daaed5966f0c917d2c4 .hgignore
--- a/.hgignore
+++ b/.hgignore
@@ -10,6 +10,7 @@
 yt/analysis_modules/halo_finding/rockstar/rockstar_groupies.c
 yt/analysis_modules/halo_finding/rockstar/rockstar_interface.c
 yt/analysis_modules/ppv_cube/ppv_utils.c
+yt/analysis_modules/photon_simulator/utils.c
 yt/frontends/ramses/_ramses_reader.cpp
 yt/frontends/sph/smoothing_kernel.c
 yt/geometry/fake_octree.c
@@ -31,6 +32,7 @@
 yt/utilities/lib/CICDeposit.c
 yt/utilities/lib/ContourFinding.c
 yt/utilities/lib/DepthFirstOctree.c
+yt/utilities/lib/element_mappings.c
 yt/utilities/lib/FixedInterpolator.c
 yt/utilities/lib/fortran_reader.c
 yt/utilities/lib/freetype_writer.c

diff -r 7e150e1b959f73601beb86b50e819086f50a3126 -r 5ea9009614a9d1e890c19daaed5966f0c917d2c4 MANIFEST.in
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -1,10 +1,11 @@
-include distribute_setup.py README* CREDITS COPYING.txt CITATION requirements.txt optional-requirements.txt
+include README* CREDITS COPYING.txt CITATION requirements.txt optional-requirements.txt
 include yt/visualization/mapserver/html/map_index.html
 include yt/visualization/mapserver/html/leaflet/*.css
 include yt/visualization/mapserver/html/leaflet/*.js
 include yt/visualization/mapserver/html/leaflet/images/*.png
+exclude scripts/pr_backport.py
 recursive-include yt *.py *.pyx *.pxd *.h README* *.txt LICENSE* *.cu
-recursive-include doc *.rst *.txt *.py *.ipynb *.png *.jpg *.css *.inc *.html
+recursive-include doc *.rst *.txt *.py *.ipynb *.png *.jpg *.css *.html
 recursive-include doc *.h *.c *.sh *.svgz *.pdf *.svg *.pyx
 include doc/README doc/activate doc/activate.csh doc/cheatsheet.tex
 include doc/extensions/README doc/Makefile
@@ -12,5 +13,3 @@
 prune doc/build
 recursive-include yt/analysis_modules/halo_finding/rockstar *.py *.pyx
 prune yt/frontends/_skeleton
-prune tests
-exclude clean.sh .hgchurn

diff -r 7e150e1b959f73601beb86b50e819086f50a3126 -r 5ea9009614a9d1e890c19daaed5966f0c917d2c4 coding_styleguide.txt
--- /dev/null
+++ b/coding_styleguide.txt
@@ -0,0 +1,101 @@
+Style Guide for Coding in yt
+============================
+
+Coding Style Guide
+------------------
+
+ * In general, follow PEP-8 guidelines.
+   http://www.python.org/dev/peps/pep-0008/
+ * Classes are ``ConjoinedCapitals``, methods and functions are
+   ``lowercase_with_underscores``.
+ * Use 4 spaces, not tabs, to represent indentation.
+ * Line widths should not be more than 80 characters.
+ * Do not use nested classes unless you have a very good reason to, such as
+   requiring a namespace or class-definition modification.  Classes should live
+   at the top level.  ``__metaclass__`` is exempt from this.
+ * Do not use unnecessary parentheses in conditionals.  ``if((something) and
+   (something_else))`` should be rewritten as
+   ``if something and something_else``. Python is more forgiving than C.
+ * Avoid copying memory when possible. For example, don't do
+   ``a = a.reshape(3,4)`` when ``a.shape = (3,4)`` will do, and ``a = a * 3``
+   should be ``np.multiply(a, 3, a)``.
+ * In general, avoid all double-underscore method names: ``__something`` is
+   usually unnecessary.
+ * When writing a subclass, use the super built-in to access the super class,
+   rather than explicitly. Ex: ``super(SpecialGridSubclass, self).__init__()``
+   rather than ``SpecialGrid.__init__()``.
+ * Docstrings should describe input, output, behavior, and any state changes
+   that occur on an object.  See the file ``doc/docstring_example.txt`` for a
+   fiducial example of a docstring.
+ * Use only one top-level import per line. Unless there is a good reason not to,
+   imports should happen at the top of the file, after the copyright blurb.
+ * Never compare with ``True`` or ``False`` using ``==`` or ``!=``, always use
+   ``is`` or ``is not``.
+ * If you are comparing with a numpy boolean array, just refer to the array.
+   Ex: do ``np.all(array)`` instead of ``np.all(array == True)``.
+ * Never compare with None using ``==`` or ``!=``, use ``is None`` or
+   ``is not None``.
+ * Use ``statement is not True`` instead of ``not statement is True``
+ * Only one statement per line, do not use semicolons to put two or more
+   statements on a single line.
+ * Only declare local variables if they will be used later. If you do not use the
+   return value of a function, do not store it in a variable.
+ * Add tests for new functionality. When fixing a bug, consider adding a test to
+   prevent the bug from recurring.
+
+API Guide
+---------
+
+ * Do not use ``from some_module import *``
+ * Internally, only import from source files directly -- instead of:
+
+     ``from yt.visualization.api import ProjectionPlot``
+
+   do:
+
+     ``from yt.visualization.plot_window import ProjectionPlot``
+
+ * Import symbols from the module where they are defined, avoid transitive
+   imports.
+ * Import standard library modules, functions, and classes from builtins, do not
+   import them from other yt files.
+ * Numpy is to be imported as ``np``.
+ * Do not use too many keyword arguments.  If you have a lot of keyword
+   arguments, then you are doing too much in ``__init__`` and not enough via
+   parameter setting.
+ * In function arguments, place spaces after commas.  ``def something(a,b,c)``
+   should be ``def something(a, b, c)``.
+ * Don't create a new class to replicate the functionality of an old class --
+   replace the old class.  Too many options makes for a confusing user
+   experience.
+ * Parameter files external to yt are a last resort.
+ * The usage of the ``**kwargs`` construction should be avoided.  If they cannot
+   be avoided, they must be explained, even if they are only to be passed on to
+   a nested function.
+
+Variable Names and Enzo-isms
+----------------------------
+Avoid Enzo-isms.  This includes but is not limited to:
+
+ * Hard-coding parameter names that are the same as those in Enzo.  The
+   following translation table should be of some help.  Note that the
+   parameters are now properties on a ``Dataset`` subclass: you access them
+   like ``ds.refine_by``.
+
+    - ``RefineBy`` => ``refine_by``
+    - ``TopGridRank`` => ``dimensionality``
+    - ``TopGridDimensions`` => ``domain_dimensions``
+    - ``InitialTime`` => ``current_time``
+    - ``DomainLeftEdge`` => ``domain_left_edge``
+    - ``DomainRightEdge`` => ``domain_right_edge``
+    - ``CurrentTimeIdentifier`` => ``unique_identifier``
+    - ``CosmologyCurrentRedshift`` => ``current_redshift``
+    - ``ComovingCoordinates`` => ``cosmological_simulation``
+    - ``CosmologyOmegaMatterNow`` => ``omega_matter``
+    - ``CosmologyOmegaLambdaNow`` => ``omega_lambda``
+    - ``CosmologyHubbleConstantNow`` => ``hubble_constant``
+
+ * Do not assume that the domain runs from 0 .. 1.  This is not true
+   everywhere.
+ * Variable names should be short but descriptive.
+ * No globals!
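
As a small Python sketch of the memory-copy guidance above (shapes and values
are illustrative):

    import numpy as np

    a = np.arange(12.0)
    a.shape = (3, 4)       # reshape in place; raises if a copy would be needed
    np.multiply(a, 3, a)   # multiply in place rather than allocating "a * 3"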

diff -r 7e150e1b959f73601beb86b50e819086f50a3126 -r 5ea9009614a9d1e890c19daaed5966f0c917d2c4 doc/coding_styleguide.txt
--- a/doc/coding_styleguide.txt
+++ /dev/null
@@ -1,80 +0,0 @@
-Style Guide for Coding in yt
-============================
-
-Coding Style Guide
-------------------
-
- * In general, follow PEP-8 guidelines.
-   http://www.python.org/dev/peps/pep-0008/
- * Classes are ConjoinedCapitals, methods and functions are
-   lowercase_with_underscores.
- * Use 4 spaces, not tabs, to represent indentation.
- * Line widths should not be more than 80 characters.
- * Do not use nested classes unless you have a very good reason to, such as
-   requiring a namespace or class-definition modification.  Classes should live
-   at the top level.  __metaclass__ is exempt from this.
- * Do not use unnecessary parenthesis in conditionals.  if((something) and
-   (something_else)) should be rewritten as if something and something_else.
-   Python is more forgiving than C.
- * Avoid copying memory when possible. For example, don't do 
-   "a = a.reshape(3,4)" when "a.shape = (3,4)" will do, and "a = a * 3" should
-   be "np.multiply(a, 3, a)".
- * In general, avoid all double-underscore method names: __something is usually
-   unnecessary.
- * When writing a subclass, use the super built-in to access the super class,
-   rather than explicitly. Ex: "super(SpecialGrid, self).__init__()" rather than
-   "SpecialGrid.__init__()".
- * Doc strings should describe input, output, behavior, and any state changes
-   that occur on an object.  See the file `doc/docstring_example.txt` for a
-   fiducial example of a docstring.
-
-API Guide
----------
-
- * Do not import "*" from anything other than "yt.funcs".
- * Internally, only import from source files directly -- instead of:
-
-   from yt.visualization.api import ProjectionPlot
-
-   do:
-
-   from yt.visualization.plot_window import ProjectionPlot
-
- * Numpy is to be imported as "np", after a long time of using "na".
- * Do not use too many keyword arguments.  If you have a lot of keyword
-   arguments, then you are doing too much in __init__ and not enough via
-   parameter setting.
- * In function arguments, place spaces before commas.  def something(a,b,c)
-   should be def something(a, b, c).
- * Don't create a new class to replicate the functionality of an old class --
-   replace the old class.  Too many options makes for a confusing user
-   experience.
- * Parameter files external to yt are a last resort.
- * The usage of the **kwargs construction should be avoided.  If they cannot
-   be avoided, they must be explained, even if they are only to be passed on to
-   a nested function.
-
-Variable Names and Enzo-isms
-----------------------------
-
- * Avoid Enzo-isms.  This includes but is not limited to:
-   * Hard-coding parameter names that are the same as those in Enzo.  The
-     following translation table should be of some help.  Note that the
-     parameters are now properties on a Dataset subclass: you access them
-     like ds.refine_by .
-     * RefineBy => refine_by
-     * TopGridRank => dimensionality
-     * TopGridDimensions => domain_dimensions
-     * InitialTime => current_time
-     * DomainLeftEdge => domain_left_edge
-     * DomainRightEdge => domain_right_edge
-     * CurrentTimeIdentifier => unique_identifier
-     * CosmologyCurrentRedshift => current_redshift
-     * ComovingCoordinates => cosmological_simulation
-     * CosmologyOmegaMatterNow => omega_matter
-     * CosmologyOmegaLambdaNow => omega_lambda
-     * CosmologyHubbleConstantNow => hubble_constant
-   * Do not assume that the domain runs from 0 .. 1.  This is not true
-     everywhere.
- * Variable names should be short but descriptive.
- * No globals!

diff -r 7e150e1b959f73601beb86b50e819086f50a3126 -r 5ea9009614a9d1e890c19daaed5966f0c917d2c4 doc/get_yt.sh
--- a/doc/get_yt.sh
+++ b/doc/get_yt.sh
@@ -23,7 +23,7 @@
 DEST_SUFFIX="yt-conda"
 DEST_DIR="`pwd`/${DEST_SUFFIX/ /}"   # Installation location
 BRANCH="yt" # This is the branch to which we will forcibly update.
-INST_YT_SOURCE=1 # Do we do a source install of yt?
+INST_YT_SOURCE=0 # Do we do a source install of yt?
 
 ##################################################################
 #                                                                #
@@ -37,7 +37,7 @@
 # ( SOMECOMMAND 2>&1 ) 1>> ${LOG_FILE} || do_exit
 
 MINICONDA_URLBASE="http://repo.continuum.io/miniconda"
-MINICONDA_VERSION="1.9.1"
+MINICONDA_VERSION="latest"
 YT_RECIPE_REPO="https://bitbucket.org/yt_analysis/yt_conda/raw/default"
 
 function do_exit
@@ -61,12 +61,14 @@
     ( $* 2>&1 ) 1>> ${LOG_FILE} || do_exit
 }
 
-function get_ytproject
-{
-    [ -e $1 ] && return
-    echo "Downloading $1 from yt-project.org"
-    ${GETFILE} "http://yt-project.org/dependencies/$1" || do_exit
-    ( ${SHASUM} -c $1.sha512 2>&1 ) 1>> ${LOG_FILE} || do_exit
+# These are needed to prevent pushd and popd from printing to stdout
+
+function pushd () {
+    command pushd "$@" > /dev/null
+}
+
+function popd () {
+    command popd "$@" > /dev/null
 }
 
 function get_ytdata
@@ -101,122 +103,125 @@
 echo "This will install Miniconda from Continuum Analytics, the necessary"
 echo "packages to run yt, and create a self-contained environment for you to"
 echo "use yt.  Additionally, Conda itself provides the ability to install"
-echo "many other packages that can be used for other purposes."
+echo "many other packages that can be used for other purposes using the"
+echo "'conda install' command."
 echo
 MYOS=`uname -s`       # A guess at the OS
-if [ "${MYOS##Darwin}" != "${MYOS}" ]
+if [ $INST_YT_SOURCE -ne 0 ]
 then
-  echo "Looks like you're running on Mac OSX."
-  echo
-  echo "NOTE: you must have the Xcode command line tools installed."
-  echo
-  echo "The instructions for obtaining these tools varies according"
-  echo "to your exact OS version.  On older versions of OS X, you"
-  echo "must register for an account on the apple developer tools"
-  echo "website: https://developer.apple.com/downloads to obtain the"
-  echo "download link."
-  echo
-  echo "We have gathered some additional instructions for each"
-  echo "version of OS X below. If you have trouble installing yt"
-  echo "after following these instructions, don't hesitate to contact"
-  echo "the yt user's e-mail list."
-  echo
-  echo "You can see which version of OSX you are running by clicking"
-  echo "'About This Mac' in the apple menu on the left hand side of"
-  echo "menu bar.  We're assuming that you've installed all operating"
-  echo "system updates; if you have an older version, we suggest"
-  echo "running software update and installing all available updates."
-  echo
-  echo "OS X 10.5.8: search for and download Xcode 3.1.4 from the"
-  echo "Apple developer tools website."
-  echo
-  echo "OS X 10.6.8: search for and download Xcode 3.2 from the Apple"
-  echo "developer tools website.  You can either download the"
-  echo "Xcode 3.2.2 Developer Tools package (744 MB) and then use"
-  echo "Software Update to update to XCode 3.2.6 or"
-  echo "alternatively, you can download the Xcode 3.2.6/iOS SDK"
-  echo "bundle (4.1 GB)."
-  echo
-  echo "OS X 10.7.5: download Xcode 4.2 from the mac app store"
-  echo "(search for Xcode)."
-  echo "Alternatively, download the Xcode command line tools from"
-  echo "the Apple developer tools website."
-  echo
-  echo "OS X 10.8.2: download Xcode 4.6.1 from the mac app store."
-  echo "(search for Xcode)."
-  echo "Additionally, you will have to manually install the Xcode"
-  echo "command line tools, see:"
-  echo "http://stackoverflow.com/questions/9353444"
-  echo "Alternatively, download the Xcode command line tools from"
-  echo "the Apple developer tools website."
-  echo
-  echo "NOTE: It's possible that the installation will fail, if so,"
-  echo "please set the following environment variables, remove any"
-  echo "broken installation tree, and re-run this script verbatim."
-  echo
-  echo "$ export CC=gcc"
-  echo "$ export CXX=g++"
-  echo
-  MINICONDA_OS="MacOSX-x86_64"
+    if [ "${MYOS##Darwin}" != "${MYOS}" ]
+    then
+        echo "Looks like you're running on Mac OSX."
+        echo
+        echo "NOTE: you must have the Xcode command line tools installed."
+        echo
+        echo "The instructions for obtaining these tools varies according"
+        echo "to your exact OS version.  On older versions of OS X, you"
+        echo "must register for an account on the apple developer tools"
+        echo "website: https://developer.apple.com/downloads to obtain the"
+        echo "download link."
+        echo
+        echo "We have gathered some additional instructions for each"
+        echo "version of OS X below. If you have trouble installing yt"
+        echo "after following these instructions, don't hesitate to contact"
+        echo "the yt user's e-mail list."
+        echo
+        echo "You can see which version of OSX you are running by clicking"
+        echo "'About This Mac' in the apple menu on the left hand side of"
+        echo "menu bar.  We're assuming that you've installed all operating"
+        echo "system updates; if you have an older version, we suggest"
+        echo "running software update and installing all available updates."
+        echo
+        echo "OS X 10.5.8: search for and download Xcode 3.1.4 from the"
+        echo "Apple developer tools website."
+        echo
+        echo "OS X 10.6.8: search for and download Xcode 3.2 from the Apple"
+        echo "developer tools website.  You can either download the"
+        echo "Xcode 3.2.2 Developer Tools package (744 MB) and then use"
+        echo "Software Update to update to XCode 3.2.6 or"
+        echo "alternatively, you can download the Xcode 3.2.6/iOS SDK"
+        echo "bundle (4.1 GB)."
+        echo
+        echo "OS X 10.7.5: download Xcode 4.2 from the mac app store"
+        echo "(search for Xcode)."
+        echo "Alternatively, download the Xcode command line tools from"
+        echo "the Apple developer tools website."
+        echo
+        echo "OS X 10.8.4, 10.9, 10.10, and 10.11:"
+        echo "download the appropriate version of Xcode from the"
+        echo "mac app store (search for Xcode)."
+        echo
+        echo "Additionally, you will have to manually install the Xcode"
+        echo "command line tools."
+        echo
+        echo "For OS X 10.8, see:"
+        echo "http://stackoverflow.com/questions/9353444"
+        echo
+        echo "For OS X 10.9 and newer the command line tools can be installed"
+        echo "with the following command:"
+        echo "    xcode-select --install"
+    fi
+    if [ "${MYOS##Linux}" != "${MYOS}" ]
+    then
+        echo "Looks like you're on Linux."
+        echo
+        echo "Please make sure you have the developer tools for your OS "
+        echo "installed."
+        echo
+        if [ -f /etc/SuSE-release ] && [ `grep --count SUSE /etc/SuSE-release` -gt 0 ]
+        then
+            echo "Looks like you're on an OpenSUSE-compatible machine."
+            echo
+            echo "You need to have these packages installed:"
+            echo
+            echo "  * devel_C_C++"
+            echo "  * libuuid-devel"
+            echo "  * gcc-c++"
+            echo "  * chrpath"
+            echo
+            echo "You can accomplish this by executing:"
+            echo
+            echo "$ sudo zypper install -t pattern devel_C_C++"
+            echo "$ sudo zypper install gcc-c++ libuuid-devel zip"
+            echo "$ sudo zypper install chrpath"
+        fi
+        if [ -f /etc/lsb-release ] && [ `grep --count buntu /etc/lsb-release` -gt 0 ]
+        then
+            echo "Looks like you're on an Ubuntu-compatible machine."
+            echo
+            echo "You need to have these packages installed:"
+            echo
+            echo "  * libssl-dev"
+            echo "  * build-essential"
+            echo "  * libncurses5"
+            echo "  * libncurses5-dev"
+            echo "  * uuid-dev"
+            echo "  * chrpath"
+            echo
+            echo "You can accomplish this by executing:"
+            echo
+            echo "$ sudo apt-get install libssl-dev build-essential libncurses5 libncurses5-dev zip uuid-dev chrpath"
+            echo
+        fi
+        echo
+        echo "If you are running on a supercomputer or other module-enabled"
+        echo "system, please make sure that the GNU module has been loaded."
+        echo
+    fi
 fi
-if [ "${MYOS##Linux}" != "${MYOS}" ]
+if [ "${MYOS##x86_64}" != "${MYOS}" ]
 then
-  echo "Looks like you're on Linux."
-  echo
-  echo "Please make sure you have the developer tools for your OS installed."
-  echo
-  if [ -f /etc/SuSE-release ] && [ `grep --count SUSE /etc/SuSE-release` -gt 0 ]
-  then
-    echo "Looks like you're on an OpenSUSE-compatible machine."
-    echo
-    echo "You need to have these packages installed:"
-    echo
-    echo "  * devel_C_C++"
-    echo "  * libopenssl-devel"
-    echo "  * libuuid-devel"
-    echo "  * zip"
-    echo "  * gcc-c++"
-    echo "  * chrpath"
-    echo
-    echo "You can accomplish this by executing:"
-    echo
-    echo "$ sudo zypper install -t pattern devel_C_C++"
-    echo "$ sudo zypper install gcc-c++ libopenssl-devel libuuid-devel zip"
-    echo "$ sudo zypper install chrpath"
-  fi
-  if [ -f /etc/lsb-release ] && [ `grep --count buntu /etc/lsb-release` -gt 0 ]
-  then
-    echo "Looks like you're on an Ubuntu-compatible machine."
-    echo
-    echo "You need to have these packages installed:"
-    echo
-    echo "  * libssl-dev"
-    echo "  * build-essential"
-    echo "  * libncurses5"
-    echo "  * libncurses5-dev"
-    echo "  * zip"
-    echo "  * uuid-dev"
-    echo "  * chrpath"
-    echo
-    echo "You can accomplish this by executing:"
-    echo
-    echo "$ sudo apt-get install libssl-dev build-essential libncurses5 libncurses5-dev zip uuid-dev chrpath"
-    echo
-  fi
-  echo
-  echo "If you are running on a supercomputer or other module-enabled"
-  echo "system, please make sure that the GNU module has been loaded."
-  echo
-  if [ "${MYOS##x86_64}" != "${MYOS}" ]
-  then
     MINICONDA_OS="Linux-x86_64"
-  elif [ "${MYOS##i386}" != "${MYOS}" ]
-  then
+elif [ "${MYOS##i386}" != "${MYOS}" ]
+then
     MINICONDA_OS="Linux-x86"
-  else
-    echo "Not sure which type of Linux you're on.  Going with x86_64."
+elif [ "${MYOS##Darwin}" != "${MYOS}" ]
+then
+     MINICONDA_OS="MacOSX-x86_64"
+else
+    echo "Not sure which Linux distro you are running."
+    echo "Going with x86_64 architecture."
     MINICONDA_OS="Linux-x86_64"
-  fi
 fi
 echo
 echo "If you'd rather not continue, hit Ctrl-C."
@@ -233,7 +238,7 @@
 if type -P wget &>/dev/null
 then
     echo "Using wget"
-    export GETFILE="wget -nv"
+    export GETFILE="wget -nv -nc"
 else
     echo "Using curl"
     export GETFILE="curl -sSO"
@@ -250,9 +255,6 @@
 
 log_cmd bash ./${MINICONDA_PKG} -b -p $DEST_DIR
 
-# I don't think we need OR want this anymore:
-#export LD_LIBRARY_PATH=${DEST_DIR}/lib:$LD_LIBRARY_PATH
-
 # This we *do* need.
 export PATH=${DEST_DIR}/bin:$PATH
 
@@ -261,51 +263,40 @@
 
 declare -a YT_DEPS
 YT_DEPS+=('python')
-YT_DEPS+=('distribute')
-YT_DEPS+=('libpng')
+YT_DEPS+=('setuptools')
 YT_DEPS+=('numpy')
-YT_DEPS+=('pygments')
-YT_DEPS+=('jinja2')
-YT_DEPS+=('tornado')
-YT_DEPS+=('pyzmq')
+YT_DEPS+=('jupyter')
 YT_DEPS+=('ipython')
 YT_DEPS+=('sphinx')
 YT_DEPS+=('h5py')
 YT_DEPS+=('matplotlib')
 YT_DEPS+=('cython')
 YT_DEPS+=('nose')
+YT_DEPS+=('conda-build')
+YT_DEPS+=('mercurial')
+YT_DEPS+=('sympy')
 
 # Here is our dependency list for yt
-log_cmd conda config --system --add channels http://repo.continuum.io/pkgs/free
-log_cmd conda config --system --add channels http://repo.continuum.io/pkgs/dev
-log_cmd conda config --system --add channels http://repo.continuum.io/pkgs/gpl
 log_cmd conda update --yes conda
 
-echo "Current dependencies: ${YT_DEPS[@]}"
 log_cmd echo "DEPENDENCIES" ${YT_DEPS[@]}
-log_cmd conda install --yes ${YT_DEPS[@]}
-
-echo "Installing mercurial."
-get_ytrecipe mercurial
+for YT_DEP in "${YT_DEPS[@]}"; do
+    echo "Installing $YT_DEP"
+    log_cmd conda install --yes ${YT_DEP}
+done
 
 if [ $INST_YT_SOURCE -eq 0 ]
 then
-  echo "Installing yt as a package."
-  get_ytrecipe yt
+  echo "Installing yt"
+  log_cmd conda install --yes yt
 else
-  # We do a source install.
-  YT_DIR="${DEST_DIR}/src/yt-hg"
-  export PNG_DIR=${DEST_DIR}
-  export FTYPE_DIR=${DEST_DIR}
-  export HDF5_DIR=${DEST_DIR}
-  log_cmd hg clone -r ${BRANCH} https://bitbucket.org/yt_analysis/yt ${YT_DIR}
-  pushd ${YT_DIR}
-  log_cmd python setup.py develop
-  popd
-  log_cmd cp ${YT_DIR}/doc/activate ${DEST_DIR}/bin/activate 
-  log_cmd sed -i.bak -e "s,__YT_DIR__,${DEST_DIR}," ${DEST_DIR}/bin/activate
-  log_cmd cp ${YT_DIR}/doc/activate.csh ${DEST_DIR}/bin/activate.csh
-  log_cmd sed -i.bak -e "s,__YT_DIR__,${DEST_DIR}," ${DEST_DIR}/bin/activate.csh
+    # We do a source install.
+    echo "Installing yt from source"
+    YT_DIR="${DEST_DIR}/src/yt-hg"
+    log_cmd hg clone -r ${BRANCH} https://bitbucket.org/yt_analysis/yt ${YT_DIR}
+    pushd ${YT_DIR}
+    log_cmd python setup.py develop
+    popd
 fi
 
 echo
@@ -314,34 +305,26 @@
 echo
 echo "yt and the Conda system are now installed in $DEST_DIR ."
 echo
-if [ $INST_YT_SOURCE -eq 0 ]
-then
-  echo "You must now modify your PATH variable by prepending:"
-  echo 
-  echo "   $DEST_DIR/bin"
-  echo
-  echo "For example, if you use bash, place something like this at the end"
-  echo "of your ~/.bashrc :"
-  echo
-  echo "   export PATH=$DEST_DIR/bin:$PATH"
-else
-  echo "To run from this new installation, use the activate script for this "
-  echo "environment."
-  echo
-  echo "    $ source $DEST_DIR/bin/activate"
-  echo
-  echo "This modifies the environment variables YT_DEST, PATH, PYTHONPATH, and"
-  echo "LD_LIBRARY_PATH to match your new yt install.  If you use csh, just"
-  echo "append .csh to the above."
-fi
+echo "You must now modify your PATH variable by prepending:"
+echo 
+echo "   $DEST_DIR/bin"
+echo
+echo "On Bash-style shells you can copy/paste the following command to "
+echo "temporarily activate the yt installtion:"
+echo
+echo "    export PATH=$DEST_DIR/bin:\$PATH"
+echo
+echo "and on csh-style shells:"
+echo
+echo "    setenv PATH $DEST_DIR/bin:\$PATH"
+echo
+echo "You can also update the init file appropriate for your shell to include"
+echo "the same command."
 echo
 echo "To get started with yt, check out the orientation:"
 echo
 echo "    http://yt-project.org/doc/orientation/"
 echo
-echo "or just activate your environment and run 'yt serve' to bring up the"
-echo "yt GUI."
-echo
 echo "For support, see the website and join the mailing list:"
 echo
 echo "    http://yt-project.org/"

diff -r 7e150e1b959f73601beb86b50e819086f50a3126 -r 5ea9009614a9d1e890c19daaed5966f0c917d2c4 doc/install_script.sh
--- a/doc/install_script.sh
+++ b/doc/install_script.sh
@@ -233,53 +233,61 @@
         echo
         echo "NOTE: you must have the Xcode command line tools installed."
         echo
-	echo "The instructions for obtaining these tools varies according"
-	echo "to your exact OS version.  On older versions of OS X, you"
-	echo "must register for an account on the apple developer tools"
-	echo "website: https://developer.apple.com/downloads to obtain the"
-	echo "download link."
-	echo
-	echo "We have gathered some additional instructions for each"
-	echo "version of OS X below. If you have trouble installing yt"
-	echo "after following these instructions, don't hesitate to contact"
-	echo "the yt user's e-mail list."
-	echo
-	echo "You can see which version of OSX you are running by clicking"
-	echo "'About This Mac' in the apple menu on the left hand side of"
-	echo "menu bar.  We're assuming that you've installed all operating"
-	echo "system updates; if you have an older version, we suggest"
-	echo "running software update and installing all available updates."
-	echo
+        echo "The instructions for obtaining these tools varies according"
+        echo "to your exact OS version.  On older versions of OS X, you"
+        echo "must register for an account on the apple developer tools"
+        echo "website: https://developer.apple.com/downloads to obtain the"
+        echo "download link."
+        echo
+        echo "We have gathered some additional instructions for each"
+        echo "version of OS X below. If you have trouble installing yt"
+        echo "after following these instructions, don't hesitate to contact"
+        echo "the yt user's e-mail list."
+        echo
+        echo "You can see which version of OSX you are running by clicking"
+        echo "'About This Mac' in the apple menu on the left hand side of"
+        echo "menu bar.  We're assuming that you've installed all operating"
+        echo "system updates; if you have an older version, we suggest"
+        echo "running software update and installing all available updates."
+        echo
         echo "OS X 10.5.8: search for and download Xcode 3.1.4 from the"
-	echo "Apple developer tools website."
+        echo "Apple developer tools website."
         echo
         echo "OS X 10.6.8: search for and download Xcode 3.2 from the Apple"
-	echo "developer tools website.  You can either download the"
-	echo "Xcode 3.2.2 Developer Tools package (744 MB) and then use"
-	echo "Software Update to update to XCode 3.2.6 or"
-	echo "alternatively, you can download the Xcode 3.2.6/iOS SDK"
-	echo "bundle (4.1 GB)."
+        echo "developer tools website.  You can either download the"
+        echo "Xcode 3.2.2 Developer Tools package (744 MB) and then use"
+        echo "Software Update to update to XCode 3.2.6 or"
+        echo "alternatively, you can download the Xcode 3.2.6/iOS SDK"
+        echo "bundle (4.1 GB)."
         echo
         echo "OS X 10.7.5: download Xcode 4.2 from the mac app store"
-	echo "(search for Xcode)."
+        echo "(search for Xcode)."
         echo "Alternatively, download the Xcode command line tools from"
         echo "the Apple developer tools website."
         echo
-	echo "OS X 10.8.4, 10.9, and 10.10: download the appropriate version of"
-	echo "Xcode from the mac app store (search for Xcode)."
-    echo
-	echo "Additionally, you will have to manually install the Xcode"
-	echo "command line tools."
-    echo
-    echo "For OS X 10.8, see:"
-   	echo "http://stackoverflow.com/questions/9353444"
-	echo
-    echo "For OS X 10.9 and 10.10, the command line tools can be installed"
-    echo "with the following command:"
-    echo "    xcode-select --install"
-    echo
-    OSX_VERSION=`sw_vers -productVersion`
-    if [ "${OSX_VERSION##10.8}" != "${OSX_VERSION}" ]
+        echo "OS X 10.8.4, 10.9, 10.10, and 10.11:"
+        echo "download the appropriate version of Xcode from the"
+        echo "mac app store (search for Xcode)."
+        echo
+        echo "Additionally, you will have to manually install the Xcode"
+        echo "command line tools."
+        echo
+        echo "For OS X 10.8, see:"
+        echo "http://stackoverflow.com/questions/9353444"
+        echo
+        echo "For OS X 10.9 and newer the command line tools can be installed"
+        echo "with the following command:"
+        echo "    xcode-select --install"
+        echo
+        echo "For OS X 10.11, you will additionally need to install the OpenSSL"
+        echo "library using a package manager like homebrew or macports."
+        echo "If you install fails with a message like"
+        echo "    ImportError: cannot import HTTPSHandler"
+        echo "then you do not have the OpenSSL headers available in a location"
+        echo "visible to your C compiler. Consider installing yt using the"
+        echo "get_yt.sh script instead, as that bundles OpenSSL."
+        OSX_VERSION=`sw_vers -productVersion`
+        if [ "${OSX_VERSION##10.8}" != "${OSX_VERSION}" ]
         then
             MPL_SUPP_CFLAGS="${MPL_SUPP_CFLAGS} -mmacosx-version-min=10.7"
             MPL_SUPP_CXXFLAGS="${MPL_SUPP_CXXFLAGS} -mmacosx-version-min=10.7"
@@ -358,17 +366,17 @@
     fi
     if [ $INST_SCIPY -eq 1 ]
     then
-	echo
-	echo "Looks like you've requested that the install script build SciPy."
-	echo
-	echo "If the SciPy build fails, please uncomment one of the the lines"
-	echo "at the top of the install script that sets NUMPY_ARGS, delete"
-	echo "any broken installation tree, and re-run the install script"
-	echo "verbatim."
-	echo
-	echo "If that doesn't work, don't hesitate to ask for help on the yt"
-	echo "user's mailing list."
-	echo
+    echo
+    echo "Looks like you've requested that the install script build SciPy."
+    echo
+    echo "If the SciPy build fails, please uncomment one of the the lines"
+    echo "at the top of the install script that sets NUMPY_ARGS, delete"
+    echo "any broken installation tree, and re-run the install script"
+    echo "verbatim."
+    echo
+    echo "If that doesn't work, don't hesitate to ask for help on the yt"
+    echo "user's mailing list."
+    echo
     fi
     if [ ! -z "${CFLAGS}" ]
     then
@@ -490,9 +498,9 @@
 
 if [ $INST_PY3 -eq 1 ]
 then
-	 PYTHON_EXEC='python3.4'
+     PYTHON_EXEC='python3.4'
 else 
-	 PYTHON_EXEC='python2.7'
+     PYTHON_EXEC='python2.7'
 fi
 
 function do_setup_py
@@ -899,28 +907,28 @@
 else
     if [ ! -e $SCIPY/done ]
     then
-	if [ ! -e BLAS/done ]
-	then
-	    tar xfz blas.tar.gz
-	    echo "Building BLAS"
-	    cd BLAS
-	    gfortran -O2 -fPIC -fno-second-underscore -c *.f
-	    ( ar r libfblas.a *.o 2>&1 ) 1>> ${LOG_FILE}
-	    ( ranlib libfblas.a 2>&1 ) 1>> ${LOG_FILE}
-	    rm -rf *.o
-	    touch done
-	    cd ..
-	fi
-	if [ ! -e $LAPACK/done ]
-	then
-	    tar xfz $LAPACK.tar.gz
-	    echo "Building LAPACK"
-	    cd $LAPACK/
-	    cp INSTALL/make.inc.gfortran make.inc
-	    ( make lapacklib OPTS="-fPIC -O2" NOOPT="-fPIC -O0" CFLAGS=-fPIC LDFLAGS=-fPIC 2>&1 ) 1>> ${LOG_FILE} || do_exit
-	    touch done
-	    cd ..
-	fi
+    if [ ! -e BLAS/done ]
+    then
+        tar xfz blas.tar.gz
+        echo "Building BLAS"
+        cd BLAS
+        gfortran -O2 -fPIC -fno-second-underscore -c *.f
+        ( ar r libfblas.a *.o 2>&1 ) 1>> ${LOG_FILE}
+        ( ranlib libfblas.a 2>&1 ) 1>> ${LOG_FILE}
+        rm -rf *.o
+        touch done
+        cd ..
+    fi
+    if [ ! -e $LAPACK/done ]
+    then
+        tar xfz $LAPACK.tar.gz
+        echo "Building LAPACK"
+        cd $LAPACK/
+        cp INSTALL/make.inc.gfortran make.inc
+        ( make lapacklib OPTS="-fPIC -O2" NOOPT="-fPIC -O0" CFLAGS=-fPIC LDFLAGS=-fPIC 2>&1 ) 1>> ${LOG_FILE} || do_exit
+        touch done
+        cd ..
+    fi
     fi
     export BLAS=$PWD/BLAS/libfblas.a
     export LAPACK=$PWD/$LAPACK/liblapack.a
@@ -1030,7 +1038,7 @@
 cd $MY_PWD
 
 if !( ( ${DEST_DIR}/bin/${PYTHON_EXEC} -c "import readline" 2>&1 )>> ${LOG_FILE}) || \
-	[[ "${MYOS##Darwin}" != "${MYOS}" && $INST_PY3 -eq 1 ]] 
+    [[ "${MYOS##Darwin}" != "${MYOS}" && $INST_PY3 -eq 1 ]] 
 then
     if !( ( ${DEST_DIR}/bin/${PYTHON_EXEC} -c "import gnureadline" 2>&1 )>> ${LOG_FILE})
     then

diff -r 7e150e1b959f73601beb86b50e819086f50a3126 -r 5ea9009614a9d1e890c19daaed5966f0c917d2c4 doc/source/analyzing/analysis_modules/ellipsoid_analysis.rst
--- a/doc/source/analyzing/analysis_modules/ellipsoid_analysis.rst
+++ b/doc/source/analyzing/analysis_modules/ellipsoid_analysis.rst
@@ -59,7 +59,7 @@
   from yt.analysis_modules.halo_finding.api import *
 
   ds = yt.load('Enzo_64/RD0006/RedshiftOutput0006')
-  halo_list = parallelHF(ds)
+  halo_list = HaloFinder(ds)
   halo_list.dump('MyHaloList')
 
 Ellipsoid Parameters

diff -r 7e150e1b959f73601beb86b50e819086f50a3126 -r 5ea9009614a9d1e890c19daaed5966f0c917d2c4 doc/source/analyzing/analysis_modules/photon_simulator.rst
--- a/doc/source/analyzing/analysis_modules/photon_simulator.rst
+++ b/doc/source/analyzing/analysis_modules/photon_simulator.rst
@@ -1,3 +1,5 @@
+.. _photon_simulator:
+
 Constructing Mock X-ray Observations
 ------------------------------------
 
@@ -98,9 +100,8 @@
    `AtomDB <http://www.atomdb.org>`_ and get the files from the
    `xray_data <http://yt-project.org/data/xray_data.tar.gz>`_ auxiliary
    data package (see the ``xray_data`` `README <xray_data_README.html>`_ 
-   for details on the latter). Make sure that
-   in what follows you specify the full path to the locations of these
-   files.
+   for details on the latter). Make sure that in what follows you 
+   specify the full path to the locations of these files.
 
 To generate photons from this dataset, we have several different things
 we need to set up. The first is a standard yt data object. It could
@@ -132,7 +133,7 @@
 
 .. code:: python
 
-    apec_model = TableApecModel("atomdb_v2.0.2",
+    apec_model = TableApecModel("$SPECTRAL_DATA/spectral",
                                 0.01, 20.0, 20000,
                                 thermal_broad=False,
                                 apec_vers="2.0.2")
@@ -197,7 +198,7 @@
 
 .. code:: python
 
-    A = 6000.
+    A = 3000.
     exp_time = 4.0e5
     redshift = 0.05
     cosmo = Cosmology()
@@ -298,7 +299,7 @@
 
 The second option, ``TableAbsorbModel``, takes as input an HDF5 file
 containing two datasets, ``"energy"`` (in keV), and ``"cross_section"``
-(in cm2), and the Galactic column density :math:`N_H`:
+(in :math:`cm^2`), and the Galactic column density :math:`N_H`:
 
 .. code:: python
 
@@ -307,7 +308,7 @@
 Now we're ready to project the photons. First, we choose a line-of-sight
 vector ``normal``. Second, we'll adjust the exposure time and the redshift.
 Third, we'll pass in the absorption ``SpectrumModel``. Fourth, we'll
-specify a ``sky_center`` in RA,DEC on the sky in degrees.
+specify a ``sky_center`` in RA and DEC on the sky in degrees.
 
 Also, we're going to convolve the photons with instrument ``responses``.
 For this, you need a ARF/RMF pair with matching energy bins. This is of
@@ -322,8 +323,8 @@
 
 .. code:: python
 
-    ARF = "chandra_ACIS-S3_onaxis_arf.fits"
-    RMF = "chandra_ACIS-S3_onaxis_rmf.fits"
+    ARF = "acisi_aimpt_cy17.arf"
+    RMF = "acisi_aimpt_cy17.rmf"
     normal = [0.0,0.0,1.0]
     events = photons.project_photons(normal, exp_time_new=2.0e5, redshift_new=0.07, dist_new=None, 
                                      absorb_model=abs_model, sky_center=(187.5,12.333), responses=[ARF,RMF], 
@@ -540,7 +541,7 @@
 
    sphere = ds.sphere("c", (1.0,"Mpc"))
        
-   A = 6000.
+   A = 3000.
    exp_time = 2.0e5
    redshift = 0.05
    cosmo = Cosmology()
@@ -555,7 +556,8 @@
 
 
    events = photons.project_photons([0.0,0.0,1.0], 
-                                    responses=["sim_arf.fits","sim_rmf.fits"], 
+                                    responses=["acisi_aimpt_cy17.arf",
+                                               "acisi_aimpt_cy17.rmf"], 
                                     absorb_model=abs_model,
                                     north_vector=[0.0,1.0,0.0])
 

diff -r 7e150e1b959f73601beb86b50e819086f50a3126 -r 5ea9009614a9d1e890c19daaed5966f0c917d2c4 doc/source/analyzing/fields.rst
--- a/doc/source/analyzing/fields.rst
+++ b/doc/source/analyzing/fields.rst
@@ -374,6 +374,17 @@
 "Gas_smoothed_Temperature")``, which in most cases would be aliased to the
 field ``("gas", "temperature")`` for convenience.
 
+Other smoothing kernels besides the cubic spline one are available through a
+keyword argument ``kernel_name`` of the method ``add_smoothed_particle_field``.
+Current available kernel names include:
+
+* ``cubic``, ``quartic``, and ``quintic`` - spline kernels.
+* ``wendland2``, ``wendland4`` and ``wendland6`` - Wendland kernels.
+
+The added smoothed particle field can be accessed by
+``("deposit", "particletype_kernelname_smoothed_fieldname")`` (except for the
+cubic spline kernel, which obeys the naming scheme given above).
+
 Computing the Nth Nearest Neighbor
 ----------------------------------
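
A hedged usage sketch of the kernel_name keyword documented above (the dataset
path and particle/field names are illustrative):

    import yt
    ds = yt.load("GadgetDiskGalaxy/snapshot_200.hdf5")
    ds.add_smoothed_particle_field(("PartType0", "Temperature"),
                                   kernel_name="quintic")
    ad = ds.all_data()
    print(ad["deposit", "PartType0_quintic_smoothed_Temperature"])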
 

diff -r 7e150e1b959f73601beb86b50e819086f50a3126 -r 5ea9009614a9d1e890c19daaed5966f0c917d2c4 doc/source/analyzing/filtering.rst
--- a/doc/source/analyzing/filtering.rst
+++ b/doc/source/analyzing/filtering.rst
@@ -23,7 +23,7 @@
 ----------------------
 
 Mesh fields can be filtered by two methods: cut region objects 
-(:class:`~yt.data_objects.selection_data_containers.YTCutRegionBase`) 
+(:class:`~yt.data_objects.selection_data_containers.YTCutRegion`) 
 and NumPy boolean masks.  Boolean masks are simpler, but they only work
 for examining datasets, whereas cut regions objects create wholly new
 data objects suitable for full analysis (data examination, image generation, 
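
A hedged sketch contrasting the two filtering approaches (the dataset path and
threshold are illustrative):

    import yt
    ds = yt.load("IsolatedGalaxy/galaxy0030/galaxy0030")
    ad = ds.all_data()
    # Boolean mask: quick examination of arrays already in hand.
    hot = ad["temperature"] > 1e6
    print(ad["density"][hot])
    # Cut region: a new, fully usable data object built from conditionals.
    cr = ad.cut_region(['obj["temperature"] > 1e6'])
    print(cr["density"])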

diff -r 7e150e1b959f73601beb86b50e819086f50a3126 -r 5ea9009614a9d1e890c19daaed5966f0c917d2c4 doc/source/analyzing/generating_processed_data.rst
--- a/doc/source/analyzing/generating_processed_data.rst
+++ b/doc/source/analyzing/generating_processed_data.rst
@@ -54,10 +54,13 @@
  
 .. code-block:: python
 
-   frb.export_hdf5("my_images.h5", fields=["density","temperature"])
+   frb.save_as_dataset("my_images.h5", fields=["density","temperature"])
    frb.export_fits("my_images.fits", fields=["density","temperature"],
                    clobber=True, units="kpc")
 
+In the HDF5 case, the created file can be reloaded just like a regular dataset with
+``yt.load`` and will, itself, be a first-class dataset.  For more information on
+this, see :ref:`saving-grid-data-containers`.
 In the FITS case, there is an option for setting the ``units`` of the coordinate system in
 the file. If you want to overwrite a file with the same name, set ``clobber=True``. 
 
@@ -173,7 +176,7 @@
 ---------------------------------
 
 To calculate the values along a line connecting two points in a simulation, you
-can use the object :class:`~yt.data_objects.selection_data_containers.YTRayBase`,
+can use the object :class:`~yt.data_objects.selection_data_containers.YTRay`,
 accessible as the ``ray`` property on a index.  (See :ref:`data-objects`
 for more information on this.)  To do so, you can supply two points and access
 fields within the returned object.  For instance, this code will generate a ray
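
A hedged sketch of reloading the file written by frb.save_as_dataset above as
a first-class dataset:

    import yt
    ds = yt.load("my_images.h5")   # written by frb.save_as_dataset above
    ad = ds.all_data()
    print(ad["density"])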

diff -r 7e150e1b959f73601beb86b50e819086f50a3126 -r 5ea9009614a9d1e890c19daaed5966f0c917d2c4 doc/source/analyzing/index.rst
--- a/doc/source/analyzing/index.rst
+++ b/doc/source/analyzing/index.rst
@@ -20,5 +20,6 @@
    units/index
    filtering
    generating_processed_data
+   saving_data
    time_series_analysis
    parallel_computation

diff -r 7e150e1b959f73601beb86b50e819086f50a3126 -r 5ea9009614a9d1e890c19daaed5966f0c917d2c4 doc/source/analyzing/objects.rst
--- a/doc/source/analyzing/objects.rst
+++ b/doc/source/analyzing/objects.rst
@@ -1,4 +1,4 @@
-.. _data-objects:
+.. _Data-objects:
 
 Data Objects
 ============
@@ -97,7 +97,7 @@
 """"""""""
 
 **Point** 
-    | Class :class:`~yt.data_objects.selection_data_containers.YTPointBase`    
+    | Class :class:`~yt.data_objects.selection_data_containers.YTPoint`    
     | Usage: ``point(coord, ds=None, field_parameters=None, data_source=None)``
     | A point defined by a single cell at specified coordinates.
 
@@ -105,7 +105,7 @@
 """"""""""
 
 **Ray (Axis-Aligned)** 
-    | Class :class:`~yt.data_objects.selection_data_containers.YTOrthoRayBase`
+    | Class :class:`~yt.data_objects.selection_data_containers.YTOrthoRay`
     | Usage: ``ortho_ray(axis, coord, ds=None, field_parameters=None, data_source=None)``
     | A line (of data cells) stretching through the full domain 
       aligned with one of the x,y,z axes.  Defined by an axis and a point
@@ -113,7 +113,7 @@
       :ref:`note about ray data value ordering <ray-data-ordering>`.
 
 **Ray (Arbitrarily-Aligned)** 
-    | Class :class:`~yt.data_objects.selection_data_containers.YTRayBase`
+    | Class :class:`~yt.data_objects.selection_data_containers.YTRay`
     | Usage: ``ray(start_coord, end_coord, ds=None, field_parameters=None, data_source=None)``
     | A line (of data cells) defined by arbitrary start and end coordinates. 
       Please see this 
@@ -123,13 +123,13 @@
 """"""""""
 
 **Slice (Axis-Aligned)** 
-    | Class :class:`~yt.data_objects.selection_data_containers.YTSliceBase`
+    | Class :class:`~yt.data_objects.selection_data_containers.YTSlice`
     | Usage: ``slice(axis, coord, center=None, ds=None, field_parameters=None, data_source=None)``
     | A plane normal to one of the axes and intersecting a particular 
       coordinate.
 
 **Slice (Arbitrarily-Aligned)** 
-    | Class :class:`~yt.data_objects.selection_data_containers.YTCuttingPlaneBase`
+    | Class :class:`~yt.data_objects.selection_data_containers.YTCuttingPlane`
     | Usage: ``cutting(normal, coord, north_vector=None, ds=None, field_parameters=None, data_source=None)``
     | A plane normal to a specified vector and intersecting a particular 
       coordinate.
@@ -145,7 +145,7 @@
       ``ds.region(ds.domain_center, ds.domain_left_edge, ds.domain_right_edge)``.
 
 **Box Region** 
-    | Class :class:`~yt.data_objects.selection_data_containers.YTRegionBase`
+    | Class :class:`~yt.data_objects.selection_data_containers.YTRegion`
     | Usage: ``region(center, left_edge, right_edge, fields=None, ds=None, field_parameters=None, data_source=None)``
     | Alternatively: ``box(left_edge, right_edge, fields=None, ds=None, field_parameters=None, data_source=None)``
     | A box-like region aligned with the grid axis orientation.  It is 
@@ -156,14 +156,14 @@
       is assumed to be the midpoint between the left and right edges.
 
 **Disk/Cylinder** 
-    | Class: :class:`~yt.data_objects.selection_data_containers.YTDiskBase`
+    | Class: :class:`~yt.data_objects.selection_data_containers.YTDisk`
     | Usage: ``disk(center, normal, radius, height, fields=None, ds=None, field_parameters=None, data_source=None)``
     | A cylinder defined by a point at the center of one of the circular bases,
       a normal vector to it defining the orientation of the length of the
       cylinder, and radius and height values for the cylinder's dimensions.
 
 **Ellipsoid** 
-    | Class :class:`~yt.data_objects.selection_data_containers.YTEllipsoidBase`
+    | Class :class:`~yt.data_objects.selection_data_containers.YTEllipsoid`
     | Usage: ``ellipsoid(center, semi_major_axis_length, semi_medium_axis_length, semi_minor_axis_length, semi_major_vector, tilt, fields=None, ds=None, field_parameters=None, data_source=None)``
     | An ellipsoid with axis magnitudes set by semi_major_axis_length, 
      semi_medium_axis_length, and semi_minor_axis_length.  semi_major_vector 
@@ -171,7 +171,7 @@
      of the semi-medium and semi_minor axes.
 
 **Sphere** 
-    | Class :class:`~yt.data_objects.selection_data_containers.YTSphereBase`
+    | Class :class:`~yt.data_objects.selection_data_containers.YTSphere`
     | Usage: ``sphere(center, radius, ds=None, field_parameters=None, data_source=None)``
     | A sphere defined by a central coordinate and a radius.
 
@@ -194,7 +194,7 @@
     | See :ref:`boolean_data_objects`.
 
 **Filter** 
-    | Class :class:`~yt.data_objects.selection_data_containers.YTCutRegionBase`
+    | Class :class:`~yt.data_objects.selection_data_containers.YTCutRegion`
     | Usage: ``cut_region(base_object, conditionals, ds=None, field_parameters=None)``
     | A ``cut_region`` is a filter which can be applied to any other data 
       object.  The filter is defined by the conditionals present, which 
@@ -203,7 +203,7 @@
       For more detailed information and examples, see :ref:`cut-regions`.
 
 **Collection of Data Objects** 
-    | Class :class:`~yt.data_objects.selection_data_containers.YTDataCollectionBase`
+    | Class :class:`~yt.data_objects.selection_data_containers.YTDataCollection`
     | Usage: ``data_collection(center, obj_list, ds=None, field_parameters=None)``
     | A ``data_collection`` is a list of data objects that can be 
       sampled and processed as a whole in a single data object.
@@ -214,13 +214,13 @@
 ^^^^^^^^^^^^^^^^^^^^
 
 **Fixed-Resolution Region** 
-    | Class :class:`~yt.data_objects.construction_data_containers.YTCoveringGridBase`
+    | Class :class:`~yt.data_objects.construction_data_containers.YTCoveringGrid`
     | Usage: ``covering_grid(level, left_edge, dimensions, fields=None, ds=None, num_ghost_zones=0, use_pbar=True, field_parameters=None)``
     | A 3D region with all data extracted to a single, specified resolution.
       See :ref:`examining-grid-data-in-a-fixed-resolution-array`.
 
 **Fixed-Resolution Region with Smoothing** 
-    | Class :class:`~yt.data_objects.construction_data_containers.YTSmoothedCoveringGridBase`
+    | Class :class:`~yt.data_objects.construction_data_containers.YTSmoothedCoveringGrid`
     | Usage: ``smoothed_covering_grid(level, left_edge, dimensions, fields=None, ds=None, num_ghost_zones=0, use_pbar=True, field_parameters=None)``
     | A 3D region with all data extracted and interpolated to a single, 
       specified resolution.  Identical to covering_grid, except that it 
@@ -228,7 +228,7 @@
       :ref:`examining-grid-data-in-a-fixed-resolution-array`.
 
 **Fixed-Resolution Region for Particle Deposition** 
-    | Class :class:`~yt.data_objects.construction_data_containers.YTArbitraryGridBase`
+    | Class :class:`~yt.data_objects.construction_data_containers.YTArbitraryGrid`
     | Usage: ``arbitrary_grid(left_edge, right_edge, dimensions, ds=None, field_parameters=None)``
     | When particles are deposited on to mesh fields, they use the existing
       mesh structure, but this may have too much or too little resolution
@@ -238,7 +238,7 @@
       information.
 
 **Projection** 
-    | Class :class:`~yt.data_objects.construction_data_containers.YTQuadTreeProjBase`
+    | Class :class:`~yt.data_objects.construction_data_containers.YTQuadTreeProj`
     | Usage: ``proj(field, axis, weight_field=None, center=None, ds=None, data_source=None, method="integrate", field_parameters=None)``
     | A 2D projection of a 3D volume along one of the axis directions.  
       By default, this is a line integral through the entire simulation volume 
@@ -248,14 +248,14 @@
       of the projection outcome.  See :ref:`projection-types` for more information.
 
 **Streamline** 
-    | Class :class:`~yt.data_objects.construction_data_containers.YTStreamlineBase`
+    | Class :class:`~yt.data_objects.construction_data_containers.YTStreamline`
     | Usage: ``streamline(coord_list, length, fields=None, ds=None, field_parameters=None)``
     | A ``streamline`` can be traced out by identifying a starting coordinate (or 
       list of coordinates) and allowing it to trace a vector field, like gas
       velocity.  See :ref:`streamlines` for more information.
 
 **Surface** 
-    | Class :class:`~yt.data_objects.construction_data_containers.YTSurfaceBase`
+    | Class :class:`~yt.data_objects.construction_data_containers.YTSurface`
     | Usage: ``surface(data_source, field, field_value)``
     | The surface defined by all an isocontour in any mesh field.  An existing 
       data object must be provided as the source, as well as a mesh field
@@ -358,7 +358,7 @@
 holdover from the time when yt was used exclusively for data that came in
 regularly structured grid patches, and does not necessarily work as well for
 data that is composed of discrete objects like particles.  To augment this, the
-:class:`~yt.data_objects.construction_data_containers.YTArbitraryGridBase` object 
+:class:`~yt.data_objects.construction_data_containers.YTArbitraryGrid` object 
 was created, which enables construction of meshes (onto which particles can be
 deposited or smoothed) in arbitrary regions.  This eliminates any assumptions
 on yt's part about how the data is organized, and will allow for more
@@ -444,7 +444,7 @@
 set of level sets.  The second (``connected_sets``) will be a dict of dicts.
 The key for the first (outer) dict is the level of the contour, corresponding
 to ``contour_values``.  The inner dict returned is keyed by the contour ID.  It
-contains :class:`~yt.data_objects.selection_data_containers.YTCutRegionBase`
+contains :class:`~yt.data_objects.selection_data_containers.YTCutRegion`
 objects.  These can be queried just as any other data object.  The clump finder 
 (:ref:`clump_finding`) differs from the above method in that the contour 
 identification is performed recursively within each individual structure, and 
@@ -457,69 +457,9 @@
 ---------------------------
 
 Often, when operating interactively or via the scripting interface, it is
-convenient to save an object or multiple objects out to disk and then restart
-the calculation later.  For example, this is useful after clump finding 
-(:ref:`clump_finding`), which can be very time consuming.  
-Typically, the save and load operations are used on 3D data objects.  yt
-has a separate set of serialization operations for 2D objects such as
-projections.
-
-yt will save out objects to disk under the presupposition that the
-construction of the objects is the difficult part, rather than the generation
-of the data -- this means that you can save out an object as a description of
-how to recreate it in space, but not the actual data arrays affiliated with
-that object.  The information that is saved includes the dataset off of
-which the object "hangs."  It is this piece of information that is the most
-difficult; the object, when reloaded, must be able to reconstruct a dataset
-from whatever limited information it has in the save file.
-
-You can save objects to an output file using the function 
-:func:`~yt.data_objects.index.save_object`: 
-
-.. code-block:: python
-
-   import yt
-   ds = yt.load("my_data")
-   sp = ds.sphere([0.5, 0.5, 0.5], (10.0, 'kpc'))
-   sp.save_object("sphere_name", "save_file.cpkl")
-
-This will store the object as ``sphere_name`` in the file
-``save_file.cpkl``, which will be created or accessed using the standard
-python module :mod:`shelve`.  
-
-To re-load an object saved this way, you can use the shelve module directly:
-
-.. code-block:: python
-
-   import yt
-   import shelve
-   ds = yt.load("my_data") 
-   saved_fn = shelve.open("save_file.cpkl")
-   ds, sp = saved_fn["sphere_name"]
-
-Additionally, we can store multiple objects in a single shelve file, so we 
-have to call the sphere by name.
-
-For certain data objects such as projections, serialization can be performed
-automatically if ``serialize`` option is set to ``True`` in :ref:`the
-configuration file <configuration-file>` or set directly in the script:
-
-.. code-block:: python
-
-   from yt.config import ytcfg; ytcfg["yt", "serialize"] = "True"
-
-.. note:: Use serialization with caution. Enabling serialization means that
-   once a projection of a dataset has been created (and stored in the .yt file
-   in the same directory), any subsequent changes to that dataset will be
-   ignored when attempting to create the same projection. So if you take a
-   density projection of your dataset in the 'x' direction, then somehow tweak
-   that dataset significantly, and take the density projection again, yt will
-   default to finding the original projection and 
-   :ref:`not your new one <faq-old-data>`.
-
-.. note:: It's also possible to use the standard :mod:`cPickle` module for
-          loading and storing objects -- so in theory you could even save a
-          list of objects!
-
-This method works for clumps, as well, and the entire clump index will be
-stored and restored upon load.
+convenient to save an object to disk and then restart the calculation later or
+transfer the data from a container to another filesystem.  This can be
+particularly useful when working with extremely large datasets.  Field data
+can be saved to disk in a format that allows for it to be reloaded just like
+a regular dataset.  For information on how to do this, see
+:ref:`saving-data-containers`.
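+
+As a brief sketch (assuming the ``enzo_tiny_cosmology`` sample dataset
+used elsewhere in these docs), the round trip looks like this:
+
+.. code-block:: python
+
+   import yt
+   ds = yt.load("enzo_tiny_cosmology/DD0046/DD0046")
+   sp = ds.sphere([0.5]*3, (10, "Mpc"))
+   fn = sp.save_as_dataset(fields=["density"])
+
+   sp_ds = yt.load(fn)
+   print (sp_ds.data["grid", "density"])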

diff -r 7e150e1b959f73601beb86b50e819086f50a3126 -r 5ea9009614a9d1e890c19daaed5966f0c917d2c4 doc/source/analyzing/parallel_computation.rst
--- a/doc/source/analyzing/parallel_computation.rst
+++ b/doc/source/analyzing/parallel_computation.rst
@@ -501,11 +501,7 @@
 subtle art in estimating the amount of memory needed for halo finding, but a
 rule of thumb is that the HOP halo finder is the most memory intensive
 (:func:`HaloFinder`), and Friends of Friends (:func:`FOFHaloFinder`) the
-most memory-conservative.  It has been found that :func:`parallelHF` needs
-roughly 1 MB of memory per 5,000 particles, although recent work has improved
-this and the memory requirement is now smaller than this. But this is a good
-starting point for beginning to calculate the memory required for halo-finding.
-For more information, see :ref:`halo_finding`.
+most memory-conservative. For more information, see :ref:`halo_finding`.
 
 **Volume Rendering**
 

diff -r 7e150e1b959f73601beb86b50e819086f50a3126 -r 5ea9009614a9d1e890c19daaed5966f0c917d2c4 doc/source/analyzing/saving_data.rst
--- /dev/null
+++ b/doc/source/analyzing/saving_data.rst
@@ -0,0 +1,243 @@
+.. _saving_data:
+
+Saving Reloadable Data
+======================
+
+Most of the data loaded into or generated with yt can be saved to a
+format that can be reloaded as a first-class dataset.  This includes
+the following:
+
+  * geometric data containers (regions, spheres, disks, rays, etc.)
+
+  * grid data containers (covering grids, arbitrary grids, fixed
+    resolution buffers)
+
+  * spatial plots (projections, slices, cutting planes)
+
+  * profiles
+
+  * generic array data
+
+In the case of projections, slices, and profiles, reloaded data can be
+used to remake plots.  For information on this, see :ref:`remaking-plots`.
+
+.. _saving-data-containers:
+
+Geometric Data Containers
+-------------------------
+
+Data from geometric data containers can be saved with the
+:func:`~yt.data_objects.data_containers.save_as_dataset` function.
+
+.. notebook-cell::
+
+   import yt
+   ds = yt.load("enzo_tiny_cosmology/DD0046/DD0046")
+
+   sphere = ds.sphere([0.5]*3, (10, "Mpc"))
+   fn = sphere.save_as_dataset(fields=["density", "particle_mass"])
+   print (fn)
+
+This function will return the name of the file to which the dataset
+was saved.  The filename will be a combination of the name of the
+original dataset and the type of data container.  Optionally, a
+specific filename can be given with the ``filename`` keyword.  If no
+fields are given, the fields that have previously been queried will
+be saved.
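+
+For example, a minimal sketch (the filename here is arbitrary):
+
+.. code-block:: python
+
+   fn = sphere.save_as_dataset(filename="my_sphere.h5",
+                               fields=["density"])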
+
+The newly created dataset can be loaded like all other supported
+data through ``yt.load``.  Once loaded, field data can be accessed
+through the traditional data containers or through the ``data``
+attribute, which will be a data container configured like the
+original data container used to make the dataset.  Grid data is
+accessed by the ``grid`` data type and particle data is accessed
+with the original particle type.  As with the original dataset, grid
+positions and cell sizes are accessible with, for example,
+("grid", "x") and ("grid", "dx").  Particle positions are
+accessible as (<particle_type>, "particle_position_x").  All original
+simulation parameters are accessible in the ``parameters``
+dictionary, normally associated with all datasets.
+
+.. code-block:: python
+
+   sphere_ds = yt.load("DD0046_sphere.h5")
+
+   # use the original data container
+   print (sphere_ds.data["grid", "density"])
+
+   # create a new data container
+   ad = sphere_ds.all_data()
+
+   # grid data
+   print (ad["grid", "density"])
+   print (ad["grid", "x"])
+   print (ad["grid", "dx"])
+
+   # particle data
+   print (ad["all", "particle_mass"])
+   print (ad["all", "particle_position_x"])
+
+Note that because field data queried from geometric containers is
+returned as unordered 1D arrays, data container datasets are treated,
+effectively, as particle data.  Thus, 3D indexing of grid data from
+these datasets is not possible.
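+
+A short sketch of what this means in practice (the returned array is
+flat rather than a 3D block):
+
+.. code-block:: python
+
+   ad = sphere_ds.all_data()
+   print (ad["grid", "density"].shape)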
+
+.. _saving-grid-data-containers:
+
+Grid Data Containers
+--------------------
+
+Data containers that return field data as multidimensional arrays
+can be saved so as to preserve this type of access.  This includes
+covering grids, arbitrary grids, and fixed resolution buffers.
+Saving data from these containers works just as with geometric data
+containers.  Once reloaded, field data can still be accessed through
+geometric data containers.
+
+.. code-block:: python
+
+   cg = ds.covering_grid(level=0, left_edge=[0.25]*3, dims=[16]*3)
+   fn = cg.save_as_dataset(fields=["density", "particle_mass"])
+
+   cg_ds = yt.load(fn)
+   ad = cg_ds.all_data()
+   print (ad["grid", "density"])
+
+Multidimensional indexing of field data is also available through
+the ``data`` attribute.
+
+.. code-block:: python
+
+   print (cg_ds.data["grid", "density"])
+
+Fixed resolution buffers work just the same.
+
+.. code-block:: python
+
+   my_proj = ds.proj("density", "x", weight_field="density")
+   frb = my_proj.to_frb(1.0, (800, 800))
+   fn = frb.save_as_dataset(fields=["density"])
+   frb_ds = yt.load(fn)
+   print (frb_ds.data["density"])
+
+.. _saving-spatial-plots:
+
+Spatial Plots
+-------------
+
+Spatial plots, such as projections, slices, and off-axis slices
+(cutting planes) can also be saved and reloaded.
+
+.. code-block:: python
+
+   proj = ds.proj("density", "x", weight_field="density")
+   proj.save_as_dataset()
+
+Once reloaded, they can be handed to their associated plotting
+functions to make images.
+
+.. code-block:: python
+
+   proj_ds = yt.load("DD0046_proj.h5")
+   p = yt.ProjectionPlot(proj_ds, "x", "density",
+                         weight_field="density")
+   p.save()
+
+.. _saving-profile-data:
+
+Profiles
+--------
+
+Profiles created with :func:`~yt.data_objects.profiles.create_profile`,
+:class:`~yt.visualization.profile_plotter.ProfilePlot`, and
+:class:`~yt.visualization.profile_plotter.PhasePlot` can be saved with
+the :func:`~yt.data_objects.profiles.save_as_dataset` function, which
+works just as above.  Profile datasets are a type of non-spatial grid
+dataset.  Geometric selection is not possible, but data can be
+accessed through the ``.data`` attribute.
+
+.. notebook-cell::
+
+   import yt
+   ds = yt.load("enzo_tiny_cosmology/DD0046/DD0046")
+   ad = ds.all_data()
+
+   profile_2d = yt.create_profile(ad, ["density", "temperature"],
+                                  "cell_mass", weight_field=None,
+                                  n_bins=(128, 128))
+   profile_2d.save_as_dataset()
+
+   prof_2d_ds = yt.load("DD0046_Profile2D.h5")
+   print (prof_2d_ds.data["cell_mass"])
+
+The x, y (if at least 2D), and z (if 3D) bin fields can be accessed as 1D
+arrays with "x", "y", and "z".
+
+.. code-block:: python
+
+   print (prof_2d_ds.data["x"])
+
+The bin fields can also be returned with the same shape as the profile
+data by accessing them with their original names.  This allows for
+boolean masking of profile data using the bin fields.
+
+.. code-block:: python
+
+   # density is the x bin field
+   print (prof_2d_ds.data["density"])
+
+For 1, 2, and 3D profile datasets, a fake profile object will be
+constructed by accessing the ``.profile`` attribute.  This is used
+primarily in the case of 1 and 2D profiles to create figures using
+:class:`~yt.visualization.profile_plotter.ProfilePlot` and
+:class:`~yt.visualization.profile_plotter.PhasePlot`.
+
+.. code-block:: python
+
+   p = yt.PhasePlot(prof_2d_ds.data, "density", "temperature",
+                    "cell_mass", weight_field=None)
+   p.save()
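+
+As a sketch, a reloaded 1D profile (assuming one was previously saved
+as ``DD0046_Profile1D.h5``) can be handed to
+:class:`~yt.visualization.profile_plotter.ProfilePlot` through its
+``from_profiles`` method:
+
+.. code-block:: python
+
+   prof_1d_ds = yt.load("DD0046_Profile1D.h5")
+   p1 = yt.ProfilePlot.from_profiles(prof_1d_ds.profile)
+   p1.save()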
+
+.. _saving-array-data:
+
+Generic Array Data
+------------------
+
+Generic arrays can be saved and reloaded as non-spatial data using
+the :func:`~yt.frontends.ytdata.utilities.save_as_dataset` function,
+also available as ``yt.save_as_dataset``.  As with profiles, geometric
+selection is not possible, but the data can be accessed through the
+``.data`` attribute.
+
+.. notebook-cell::
+
+   import yt
+   ds = yt.load("enzo_tiny_cosmology/DD0046/DD0046")
+
+   region = ds.box([0.25]*3, [0.75]*3)
+   sphere = ds.sphere(ds.domain_center, (10, "Mpc"))
+   my_data = {}
+   my_data["region_density"] = region["density"]
+   my_data["sphere_density"] = sphere["density"]
+   yt.save_as_dataset(ds, "test_data.h5", my_data)
+
+   array_ds = yt.load("test_data.h5")
+   print (array_ds.data["region_density"])
+   print (array_ds.data["sphere_density"])
+
+Array data can be saved with or without a dataset loaded.  If no
+dataset has been loaded, a fake dataset can be provided as a
+dictionary.
+
+.. notebook-cell::
+
+   import numpy as np
+   import yt
+
+   my_data = {"density": yt.YTArray(np.random.random(10), "g/cm**3"),
+              "temperature": yt.YTArray(np.random.random(10), "K")}
+   fake_ds = {"current_time": yt.YTQuantity(10, "Myr")}
+   yt.save_as_dataset(fake_ds, "random_data.h5", my_data)
+
+   new_ds = yt.load("random_data.h5")
+   print (new_ds.data["density"])

diff -r 7e150e1b959f73601beb86b50e819086f50a3126 -r 5ea9009614a9d1e890c19daaed5966f0c917d2c4 doc/source/conf.py
--- a/doc/source/conf.py
+++ b/doc/source/conf.py
@@ -67,7 +67,7 @@
 # built documents.
 #
 # The short X.Y version.
-version = '3.3'
+version = '3.3-dev'
 # The full version, including alpha/beta/rc tags.
 release = '3.3-dev'
 

diff -r 7e150e1b959f73601beb86b50e819086f50a3126 -r 5ea9009614a9d1e890c19daaed5966f0c917d2c4 doc/source/cookbook/amrkdtree_downsampling.py
--- a/doc/source/cookbook/amrkdtree_downsampling.py
+++ b/doc/source/cookbook/amrkdtree_downsampling.py
@@ -4,6 +4,13 @@
 # In this example we will show how to use the AMRKDTree to take a simulation
 # with 8 levels of refinement and only use levels 0-3 to render the dataset.
 
+# Currently this cookbook is flawed in that data covered by
+# higher-resolution grids gets masked during the rendering.  This should be
+# fixed by changing either the data source or the code in
+# yt/utilities/amr_kdtree.py where data is being masked for the partitioned
+# grid.  Right now the quick fix is to create a data_collection, but this
+# will only work for patch based simulations that have ds.index.grids.
+
 # We begin by loading up yt, and importing the AMRKDTree
 import numpy as np
 
@@ -12,58 +19,58 @@
 
 # Load up a dataset and define the kdtree
 ds = yt.load('IsolatedGalaxy/galaxy0030/galaxy0030')
-kd = AMRKDTree(ds)
+im, sc = yt.volume_render(ds, 'density', fname='v0.png')
+sc.camera.set_width(ds.arr(100, 'kpc'))
+render_source = sc.get_source(0)
+kd = render_source.volume
 
 # Print out specifics of KD Tree
 print("Total volume of all bricks = %i" % kd.count_volume())
 print("Total number of cells = %i" % kd.count_cells())
 
-# Define a camera and take an volume rendering.
-tf = yt.ColorTransferFunction((-30, -22))
-cam = ds.camera([0.5, 0.5, 0.5], [0.2, 0.3, 0.4], 0.10, 256,
-                  tf, volume=kd)
-tf.add_layers(4, 0.01, col_bounds=[-27.5, -25.5], colormap='RdBu_r')
-cam.snapshot("v1.png", clip_ratio=6.0)
-
-# This rendering is okay, but lets say I'd like to improve it, and I don't want
-# to spend the time rendering the high resolution data.  What we can do is
-# generate a low resolution version of the AMRKDTree and pass that in to the
-# camera.  We do this by specifying a maximum refinement level of 6.
-
-kd_low_res = AMRKDTree(ds, max_level=6)
+new_source = ds.all_data()
+new_source.max_level = 3
+kd_low_res = AMRKDTree(ds, data_source=new_source)
 print(kd_low_res.count_volume())
 print(kd_low_res.count_cells())
 
 # Now we pass this in as the volume to our camera, and render the snapshot
 # again.
 
-cam.volume = kd_low_res
-cam.snapshot("v4.png", clip_ratio=6.0)
+render_source.set_volume(kd_low_res)
+render_source.set_fields('density')
+sc.render()
+sc.save("v1.png", sigma_clip=6.0)
 
 # This operation was substantially faster.  Now let's modify the low resolution
 # rendering until we find something we like.
 
+tf = render_source.transfer_function
 tf.clear()
 tf.add_layers(4, 0.01, col_bounds=[-27.5, -25.5],
               alpha=np.ones(4, dtype='float64'), colormap='RdBu_r')
-cam.snapshot("v2.png", clip_ratio=6.0)
+sc.render()
+sc.save("v2.png", sigma_clip=6.0)
 
 # This looks better.  Now let's try turning on opacity.
 
 tf.grey_opacity = True
-cam.snapshot("v4.png", clip_ratio=6.0)
-
-# That seemed to pick out som interesting structures.  Now let's bump up the
-# opacity.
-
+sc.render()
+sc.save("v3.png", sigma_clip=6.0)
+#
+## That seemed to pick out some interesting structures.  Now let's bump up the
+## opacity.
+#
 tf.clear()
 tf.add_layers(4, 0.01, col_bounds=[-27.5, -25.5],
               alpha=10.0 * np.ones(4, dtype='float64'), colormap='RdBu_r')
-cam.snapshot("v3.png", clip_ratio=6.0)
-
-# This looks pretty good, now lets go back to the full resolution AMRKDTree
-
-cam.volume = kd
-cam.snapshot("v4.png", clip_ratio=6.0)
+sc.render()
+sc.save("v4.png", sigma_clip=6.0)
+#
+## This looks pretty good, now let's go back to the full resolution AMRKDTree
+#
+render_source.set_volume(kd)
+sc.render()
+sc.save("v5.png", sigma_clip=6.0)
 
 # This looks great!

diff -r 7e150e1b959f73601beb86b50e819086f50a3126 -r 5ea9009614a9d1e890c19daaed5966f0c917d2c4 doc/source/cookbook/camera_movement.py
--- a/doc/source/cookbook/camera_movement.py
+++ b/doc/source/cookbook/camera_movement.py
@@ -3,40 +3,29 @@
 
 # Follow the simple_volume_rendering cookbook for the first part of this.
 ds = yt.load("IsolatedGalaxy/galaxy0030/galaxy0030")  # load data
-ad = ds.all_data()
-mi, ma = ad.quantities.extrema("density")
-
-# Set up transfer function
-tf = yt.ColorTransferFunction((np.log10(mi), np.log10(ma)))
-tf.add_layers(6, w=0.05)
-
-# Set up camera paramters
-c = [0.5, 0.5, 0.5]  # Center
-L = [1, 1, 1]  # Normal Vector
-W = 1.0  # Width
-Nvec = 512  # Pixels on a side
-
-# Specify a north vector, which helps with rotations.
-north_vector = [0., 0., 1.]
+sc = yt.create_scene(ds)
+cam = sc.camera
+cam.resolution = (512, 512)
+cam.set_width(ds.domain_width/20.0)
 
 # Find the maximum density location, store it in max_c
 v, max_c = ds.find_max('density')
 
-# Initialize the Camera
-cam = ds.camera(c, L, W, (Nvec, Nvec), tf, north_vector=north_vector)
 frame = 0
-
-# Do a rotation over 5 frames
-for i, snapshot in enumerate(cam.rotation(np.pi, 5, clip_ratio=8.0)):
-    snapshot.write_png('camera_movement_%04i.png' % frame)
-    frame += 1
-
 # Move to the maximum density location over 5 frames
-for i, snapshot in enumerate(cam.move_to(max_c, 5, clip_ratio=8.0)):
-    snapshot.write_png('camera_movement_%04i.png' % frame)
+for _ in cam.iter_move(max_c, 5):
+    sc.render()
+    sc.save('camera_movement_%04i.png' % frame, sigma_clip=8.0)
     frame += 1
 
 # Zoom in by a factor of 10 over 5 frames
-for i, snapshot in enumerate(cam.zoomin(10.0, 5, clip_ratio=8.0)):
-    snapshot.write_png('camera_movement_%04i.png' % frame)
+for _ in cam.iter_zoom(10.0, 5):
+    sc.render()
+    sc.save('camera_movement_%04i.png' % frame, sigma_clip=8.0)
     frame += 1
+
+# Do a rotation over 5 frames
+for _ in cam.iter_rotate(np.pi, 5):
+    sc.render()
+    sc.save('camera_movement_%04i.png' % frame, sigma_clip=8.0)
+    frame += 1

diff -r 7e150e1b959f73601beb86b50e819086f50a3126 -r 5ea9009614a9d1e890c19daaed5966f0c917d2c4 doc/source/cookbook/complex_plots.rst
--- a/doc/source/cookbook/complex_plots.rst
+++ b/doc/source/cookbook/complex_plots.rst
@@ -196,10 +196,41 @@
 
 In this recipe, we move a camera through a domain and take multiple volume
 rendering snapshots.
-See :ref:`volume_rendering` for more information.
+See :ref:`camera_movement` for more information.
 
 .. yt_cookbook:: camera_movement.py
 
+Volume Rendering with Custom Camera
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+In this recipe we modify the :ref:`cookbook-simple_volume_rendering` recipe to
+use customized camera properties. See :ref:`volume_rendering` for more
+information.
+
+.. yt_cookbook:: custom_camera_volume_rendering.py
+
+.. _cookbook-custom-transfer-function:
+
+Volume Rendering with a Custom Transfer Function
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+In this recipe we modify the :ref:`cookbook-simple_volume_rendering` recipe to
+use a customized transfer function. See :ref:`volume_rendering` for more
+information.
+
+.. yt_cookbook:: custom_transfer_function_volume_rendering.py
+
+.. _cookbook-sigma_clip:
+
+Volume Rendering with Sigma Clipping
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+In this recipe we output several images with different values of ``sigma_clip``
+set in order to change the contrast of the resulting image.  See 
+:ref:`sigma_clip` for more information.
+
+.. yt_cookbook:: sigma_clip.py
+
 Zooming into an Image
 ~~~~~~~~~~~~~~~~~~~~~
 
@@ -212,6 +243,15 @@
 
 .. yt_cookbook:: zoomin_frames.py
 
+.. _cookbook-various_lens:
+
+Various Lens Types for Volume Rendering
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+This example illustrates the usage and features of different lens types for volume rendering.
+
+.. yt_cookbook:: various_lens.py
+
 .. _cookbook-opaque_rendering:
 
 Opaque Volume Rendering
@@ -220,7 +260,7 @@
 This recipe demonstrates how to make semi-opaque volume renderings, but also
 how to step through and try different things to identify the type of volume
 rendering you want.
-See :ref:`volume_rendering` for more information.
+See :ref:`opaque_rendering` for more information.
 
 .. yt_cookbook:: opaque_rendering.py
 
@@ -235,23 +275,27 @@
 
 .. yt_cookbook:: amrkdtree_downsampling.py
 
+.. _cookbook-volume_rendering_annotations:
+
 Volume Rendering with Bounding Box and Overlaid Grids
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
 This recipe demonstrates how to overplot a bounding box on a volume rendering
 as well as overplotting grids representing the level of refinement achieved
 in different regions of the code.
-See :ref:`volume_rendering` for more information.
+See :ref:`volume_rendering_annotations` for more information.
 
 .. yt_cookbook:: rendering_with_box_and_grids.py
 
 Volume Rendering with Annotation
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
 This recipe demonstrates how to write the simulation time, show an
 axis triad indicating the direction of the coordinate system, and show
-the transfer function on a volume rendering.
-See :ref:`volume_rendering` for more information.
+the transfer function on a volume rendering.  Please note that this 
+recipe relies on the old volume rendering interface.  While one can
+continue to use this interface, it may be incompatible with some of the
+new developments and the infrastructure described in :ref:`volume_rendering`.
 
 .. yt_cookbook:: vol-annotated.py
 

diff -r 7e150e1b959f73601beb86b50e819086f50a3126 -r 5ea9009614a9d1e890c19daaed5966f0c917d2c4 doc/source/cookbook/custom_camera_volume_rendering.py
--- /dev/null
+++ b/doc/source/cookbook/custom_camera_volume_rendering.py
@@ -0,0 +1,22 @@
+import yt
+
+# Load the dataset
+ds = yt.load("Enzo_64/DD0043/data0043")
+
+# Create a volume rendering
+sc = yt.create_scene(ds, field=('gas', 'density'))
+
+# Now increase the resolution
+sc.camera.resolution = (1024, 1024)
+
+# Set the camera focus to a position that is offset from the center of
+# the domain
+sc.camera.focus = ds.arr([0.3, 0.3, 0.3], 'unitary')
+
+# Move the camera position to the other side of the dataset
+sc.camera.position = ds.arr([0, 0, 0], 'unitary')
+
+# Save to disk with a custom filename and apply sigma clipping to eliminate
+# very bright pixels, producing an image with better contrast.
+sc.render()
+sc.save('custom.png', sigma_clip=4)

diff -r 7e150e1b959f73601beb86b50e819086f50a3126 -r 5ea9009614a9d1e890c19daaed5966f0c917d2c4 doc/source/cookbook/custom_transfer_function_volume_rendering.py
--- /dev/null
+++ b/doc/source/cookbook/custom_transfer_function_volume_rendering.py
@@ -0,0 +1,24 @@
+import yt
+import numpy as np
+
+# Load the dataset
+ds = yt.load("Enzo_64/DD0043/data0043")
+
+# Create a volume rendering
+sc = yt.create_scene(ds, field=('gas', 'density'))
+
+# Modify the transfer function
+
+# First get the render source, in this case the entire domain, with field ('gas','density')
+render_source = sc.get_source(0)
+
+# Clear the transfer function
+render_source.transfer_function.clear()
+
+# Map a range of density values (in log space) to the RdBu_r colormap
+render_source.transfer_function.map_to_colormap(
+    np.log10(ds.quan(5.0e-31, 'g/cm**3')),
+    np.log10(ds.quan(1.0e-29, 'g/cm**3')),
+    scale=30.0, colormap='RdBu_r')
+
+sc.save('new_tf.png')

diff -r 7e150e1b959f73601beb86b50e819086f50a3126 -r 5ea9009614a9d1e890c19daaed5966f0c917d2c4 doc/source/cookbook/image_background_colors.py
--- a/doc/source/cookbook/image_background_colors.py
+++ b/doc/source/cookbook/image_background_colors.py
@@ -2,27 +2,14 @@
 # volume renderings, to pngs with varying backgrounds.
 
 # First we use the simple_volume_rendering.py recipe from above to generate
-# a standard volume rendering.  The only difference is that we use 
-# grey_opacity=True with our TransferFunction, as the colored background 
-# functionality requires images with an opacity between 0 and 1. 
-
-# We have removed all the comments from the volume rendering recipe for 
-# brevity here, but consult the recipe for more details.
+# a standard volume rendering.
 
 import yt
 import numpy as np
 
 ds = yt.load("Enzo_64/DD0043/data0043")
-ad = ds.all_data()
-mi, ma = ad.quantities.extrema("density")
-tf = yt.ColorTransferFunction((np.log10(mi)+1, np.log10(ma)), grey_opacity=True)
-tf.add_layers(5, w=0.02, colormap="spectral")
-c = [0.5, 0.5, 0.5]
-L = [0.5, 0.2, 0.7]
-W = 1.0
-Npixels = 512
-cam = ds.camera(c, L, W, Npixels, tf)
-im = cam.snapshot("original.png" % ds, clip_ratio=8.0)
+im, sc = yt.volume_render(ds, 'density')
+im.write_png("original.png", sigma_clip=8.0)
 
 # Our image array can now be transformed to include different background
 # colors.  By default, the background color is black.  The following
@@ -35,10 +22,10 @@
 # None  (0.,0.,0.,0.) <-- Transparent!
 # any rgba list/array: [r,g,b,a], bounded by 0..1
 
-# We include the clip_ratio=8 keyword here to bring out more contrast between
+# We include the sigma_clip=8 keyword here to bring out more contrast between
 # the background and foreground, but it is entirely optional.
 
-im.write_png('black_bg.png', background='black', clip_ratio=8.0)
-im.write_png('white_bg.png', background='white', clip_ratio=8.0)
-im.write_png('green_bg.png', background=[0.,1.,0.,1.], clip_ratio=8.0)
-im.write_png('transparent_bg.png', background=None, clip_ratio=8.0)
+im.write_png('black_bg.png', background='black', sigma_clip=8.0)
+im.write_png('white_bg.png', background='white', sigma_clip=8.0)
+im.write_png('green_bg.png', background=[0.,1.,0.,1.], sigma_clip=8.0)
+im.write_png('transparent_bg.png', background=None, sigma_clip=8.0)

diff -r 7e150e1b959f73601beb86b50e819086f50a3126 -r 5ea9009614a9d1e890c19daaed5966f0c917d2c4 doc/source/cookbook/index.rst
--- a/doc/source/cookbook/index.rst
+++ b/doc/source/cookbook/index.rst
@@ -44,8 +44,10 @@
    embedded_webm_animation
    gadget_notebook
    owls_notebook
+   ../visualizing/transfer_function_helper
    ../analyzing/analysis_modules/sunyaev_zeldovich
    fits_radio_cubes
    fits_xray_images
    tipsy_notebook
    halo_analysis_example
+   ../visualizing/volume_rendering_tutorial

diff -r 7e150e1b959f73601beb86b50e819086f50a3126 -r 5ea9009614a9d1e890c19daaed5966f0c917d2c4 doc/source/cookbook/offaxis_projection.py
--- a/doc/source/cookbook/offaxis_projection.py
+++ b/doc/source/cookbook/offaxis_projection.py
@@ -11,7 +11,7 @@
 # objects, you could set it the way you would a cutting plane -- but for this
 # dataset, we'll just choose an off-axis value at random.  This gets normalized
 # automatically.
-L = [0.5, 0.4, 0.7]
+L = [1.0, 0.0, 0.0]
 
 # Our "width" is the width of the image plane as well as the depth.
 # The first element is the left to right width, the second is the
@@ -26,7 +26,7 @@
 # Create the off axis projection.
 # Setting no_ghost to True speeds up the process, but makes a
 # slightly lower quality image.
-image = yt.off_axis_projection(ds, c, L, W, Npixels, "density", no_ghost=False)
+image, sc = yt.off_axis_projection(ds, c, L, W, Npixels, "density", no_ghost=False)
 
 # Write out the final image and give it a name
 # relating to what our dataset is called.

diff -r 7e150e1b959f73601beb86b50e819086f50a3126 -r 5ea9009614a9d1e890c19daaed5966f0c917d2c4 doc/source/cookbook/offaxis_projection_colorbar.py
--- a/doc/source/cookbook/offaxis_projection_colorbar.py
+++ b/doc/source/cookbook/offaxis_projection_colorbar.py
@@ -32,7 +32,7 @@
 # Also note that we set the field which we want to project as "density", but
 # really we could use any arbitrary field like "temperature", "metallicity"
 # or whatever.
-image = yt.off_axis_projection(ds, c, L, W, Npixels, "density", no_ghost=False)
+image, sc = yt.off_axis_projection(ds, c, L, W, Npixels, "density", no_ghost=False)
 
 # Image is now an NxN array representing the intensities of the various pixels.
 # And now, we call our direct image saver.  We save the log of the result.

This diff is so big that we needed to truncate the remainder.

https://bitbucket.org/yt_analysis/yt/commits/935e0716b1ee/
Changeset:   935e0716b1ee
Branch:      yt
User:        MatthewTurk
Date:        2015-11-18 04:45:16+00:00
Summary:     Adding docs about field access
Affected #:  1 file

diff -r 5ea9009614a9d1e890c19daaed5966f0c917d2c4 -r 935e0716b1ee30b2e28ec885d56cc5c9611c59cf doc/source/analyzing/fields.rst
--- a/doc/source/analyzing/fields.rst
+++ b/doc/source/analyzing/fields.rst
@@ -195,10 +195,32 @@
 :ref:`field-list`.  If you want to create additional custom derived fields, 
 see :ref:`creating-derived-fields`.
 
-The full list of fields available for a dataset can be found as 
-the attribute ``field_list`` for native, on-disk fields and ``derived_field_list``
-for derived fields (``derived_field_list`` is a superset of ``field_list``).
-You can view these lists by examining a dataset like this:
+Every dataset has an attribute, ``fields``.  This attribute possesses
+attributes itself, each of which is a "field type," and each field type has as
+its attributes the fields themselves.  When one of these is printed, it returns
+information about the field and things like units and so on.  You can use this
+for tab-completion as well as easier access to information.
+
+As an example, you might browse the available fields like so::
+
+  print dir(ds.fields)
+  print dir(ds.fields.gas)
+  print ds.fields.gas.density
+
+On an Enzo dataset, the result from the final command would look something like
+this::
+
+  Alias Field for "('enzo', 'Density')" (gas, density): (units: g/cm**3)
+
+You can use this to easily explore available fields, particularly through
+tab-completion in Jupyter/IPython.
+
+For a more programmatic method of accessing fields, you can utilize the
+``field_list``, ``derived_field_list`` and some accessor methods to gain
+information about fields.  The full list of fields available for a dataset can
+be found as the attribute ``field_list`` for native, on-disk fields and
+``derived_field_list`` for derived fields (``derived_field_list`` is a superset
+of ``field_list``).  You can view these lists by examining a dataset like this:
 
 .. code-block:: python
 


https://bitbucket.org/yt_analysis/yt/commits/5421e4e15c89/
Changeset:   5421e4e15c89
Branch:      yt
User:        MatthewTurk
Date:        2015-11-18 13:53:30+00:00
Summary:     Updating for style changes
Affected #:  1 file

diff -r 935e0716b1ee30b2e28ec885d56cc5c9611c59cf -r 5421e4e15c89ac7c8da608a7a9691fc25a859f80 yt/fields/tests/test_field_name_container.py
--- a/yt/fields/tests/test_field_name_container.py
+++ b/yt/fields/tests/test_field_name_container.py
@@ -1,17 +1,11 @@
 from yt.utilities.answer_testing.framework import \
     requires_ds, \
-    small_patch_amr, \
-    big_patch_amr, \
-    data_dir_load, \
-    AnalyticHaloMassFunctionTest, \
-    SimulatedHaloMassFunctionTest
-from yt.frontends.enzo.api import EnzoDataset
+    data_dir_load
 
 enzotiny = "enzo_tiny_cosmology/DD0046/DD0046"
 @requires_ds(enzotiny)
 def test_simulated_halo_mass_function():
     ds = data_dir_load(enzotiny)
-    ds = yt.load(enzotiny)
     for field_type in dir(ds.fields):
         ft = getattr(ds.fields, field_type)
         for field_name in dir(ft):


https://bitbucket.org/yt_analysis/yt/commits/19de2adc9263/
Changeset:   19de2adc9263
Branch:      yt
User:        MatthewTurk
Date:        2015-11-19 19:23:30+00:00
Summary:     Comments from Nathan
Affected #:  1 file

diff -r 5421e4e15c89ac7c8da608a7a9691fc25a859f80 -r 19de2adc9263fde14731a9e548d7b424fe3adda1 doc/source/analyzing/fields.rst
--- a/doc/source/analyzing/fields.rst
+++ b/doc/source/analyzing/fields.rst
@@ -195,7 +195,7 @@
 :ref:`field-list`.  If you want to create additional custom derived fields, 
 see :ref:`creating-derived-fields`.
 
-Every dataset has an attribute, ``fields``.  This attribute possesses
+Every dataset has an attribute, ``ds.fields``.  This attribute possesses
 attributes itself, each of which is a "field type," and each field type has as
 its attributes the fields themselves.  When one of these is printed, it returns
 information about the field and things like units and so on.  You can use this
@@ -203,9 +203,9 @@
 
 As an example, you might browse the available fields like so::
 
-  print dir(ds.fields)
-  print dir(ds.fields.gas)
-  print ds.fields.gas.density
+  print(dir(ds.fields))
+  print(dir(ds.fields.gas))
+  print(ds.fields.gas.density)
 
 On an Enzo dataset, the result from the final command would look something like
 this::
@@ -216,7 +216,7 @@
 tab-completion in Jupyter/IPython.
 
 For a more programmatic method of accessing fields, you can utilize the
-``field_list``, ``derived_field_list`` and some accessor methods to gain
+``ds.field_list``, ``ds.derived_field_list`` and some accessor methods to gain
 information about fields.  The full list of fields available for a dataset can
 be found as the attribute ``field_list`` for native, on-disk fields and
 ``derived_field_list`` for derived fields (``derived_field_list`` is a superset


https://bitbucket.org/yt_analysis/yt/commits/b949b80ab094/
Changeset:   b949b80ab094
Branch:      yt
User:        bwkeller
Date:        2015-11-23 19:13:38+00:00
Summary:     Merged in MatthewTurk/yt (pull request #1760)

Add "fields" attribute to datasets
Affected #:  4 files

diff -r 903268eb8d717ff3af3fc5acd7634f5db854a1b4 -r b949b80ab0941331195e1653ce85e836909f20a9 doc/source/analyzing/fields.rst
--- a/doc/source/analyzing/fields.rst
+++ b/doc/source/analyzing/fields.rst
@@ -195,10 +195,32 @@
 :ref:`field-list`.  If you want to create additional custom derived fields, 
 see :ref:`creating-derived-fields`.
 
-The full list of fields available for a dataset can be found as 
-the attribute ``field_list`` for native, on-disk fields and ``derived_field_list``
-for derived fields (``derived_field_list`` is a superset of ``field_list``).
-You can view these lists by examining a dataset like this:
+Every dataset has an attribute, ``ds.fields``.  This attribute possesses
+attributes itself, each of which is a "field type," and each field type has as
+its attributes the fields themselves.  When one of these is printed, it returns
+information about the field and things like units and so on.  You can use this
+for tab-completion as well as easier access to information.
+
+As an example, you might browse the available fields like so::
+
+  print(dir(ds.fields))
+  print(dir(ds.fields.gas))
+  print(ds.fields.gas.density)
+
+On an Enzo dataset, the result from the final command would look something like
+this::
+
+  Alias Field for "('enzo', 'Density')" (gas, density): (units: g/cm**3)
+
+You can use this to easily explore available fields, particularly through
+tab-completion in Jupyter/IPython.
+
+For a more programmatic method of accessing fields, you can utilize the
+``ds.field_list``, ``ds.derived_field_list`` and some accessor methods to gain
+information about fields.  The full list of fields available for a dataset can
+be found as the attribute ``field_list`` for native, on-disk fields and
+``derived_field_list`` for derived fields (``derived_field_list`` is a superset
+of ``field_list``).  You can view these lists by examining a dataset like this:
 
 .. code-block:: python
 

diff -r 903268eb8d717ff3af3fc5acd7634f5db854a1b4 -r b949b80ab0941331195e1653ce85e836909f20a9 yt/data_objects/static_output.py
--- a/yt/data_objects/static_output.py
+++ b/yt/data_objects/static_output.py
@@ -87,6 +87,36 @@
         output_type_registry[name] = cls
         mylog.debug("Registering: %s as %s", name, cls)
 
+class FieldTypeContainer(object):
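+    # Exposes field types as attributes: ds.fields.<type> yields a
+    # FieldNameContainer, so e.g. ds.fields.gas.density resolves to the
+    # entry stored in ds.field_info[("gas", "density")].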
+    def __init__(self, ds):
+        self.ds = weakref.proxy(ds)
+
+    def __getattr__(self, attr):
+        ds = self.__getattribute__('ds')
+        fnc = FieldNameContainer(ds, attr)
+        if len(dir(fnc)) == 0:
+            return self.__getattribute__(attr)
+        return fnc
+
+    def __dir__(self):
+        return list(set(t for t, n in self.ds.field_info))
+
+class FieldNameContainer(object):
+    def __init__(self, ds, field_type):
+        self.ds = ds
+        self.field_type = field_type
+
+    def __getattr__(self, attr):
+        ft = self.__getattribute__("field_type")
+        ds = self.__getattribute__("ds")
+        if (ft, attr) not in ds.field_info:
+            return self.__getattribute__(attr)
+        return ds.field_info[ft, attr]
+
+    def __dir__(self):
+        return [n for t, n in self.ds.field_info
+                if t == self.field_type]
+
 class IndexProxy(object):
     # This is a simple proxy for Index objects.  It enables backwards
     # compatibility so that operations like .h.sphere, .h.print_stats and
@@ -136,6 +166,7 @@
     _index_class = None
     field_units = None
     derived_field_list = requires_index("derived_field_list")
+    fields = requires_index("fields")
     _instantiated = False
 
     def __new__(cls, filename=None, *args, **kwargs):
@@ -391,6 +422,7 @@
         self.field_info.load_all_plugins()
         deps, unloaded = self.field_info.check_derived_fields()
         self.field_dependencies.update(deps)
+        self.fields = FieldTypeContainer(self)
 
     def setup_deprecated_fields(self):
         from yt.fields.field_aliases import _field_name_aliases

diff -r 903268eb8d717ff3af3fc5acd7634f5db854a1b4 -r b949b80ab0941331195e1653ce85e836909f20a9 yt/fields/derived_field.py
--- a/yt/fields/derived_field.py
+++ b/yt/fields/derived_field.py
@@ -36,6 +36,7 @@
     def _TranslationFunc(field, data):
         # We do a bunch of in-place modifications, so we will copy this.
         return data[field_name].copy()
+    _TranslationFunc.alias_name = field_name
     return _TranslationFunc
 
 def NullFunc(field, data):
@@ -212,6 +213,25 @@
         data_label += r"$"
         return data_label
 
+    def __repr__(self):
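+        # Build a one-line summary: provenance (on-disk, alias, or
+        # derived), the field name, units, optional display name, and
+        # a particle-field flag.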
+        if self._function == NullFunc:
+            s = "On-Disk Field "
+        elif self._function.func_name == "_TranslationFunc":
+            s = "Alias Field for \"%s\" " % (self._function.alias_name,)
+        else:
+            s = "Derived Field "
+        if isinstance(self.name, tuple):
+            s += "(%s, %s): " % self.name
+        else:
+            s += "%s: " % (self.name)
+        s += "(units: %s" % self.units
+        if self.display_name is not None:
+            s += ", display_name: '%s'" % (self.display_name)
+        if self.particle_type:
+            s += ", particle field"
+        s += ")"
+        return s
+
 class FieldValidator(object):
     pass
 

diff -r 903268eb8d717ff3af3fc5acd7634f5db854a1b4 -r b949b80ab0941331195e1653ce85e836909f20a9 yt/fields/tests/test_field_name_container.py
--- /dev/null
+++ b/yt/fields/tests/test_field_name_container.py
@@ -0,0 +1,13 @@
+from yt.utilities.answer_testing.framework import \
+    requires_ds, \
+    data_dir_load
+
+enzotiny = "enzo_tiny_cosmology/DD0046/DD0046"
+@requires_ds(enzotiny)
+def test_simulated_halo_mass_function():
+    ds = data_dir_load(enzotiny)
+    for field_type in dir(ds.fields):
+        ft = getattr(ds.fields, field_type)
+        for field_name in dir(ft):
+            f = getattr(ft, field_name)
+            assert ((field_type, field_name) == f.name)

Repository URL: https://bitbucket.org/yt_analysis/yt/

--

This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.


