[yt-svn] commit/yt: 66 new changesets

commits-noreply at bitbucket.org
Tue Sep 9 13:10:02 PDT 2014


66 new commits in yt:

https://bitbucket.org/yt_analysis/yt/commits/deae5bdfcda3/
Changeset:   deae5bdfcda3
Branch:      yt
User:        drudd
Date:        2014-08-28 21:35:52
Summary:     Fix bug where a list was compared with an integer (a length comparison was intended)
Affected #:  1 file

diff -r a7736c8f4158cb76b0453eca1a6f5046775ede01 -r deae5bdfcda365c7eb03b8e3251326459160df04 yt/data_objects/data_containers.py
--- a/yt/data_objects/data_containers.py
+++ b/yt/data_objects/data_containers.py
@@ -621,7 +621,7 @@
                 fields_to_generate.append(field)
                 continue
             fields_to_get.append(field)
-        if len(fields_to_get) == 0 and fields_to_generate == 0:
+        if len(fields_to_get) == 0 and len(fields_to_generate) == 0:
             return
         elif self._locked == True:
             raise GenerationInProgress(fields)
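
Note on the bug: comparing a list to an integer with == is always False in Python (it never implicitly compares lengths), so the unfixed branch could never short-circuit. A minimal illustration:

    >>> fields_to_generate = []
    >>> fields_to_generate == 0        # list vs. int: always False
    False
    >>> len(fields_to_generate) == 0   # the intended length check
    True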


https://bitbucket.org/yt_analysis/yt/commits/7f748f15be50/
Changeset:   7f748f15be50
Branch:      yt
User:        drudd
Date:        2014-08-28 22:19:12
Summary:     Moved chunking calls in _read_particle_fields and _read_fluid_fields to after possible empty return points (avoids unnecessary I/O)
Affected #:  1 file

diff -r deae5bdfcda365c7eb03b8e3251326459160df04 -r 7f748f15be5006c2e1d6d4f79b37f0c25e9a72eb yt/geometry/geometry_handler.py
--- a/yt/geometry/geometry_handler.py
+++ b/yt/geometry/geometry_handler.py
@@ -220,13 +220,12 @@
 
     def _read_particle_fields(self, fields, dobj, chunk = None):
         if len(fields) == 0: return {}, []
+        fields_to_read, fields_to_generate = self._split_fields(fields)
+        if len(fields_to_read) == 0:
+            return {}, fields_to_generate
         selector = dobj.selector
         if chunk is None:
             self._identify_base_chunk(dobj)
-        fields_to_return = {}
-        fields_to_read, fields_to_generate = self._split_fields(fields)
-        if len(fields_to_read) == 0:
-            return {}, fields_to_generate
         fields_to_return = self.io._read_particle_selection(
             self._chunk_io(dobj, cache = False),
             selector,
@@ -238,16 +237,15 @@
 
     def _read_fluid_fields(self, fields, dobj, chunk = None):
         if len(fields) == 0: return {}, []
+        fields_to_read, fields_to_generate = self._split_fields(fields)
+        if len(fields_to_read) == 0:
+            return {}, fields_to_generate
         selector = dobj.selector
         if chunk is None:
             self._identify_base_chunk(dobj)
             chunk_size = dobj.size
         else:
             chunk_size = chunk.data_size
-        fields_to_return = {}
-        fields_to_read, fields_to_generate = self._split_fields(fields)
-        if len(fields_to_read) == 0:
-            return {}, fields_to_generate
         fields_to_return = self.io._read_fluid_selection(
             self._chunk_io(dobj),
             selector,


https://bitbucket.org/yt_analysis/yt/commits/c81958f888c6/
Changeset:   c81958f888c6
Branch:      yt
User:        drudd
Date:        2014-08-28 23:36:32
Summary:     Add tentative error handling to Index._split_fields, preventing fields not in field_list or derived_field_list from being passed along
Affected #:  1 file

diff -r 7f748f15be5006c2e1d6d4f79b37f0c25e9a72eb -r c81958f888c67a7e375952a0cd1d8bb10f73a56a yt/geometry/geometry_handler.py
--- a/yt/geometry/geometry_handler.py
+++ b/yt/geometry/geometry_handler.py
@@ -214,8 +214,10 @@
         for ftype, fname in fields:
             if fname in self.field_list or (ftype, fname) in self.field_list:
                 fields_to_read.append((ftype, fname))
+            elif fname in self.ds.derived_field_list or (ftype, fname) in self.ds.derived_field_list:
+                fields_to_generate.append((ftype, fname))
             else:
-                fields_to_generate.append((ftype, fname))
+                raise RuntimeError("Invalid field encountered in _split_fields: (%s, %s)" % (ftype,fname))
         return fields_to_read, fields_to_generate
 
     def _read_particle_fields(self, fields, dobj, chunk = None):
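
The new behavior, sketched as a standalone function (the field lists below are hypothetical, not from a real dataset):

    def split_fields(fields, field_list, derived_field_list):
        # Partition (ftype, fname) keys into on-disk and derived fields.
        fields_to_read, fields_to_generate = [], []
        for ftype, fname in fields:
            if fname in field_list or (ftype, fname) in field_list:
                fields_to_read.append((ftype, fname))        # stored on disk
            elif fname in derived_field_list or (ftype, fname) in derived_field_list:
                fields_to_generate.append((ftype, fname))    # computed on demand
            else:
                # previously this silently landed in fields_to_generate
                raise RuntimeError("Invalid field encountered: (%s, %s)" % (ftype, fname))
        return fields_to_read, fields_to_generate

    # ("gas", "density") is on disk, ("gas", "entropy") is derived; a typo raises
    print(split_fields([("gas", "density"), ("gas", "entropy")],
                       [("gas", "density")], [("gas", "entropy")]))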


https://bitbucket.org/yt_analysis/yt/commits/561d1bf4b2a9/
Changeset:   561d1bf4b2a9
Branch:      yt
User:        drudd
Date:        2014-08-29 22:35:06
Summary:     Stop returning the cached field info object when only the field name matches
Affected #:  1 file

diff -r c81958f888c67a7e375952a0cd1d8bb10f73a56a -r 561d1bf4b2a9f7dc89a1a99dc80719ce107798c3 yt/data_objects/static_output.py
--- a/yt/data_objects/static_output.py
+++ b/yt/data_objects/static_output.py
@@ -460,8 +460,9 @@
             self._last_freq = field
             self._last_finfo = self.field_info[(ftype, fname)]
             return self._last_finfo
-        if fname == self._last_freq[1]:
-            return self._last_finfo
+        #Removed as it can suggest invalid fields are valid
+        #if fname == self._last_freq[1]:
+        #    return self._last_finfo
         if fname in self.field_info:
             # Sometimes, if guessing_type == True, this will be switched for
             # the type of field it is.  So we look at the field type and


https://bitbucket.org/yt_analysis/yt/commits/a9aa29ba3512/
Changeset:   a9aa29ba3512
Branch:      yt
User:        drudd
Date:        2014-08-29 22:36:19
Summary:     Add strict field existence check to _determine_fields
Affected #:  1 file

diff -r 561d1bf4b2a9f7dc89a1a99dc80719ce107798c3 -r a9aa29ba3512f0e81f2ffff40443c0da499b4ebe yt/data_objects/data_containers.py
--- a/yt/data_objects/data_containers.py
+++ b/yt/data_objects/data_containers.py
@@ -494,6 +494,18 @@
                     ftype = self._current_fluid_type
                     if (ftype, fname) not in self.ds.field_info:
                         ftype = self.ds._last_freq[0]
+
+                # really ugly check to ensure that this field really does exist somewhere,
+                # in some naming convention, before returning it as a possible field type
+                if (ftype,fname) not in self.ds.field_list and \
+                        fname not in self.ds.field_list and \
+                        (ftype,fname) not in self.ds.derived_field_list and \
+                        fname not in self.ds.derived_field_list and \
+                        (ftype,fname) not in self._container_fields:
+                    raise YTFieldNotFound((ftype,fname),self.ds)
+
+            # these tests are really insufficient as a field type may be valid, and the
+            # field name may be valid, but not the combination (field type, field name)
             if finfo.particle_type and ftype not in self.ds.particle_types:
                 raise YTFieldTypeNotFound(ftype)
             elif not finfo.particle_type and ftype not in self.ds.fluid_types:
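
The trailing comment flags the remaining hole: validating the field type and the field name independently still accepts combinations that are never defined. A hypothetical illustration, assuming a dataset that defines only ("gas", "density") and ("all", "particle_mass"):

    defined = {("gas", "density"), ("all", "particle_mass")}
    valid_types = {t for t, n in defined}
    valid_names = {n for t, n in defined}

    field = ("gas", "particle_mass")
    print(field[0] in valid_types and field[1] in valid_names)  # True: each half is valid
    print(field in defined)                                     # False: the pair is not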


https://bitbucket.org/yt_analysis/yt/commits/32768ff7a1d5/
Changeset:   32768ff7a1d5
Branch:      yt
User:        drudd
Date:        2014-08-29 22:51:19
Summary:     Change runtime error to YTFieldNotFound error
Affected #:  1 file

diff -r a9aa29ba3512f0e81f2ffff40443c0da499b4ebe -r 32768ff7a1d54be679cf6ac360bbf20428faf9e2 yt/geometry/geometry_handler.py
--- a/yt/geometry/geometry_handler.py
+++ b/yt/geometry/geometry_handler.py
@@ -217,7 +217,7 @@
             elif fname in self.ds.derived_field_list or (ftype, fname) in self.ds.derived_field_list:
                 fields_to_generate.append((ftype, fname))
             else:
-                raise RuntimeError("Invalid field encountered in _split_fields: (%s, %s)" % (ftype,fname))
+                raise YTFieldNotFound((ftype,fname), self.ds)
         return fields_to_read, fields_to_generate
 
     def _read_particle_fields(self, fields, dobj, chunk = None):


https://bitbucket.org/yt_analysis/yt/commits/e0dc234175d5/
Changeset:   e0dc234175d5
Branch:      yt
User:        drudd
Date:        2014-08-30 20:03:47
Summary:     Change camera temporary weight fields to use tuple naming convention
Affected #:  1 file

diff -r 32768ff7a1d54be679cf6ac360bbf20428faf9e2 -r e0dc234175d5bac6a0bc1cac41e1444b4b886ce4 yt/visualization/volume_rendering/camera.py
--- a/yt/visualization/volume_rendering/camera.py
+++ b/yt/visualization/volume_rendering/camera.py
@@ -2080,18 +2080,19 @@
     if weight is not None:
         # This is a temporary field, which we will remove at the end.
         def _make_wf(f, w):
+            weightfield = ("index", "temp_weightfield")
             def temp_weightfield(a, b):
                 tr = b[f].astype("float64") * b[w]
                 return b.apply_units(tr, a.units)
                 return tr
             return temp_weightfield
-        ds.field_info.add_field("temp_weightfield",
+        ds.field_info.add_field(weightfield,
             function=_make_wf(field, weight))
         # Now we have to tell the dataset to add it and to calculate
         # its dependencies..
-        deps, _ = ds.field_info.check_derived_fields(["temp_weightfield"])
+        deps, _ = ds.field_info.check_derived_fields([weightfield])
         ds.field_dependencies.update(deps)
-        fields = ["temp_weightfield", weight]
+        fields = [weightfield, weight]
     nv = 12*nside**2
     image = np.zeros((nv,1,4), dtype='float64', order='C')
     vs = arr_pix2vec_nest(nside, np.arange(nv))
@@ -2128,8 +2129,8 @@
     else:
         image[:,:,0] /= image[:,:,1]
         image = ds.arr(image, finfo.units)
-        ds.field_info.pop("temp_weightfield")
-        ds.field_dependencies.pop("temp_weightfield")
+        ds.field_info.pop(weightfield)
+        ds.field_dependencies.pop(weightfield)
     return image[:,0,0]
 
 def plot_allsky_healpix(image, nside, fn, label = "", rotation = None,
@@ -2170,19 +2171,20 @@
         fields = [field]
         if self.weight is not None:
             # This is a temporary field, which we will remove at the end.
+            self.weightfield = ("index", "temp_weightfield")
             def _make_wf(f, w):
                 def temp_weightfield(a, b):
                     tr = b[f].astype("float64") * b[w]
                     return b.apply_units(tr, a.units)
                     return tr
                 return temp_weightfield
-            ds.field_info.add_field("temp_weightfield",
+            ds.field_info.add_field(self.weightfield,
                 function=_make_wf(self.field, self.weight))
             # Now we have to tell the dataset to add it and to calculate
             # its dependencies..
-            deps, _ = ds.field_info.check_derived_fields(["temp_weightfield"])
+            deps, _ = ds.field_info.check_derived_fields([self.weightfield])
             ds.field_dependencies.update(deps)
-            fields = ["temp_weightfield", self.weight]
+            fields = [self.weightfield, self.weight]
         
         self.fields = fields
         self.log_fields = [False]*len(self.fields)
@@ -2374,9 +2376,10 @@
                                no_ghost=no_ghost, interpolated=interpolated, 
                                north_vector=north_vector)
     image = projcam.snapshot()
+    # poor encapsulation, should be in a __exit__ method
     if weight is not None:
-        ds.field_info.pop("temp_weightfield")
-        ds.field_dependencies.pop("temp_weightfield")
+        ds.field_info.pop(projcam.weightfield)
+        ds.field_dependencies.pop(projcam.weightfield)
     del projcam
     return image[:,:]
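
Background on the rename: yt addresses fields by (field type, field name) tuples, and a bare string key leaves the type to be guessed at lookup time, the same ambiguity the earlier commits in this series tightened up. The two registration styles side by side (arguments as in the diff above):

    # string key: the field type must be guessed when the field is looked up
    ds.field_info.add_field("temp_weightfield", function=_make_wf(field, weight))

    # tuple key: the (type, name) pair is unambiguous
    weightfield = ("index", "temp_weightfield")
    ds.field_info.add_field(weightfield, function=_make_wf(field, weight))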
 


https://bitbucket.org/yt_analysis/yt/commits/2c5d169a0cf5/
Changeset:   2c5d169a0cf5
Branch:      yt
User:        drudd
Date:        2014-08-30 20:11:17
Summary:     Change ProjectionCamera tests to use tuple field name
Affected #:  1 file

diff -r e0dc234175d5bac6a0bc1cac41e1444b4b886ce4 -r 2c5d169a0cf5a9ccd8a6dbad881dba8353d61173 yt/visualization/volume_rendering/tests/test_vr_cameras.py
--- a/yt/visualization/volume_rendering/tests/test_vr_cameras.py
+++ b/yt/visualization/volume_rendering/tests/test_vr_cameras.py
@@ -51,7 +51,7 @@
         self.L = np.array([0.5, 0.5, 0.5])
         self.W = 1.5*self.ds.domain_width
         self.N = 64
-        self.field = "density"
+        self.field = ("gas", "density")
 
     def tearDown(self):
         if use_tmpdir:
@@ -61,7 +61,7 @@
     def setup_transfer_function(self, camera_type):
         if camera_type in ['perspective', 'camera',
                            'stereopair', 'interactive']:
-            mi, ma = self.ds.all_data().quantities['Extrema']("density")
+            mi, ma = self.ds.all_data().quantities['Extrema'](self.field)
             tf = ColorTransferFunction((mi, ma),
                                        grey_opacity=True)
             tf.map_to_colormap(mi, ma, scale=10., colormap='RdBu_r')
@@ -110,7 +110,7 @@
         ds = self.ds
 
         cam = ProjectionCamera(self.c, self.L, self.W, self.N, ds=ds,
-                               field="density")
+                               field=self.field)
         cam.snapshot('projection.png')
         assert_fname('projection.png')
 


https://bitbucket.org/yt_analysis/yt/commits/055d22bc30fb/
Changeset:   055d22bc30fb
Branch:      yt
User:        drudd
Date:        2014-08-30 20:24:12
Summary:     Fix weightfield variable being initialized in the wrong place
Affected #:  1 file

diff -r 2c5d169a0cf5a9ccd8a6dbad881dba8353d61173 -r 055d22bc30fbe6fe51d16bb0a1ab5e24d52dd560 yt/visualization/volume_rendering/camera.py
--- a/yt/visualization/volume_rendering/camera.py
+++ b/yt/visualization/volume_rendering/camera.py
@@ -2079,8 +2079,8 @@
     center = np.array(center, dtype='float64')
     if weight is not None:
         # This is a temporary field, which we will remove at the end.
+        weightfield = ("index", "temp_weightfield")
         def _make_wf(f, w):
-            weightfield = ("index", "temp_weightfield")
             def temp_weightfield(a, b):
                 tr = b[f].astype("float64") * b[w]
                 return b.apply_units(tr, a.units)


https://bitbucket.org/yt_analysis/yt/commits/1c7a5a5ea417/
Changeset:   1c7a5a5ea417
Branch:      yt
User:        drudd
Date:        2014-08-30 20:29:15
Summary:     Moved deletion of weightfield to class destructor
Affected #:  1 file

diff -r 055d22bc30fbe6fe51d16bb0a1ab5e24d52dd560 -r 1c7a5a5ea417e42316f39030ccd28f1789ecd862 yt/visualization/volume_rendering/camera.py
--- a/yt/visualization/volume_rendering/camera.py
+++ b/yt/visualization/volume_rendering/camera.py
@@ -2194,6 +2194,17 @@
                 north_vector=north_vector,
                 no_ghost=no_ghost)
 
+    # this would be better in an __exit__ function, but that would require
+    # changes in code that uses this class
+    def __del__(self):
+        if hasattr(self,"weightfield") and hasattr(self,"ds"):
+            try:
+                self.ds.field_info.pop(self.weightfield)
+                self.ds.field_dependencies.pop(self.weightfield)
+            except KeyError:
+                pass
+        Camera.__del__(self)
+
     def get_sampler(self, args):
         if self.interpolated:
             sampler = InterpolatedProjectionSampler(*args)
@@ -2376,10 +2387,5 @@
                                no_ghost=no_ghost, interpolated=interpolated, 
                                north_vector=north_vector)
     image = projcam.snapshot()
-    # poor encapsulation, should be in a __exit__ method
-    if weight is not None:
-        ds.field_info.pop(projcam.weightfield)
-        ds.field_dependencies.pop(projcam.weightfield)
-    del projcam
     return image[:,:]
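
The __exit__ remark in the previous commit suggests a context-manager shape; a hypothetical sketch of that idea (not what this commit implements, which keeps __del__ so existing callers are unaffected), assuming field_info and field_dependencies both behave like dicts:

    class TempFieldContext(object):
        # Hypothetical: owns a temporary derived field for the "with" block.
        def __init__(self, ds, weightfield):
            self.ds, self.weightfield = ds, weightfield

        def __enter__(self):
            return self

        def __exit__(self, exc_type, exc_value, traceback):
            # deterministic cleanup instead of waiting for garbage collection
            self.ds.field_info.pop(self.weightfield, None)
            self.ds.field_dependencies.pop(self.weightfield, None)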
 


https://bitbucket.org/yt_analysis/yt/commits/198766f3730b/
Changeset:   198766f3730b
Branch:      yt
User:        drudd
Date:        2014-08-30 20:40:47
Summary:     Ensure unique weightfield name so ProjectionCamera instances don't conflict
Affected #:  1 file

diff -r 1c7a5a5ea417e42316f39030ccd28f1789ecd862 -r 198766f3730bc46e0996a8123df050f9f6adb526 yt/visualization/volume_rendering/camera.py
--- a/yt/visualization/volume_rendering/camera.py
+++ b/yt/visualization/volume_rendering/camera.py
@@ -2170,8 +2170,10 @@
 
         fields = [field]
         if self.weight is not None:
-            # This is a temporary field, which we will remove at the end.
-            self.weightfield = ("index", "temp_weightfield")
+            # This is a temporary field, which we will remove at the end
+            # it is given a unique name to avoid conflicting with other 
+            # class instances
+            self.weightfield = ("index", "temp_weightfield_%u"%(id(self),))
             def _make_wf(f, w):
                 def temp_weightfield(a, b):
                     tr = b[f].astype("float64") * b[w]
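
id(self) is unique among simultaneously live objects, so two coexisting cameras can no longer clobber each other's temporary field (CPython may reuse an id once an object is collected, but the destructor added two commits earlier removes the field first):

    class Cam(object): pass
    a, b = Cam(), Cam()
    print("temp_weightfield_%u" % (id(a),) ==
          "temp_weightfield_%u" % (id(b),))   # False while both are alive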


https://bitbucket.org/yt_analysis/yt/commits/21db2cade8b2/
Changeset:   21db2cade8b2
Branch:      yt
User:        drudd
Date:        2014-08-31 19:43:50
Summary:     Removed commented code from static_output
Affected #:  1 file

diff -r 198766f3730bc46e0996a8123df050f9f6adb526 -r 21db2cade8b2b358f8c4019055c5909c34462aff yt/data_objects/static_output.py
--- a/yt/data_objects/static_output.py
+++ b/yt/data_objects/static_output.py
@@ -460,9 +460,6 @@
             self._last_freq = field
             self._last_finfo = self.field_info[(ftype, fname)]
             return self._last_finfo
-        #Removed as it can suggest invalid fields are valid
-        #if fname == self._last_freq[1]:
-        #    return self._last_finfo
         if fname in self.field_info:
             # Sometimes, if guessing_type == True, this will be switched for
             # the type of field it is.  So we look at the field type and


https://bitbucket.org/yt_analysis/yt/commits/ab6a668b84d4/
Changeset:   ab6a668b84d4
Branch:      yt
User:        drudd
Date:        2014-08-31 20:54:03
Summary:     Catch AttributeError in ProjectionCamera.__del__
Affected #:  1 file

diff -r 21db2cade8b2b358f8c4019055c5909c34462aff -r ab6a668b84d40e49d9ded14f8a75901ea807d87b yt/visualization/volume_rendering/camera.py
--- a/yt/visualization/volume_rendering/camera.py
+++ b/yt/visualization/volume_rendering/camera.py
@@ -2205,7 +2205,10 @@
                 self.ds.field_dependencies.pop(self.weightfield)
             except KeyError:
                 pass
-        Camera.__del__(self)
+        try:
+            Camera.__del__(self)
+        except AttributeError:
+            pass
 
     def get_sampler(self, args):
         if self.interpolated:
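
One way the guarded AttributeError arises: calling a base-class __del__ explicitly fails when the base class never defines one, since object itself has no __del__ to inherit. For example:

    class Base(object):
        pass

    class Derived(Base):
        def __del__(self):
            try:
                Base.__del__(self)   # AttributeError: Base has no __del__
            except AttributeError:
                pass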


https://bitbucket.org/yt_analysis/yt/commits/7ea8927bd648/
Changeset:   7ea8927bd648
Branch:      yt
User:        drudd
Date:        2014-08-31 21:14:14
Summary:     Remove useless _get_field_info calls from _read_particle_fields
Affected #:  1 file

diff -r ab6a668b84d40e49d9ded14f8a75901ea807d87b -r 7ea8927bd64826419637dd928cf17c83ca34682f yt/geometry/geometry_handler.py
--- a/yt/geometry/geometry_handler.py
+++ b/yt/geometry/geometry_handler.py
@@ -232,9 +232,6 @@
             self._chunk_io(dobj, cache = False),
             selector,
             fields_to_read)
-        for field in fields_to_read:
-            ftype, fname = field
-            finfo = self.ds._get_field_info(*field)
         return fields_to_return, fields_to_generate
 
     def _read_fluid_fields(self, fields, dobj, chunk = None):


https://bitbucket.org/yt_analysis/yt/commits/bbb89c513752/
Changeset:   bbb89c513752
Branch:      yt
User:        drudd
Date:        2014-09-05 19:44:48
Summary:     Merged yt_analysis/yt into yt
Affected #:  61 files

diff -r 7ea8927bd64826419637dd928cf17c83ca34682f -r bbb89c51375248e2a98197e62d40c13a335e7db4 doc/install_script.sh
--- a/doc/install_script.sh
+++ b/doc/install_script.sh
@@ -500,13 +500,28 @@
     fi
     [ ! -e $LIB/extracted ] && tar xfz $LIB.tar.gz
     touch $LIB/extracted
+    BUILD_ARGS=""
+    case $LIB in
+        *h5py*)
+            BUILD_ARGS="--hdf5=${HDF5_DIR}"
+            ;;
+        *numpy*)
+            if [ -e ${DEST_DIR}/lib/python2.7/site-packages/numpy/__init__.py ]
+            then
+                VER=$(${DEST_DIR}/bin/python -c 'from distutils.version import StrictVersion as SV; \
+                                                 import numpy; print SV(numpy.__version__) < SV("1.8.0")')
+                if [ $VER == "True" ]
+                then
+                    echo "Removing previous NumPy instance (see issue #889)"
+                    rm -rf ${DEST_DIR}/lib/python2.7/site-packages/{numpy*,*.pth}
+                fi
+            fi
+            ;;
+        *)
+            ;;
+    esac
     cd $LIB
-    if [ ! -z `echo $LIB | grep h5py` ]
-    then
-	( ${DEST_DIR}/bin/python2.7 setup.py build --hdf5=${HDF5_DIR} $* 2>&1 ) 1>> ${LOG_FILE} || do_exit
-    else
-        ( ${DEST_DIR}/bin/python2.7 setup.py build   $* 2>&1 ) 1>> ${LOG_FILE} || do_exit
-    fi
+    ( ${DEST_DIR}/bin/python2.7 setup.py build ${BUILD_ARGS} $* 2>&1 ) 1>> ${LOG_FILE} || do_exit
     ( ${DEST_DIR}/bin/python2.7 setup.py install    2>&1 ) 1>> ${LOG_FILE} || do_exit
     touch done
     cd ..
@@ -726,7 +741,7 @@
         cd $FREETYPE_VER
         ( ./configure CFLAGS=-I${DEST_DIR}/include --prefix=${DEST_DIR}/ 2>&1 ) 1>> ${LOG_FILE} || do_exit
         ( make 2>&1 ) 1>> ${LOG_FILE} || do_exit
-		( make install 2>&1 ) 1>> ${LOG_FILE} || do_exit
+        ( make install 2>&1 ) 1>> ${LOG_FILE} || do_exit
         ( make clean 2>&1) 1>> ${LOG_FILE} || do_exit
         touch done
         cd ..
@@ -1022,7 +1037,7 @@
     echo
     echo "To get started with yt, check out the orientation:"
     echo
-    echo "    http://yt-project.org/doc/bootcamp/"
+    echo "    http://yt-project.org/doc/quickstart/"
     echo
     echo "The source for yt is located at:"
     echo "    $YT_DIR"

diff -r 7ea8927bd64826419637dd928cf17c83ca34682f -r bbb89c51375248e2a98197e62d40c13a335e7db4 doc/source/analyzing/units/index.rst
--- a/doc/source/analyzing/units/index.rst
+++ b/doc/source/analyzing/units/index.rst
@@ -37,7 +37,7 @@
 .. note::
 
    The notebooks use sample datasets that are available for download at
-   http://yt-project.org/data.  See :ref:`bootcamp-introduction` for more
+   http://yt-project.org/data.  See :ref:`quickstart-introduction` for more
    details.
 
 Let us know if you would like to contribute other example notebooks, or have

diff -r 7ea8927bd64826419637dd928cf17c83ca34682f -r bbb89c51375248e2a98197e62d40c13a335e7db4 doc/source/bootcamp/1)_Introduction.ipynb
--- a/doc/source/bootcamp/1)_Introduction.ipynb
+++ /dev/null
@@ -1,72 +0,0 @@
-{
- "metadata": {
-  "name": "",
-  "signature": "sha256:39620670ce7751b23f30d2123fd3598de1c7843331f65de13e29f4ae9f759e0f"
- },
- "nbformat": 3,
- "nbformat_minor": 0,
- "worksheets": [
-  {
-   "cells": [
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "# Welcome to the yt bootcamp!\n",
-      "\n",
-      "In this brief tutorial, we'll go over how to load up data, analyze things, inspect your data, and make some visualizations.\n",
-      "\n",
-      "Our documentation page can provide information on a variety of the commands that are used here, both in narrative documentation as well as recipes for specific functionality in our cookbook.  The documentation exists at http://yt-project.org/doc/.  If you encounter problems, look for help here: http://yt-project.org/doc/help/index.html.\n",
-      "\n",
-      "## Acquiring the datasets for this tutorial\n",
-      "\n",
-      "If you are executing these tutorials interactively, you need some sample datasets on which to run the code.  You can download these datasets at http://yt-project.org/data/.  The datasets necessary for each lesson are noted next to the corresponding tutorial.\n",
-      "\n",
-      "## What's Next?\n",
-      "\n",
-      "The Notebooks are meant to be explored in this order:\n",
-      "\n",
-      "1. Introduction\n",
-      "2. Data Inspection (IsolatedGalaxy dataset)\n",
-      "3. Simple Visualization (enzo_tiny_cosmology & Enzo_64 datasets)\n",
-      "4. Data Objects and Time Series (IsolatedGalaxy dataset)\n",
-      "5. Derived Fields and Profiles (IsolatedGalaxy dataset)\n",
-      "6. Volume Rendering (IsolatedGalaxy dataset)"
-     ]
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "The following code will download the data needed for this tutorial automatically using `curl`. It may take some time so please wait when the kernel is busy. You will need to set `download_datasets` to True before using it."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "download_datasets = False\n",
-      "if download_datasets:\n",
-      "    !curl -sSO http://yt-project.org/data/enzo_tiny_cosmology.tar\n",
-      "    print \"Got enzo_tiny_cosmology\"\n",
-      "    !tar xf enzo_tiny_cosmology.tar\n",
-      "    \n",
-      "    !curl -sSO http://yt-project.org/data/Enzo_64.tar\n",
-      "    print \"Got Enzo_64\"\n",
-      "    !tar xf Enzo_64.tar\n",
-      "    \n",
-      "    !curl -sSO http://yt-project.org/data/IsolatedGalaxy.tar\n",
-      "    print \"Got IsolatedGalaxy\"\n",
-      "    !tar xf IsolatedGalaxy.tar\n",
-      "    \n",
-      "    print \"All done!\""
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    }
-   ],
-   "metadata": {}
-  }
- ]
-}
\ No newline at end of file

diff -r 7ea8927bd64826419637dd928cf17c83ca34682f -r bbb89c51375248e2a98197e62d40c13a335e7db4 doc/source/bootcamp/2)_Data_Inspection.ipynb
--- a/doc/source/bootcamp/2)_Data_Inspection.ipynb
+++ /dev/null
@@ -1,384 +0,0 @@
-{
- "metadata": {
-  "name": "",
-  "signature": "sha256:a8fe78715c1f3900c37c675d84320fe65f0ba8734abba60fd12e74d957e5d8ee"
- },
- "nbformat": 3,
- "nbformat_minor": 0,
- "worksheets": [
-  {
-   "cells": [
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "# Starting Out and Loading Data\n",
-      "\n",
-      "We're going to get started by loading up yt.  This next command brings all of the libraries into memory and sets up our environment."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "import yt"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "Now that we've loaded yt, we can load up some data.  Let's load the `IsolatedGalaxy` dataset."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "ds = yt.load(\"IsolatedGalaxy/galaxy0030/galaxy0030\")"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "## Fields and Facts\n",
-      "\n",
-      "When you call the `load` function, yt tries to do very little -- this is designed to be a fast operation, just setting up some information about the simulation.  Now, the first time you access the \"index\" it will read and load the mesh and then determine where data is placed in the physical domain and on disk.  Once it knows that, yt can tell you some statistics about the simulation:"
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "ds.print_stats()"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "yt can also tell you the fields it found on disk:"
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "ds.field_list"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "And, all of the fields it thinks it knows how to generate:"
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "ds.derived_field_list"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "yt can also transparently generate fields.  However, we encourage you to examine exactly what yt is doing when it generates those fields.  To see, you can ask for the source of a given field."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "print ds.field_info[\"gas\", \"vorticity_x\"].get_source()"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "yt stores information about the domain of the simulation:"
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "print ds.domain_width"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "yt can also convert this into various units:"
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "print ds.domain_width.in_units(\"kpc\")\n",
-      "print ds.domain_width.in_units(\"au\")\n",
-      "print ds.domain_width.in_units(\"mile\")"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "# Mesh Structure\n",
-      "\n",
-      "If you're using a simulation type that has grids (for instance, here we're using an Enzo simulation) you can examine the structure of the mesh.  For the most part, you probably won't have to use this unless you're debugging a simulation or examining in detail what is going on."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "print ds.index.grid_left_edge"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "But, you may have to access information about individual grid objects!  Each grid object mediates accessing data from the disk and has a number of attributes that tell you about it.  The index (`ds.index` here) has an attribute `grids` which is all of the grid objects."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "print ds.index.grids[1]"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "g = ds.index.grids[1]\n",
-      "print g"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "Grids have dimensions, extents, level, and even a list of Child grids."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "g.ActiveDimensions"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "g.LeftEdge, g.RightEdge"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "g.Level"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "g.Children"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "## Advanced Grid Inspection\n",
-      "\n",
-      "If we want to examine grids only at a given level, we can!  Not only that, but we can load data and take a look at various fields.\n",
-      "\n",
-      "*This section can be skipped!*"
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "gs = ds.index.select_grids(ds.index.max_level)"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "g2 = gs[0]\n",
-      "print g2\n",
-      "print g2.Parent\n",
-      "print g2.get_global_startindex()"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "print g2[\"density\"][:,:,0]"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "print (g2.Parent.child_mask == 0).sum() * 8\n",
-      "print g2.ActiveDimensions.prod()"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "for f in ds.field_list:\n",
-      "    fv = g[f]\n",
-      "    if fv.size == 0: continue\n",
-      "    print f, fv.min(), fv.max()"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "# Examining Data in Regions\n",
-      "\n",
-      "yt provides data object selectors.  In subsequent notebooks we'll examine these in more detail, but we can select a sphere of data and perform a number of operations on it.  yt makes it easy to operate on fluid fields in an object in *bulk*, but you can also examine individual field values.\n",
-      "\n",
-      "This creates a sphere selector positioned at the most dense point in the simulation that has a radius of 10 kpc."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "sp = ds.sphere(\"max\", (10, 'kpc'))"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "print sp"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "We can calculate a bunch of bulk quantities.  Here's that list, but there's a list in the docs, too!"
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "print sp.quantities.keys()"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "Let's look at the total mass.  This is how you call a given quantity.  yt calls these \"Derived Quantities\".  We'll talk about a few in a later notebook."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "print sp.quantities.total_mass()"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    }
-   ],
-   "metadata": {}
-  }
- ]
-}
\ No newline at end of file

diff -r 7ea8927bd64826419637dd928cf17c83ca34682f -r bbb89c51375248e2a98197e62d40c13a335e7db4 doc/source/bootcamp/3)_Simple_Visualization.ipynb
--- a/doc/source/bootcamp/3)_Simple_Visualization.ipynb
+++ /dev/null
@@ -1,275 +0,0 @@
-{
- "metadata": {
-  "name": "",
-  "signature": "sha256:c00ba7fdbbd9ea957d06060ad70f06f629b1fd4ebf5379c1fdad2697ab0a4cd6"
- },
- "nbformat": 3,
- "nbformat_minor": 0,
- "worksheets": [
-  {
-   "cells": [
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "# Simple Visualizations of Data\n",
-      "\n",
-      "Just like in our first notebook, we have to load yt and then some data."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "import yt"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "For this notebook, we'll load up a cosmology dataset."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "ds = yt.load(\"enzo_tiny_cosmology/DD0046/DD0046\")\n",
-      "print \"Redshift =\", ds.current_redshift"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "In the terms that yt uses, a projection is a line integral through the domain.  This can either be unweighted (in which case a column density is returned) or weighted, in which case an average value is returned.  Projections are, like all other data objects in yt, full-fledged data objects that churn through data and present that to you.  However, we also provide a simple method of creating Projections and plotting them in a single step.  This is called a Plot Window, here specifically known as a `ProjectionPlot`.  One thing to note is that in yt, we project all the way through the entire domain at a single time.  This means that the first call to projecting can be somewhat time consuming, but panning, zooming and plotting are all quite fast.\n",
-      "\n",
-      "yt is designed to make it easy to make nice plots and straightforward to modify those plots directly.  The cookbook in the documentation includes detailed examples of this."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "p = yt.ProjectionPlot(ds, \"y\", \"density\")\n",
-      "p.show()"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "The `show` command simply sends the plot to the IPython notebook.  You can also call `p.save()` which will save the plot to the file system.  This function accepts an argument, which will be pre-prended to the filename and can be used to name it based on the width or to supply a location.\n",
-      "\n",
-      "Now we'll zoom and pan a bit."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "p.zoom(2.0)"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "p.pan_rel((0.1, 0.0))"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "p.zoom(10.0)"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "p.pan_rel((-0.25, -0.5))"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "p.zoom(0.1)"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "If we specify multiple fields, each time we call `show` we get multiple plots back.  Same for `save`!"
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "p = yt.ProjectionPlot(ds, \"z\", [\"density\", \"temperature\"], weight_field=\"density\")\n",
-      "p.show()"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "We can adjust the colormap on a field-by-field basis."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "p.set_cmap(\"temperature\", \"hot\")"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "And, we can re-center the plot on different locations.  One possible use of this would be to make a single `ProjectionPlot` which you move around to look at different regions in your simulation, saving at each one."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "v, c = ds.find_max(\"density\")\n",
-      "p.set_center((c[0], c[1]))\n",
-      "p.zoom(10)"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "Okay, let's load up a bigger simulation (from `Enzo_64` this time) and make a slice plot."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "ds = yt.load(\"Enzo_64/DD0043/data0043\")\n",
-      "s = yt.SlicePlot(ds, \"z\", [\"density\", \"velocity_magnitude\"], center=\"max\")\n",
-      "s.set_cmap(\"velocity_magnitude\", \"kamae\")\n",
-      "s.zoom(10.0)"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "We can adjust the logging of various fields:"
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "s.set_log(\"velocity_magnitude\", True)"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "yt provides many different annotations for your plots.  You can see all of these in the documentation, or if you type `s.annotate_` and press tab, a list will show up here.  We'll annotate with velocity arrows."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "s.annotate_velocity()"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "Contours can also be overlaid:"
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "s = yt.SlicePlot(ds, \"x\", [\"density\"], center=\"max\")\n",
-      "s.annotate_contour(\"temperature\")\n",
-      "s.zoom(2.5)"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "Finally, we can save out to the file system."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "s.save()"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    }
-   ],
-   "metadata": {}
-  }
- ]
-}
\ No newline at end of file

diff -r 7ea8927bd64826419637dd928cf17c83ca34682f -r bbb89c51375248e2a98197e62d40c13a335e7db4 doc/source/bootcamp/4)_Data_Objects_and_Time_Series.ipynb
--- a/doc/source/bootcamp/4)_Data_Objects_and_Time_Series.ipynb
+++ /dev/null
@@ -1,382 +0,0 @@
-{
- "metadata": {
-  "name": "",
-  "signature": "sha256:a46e1baa90d32045c2b524100f28bad41b3665249612c9a275ee0375a6f4be20"
- },
- "nbformat": 3,
- "nbformat_minor": 0,
- "worksheets": [
-  {
-   "cells": [
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "# Data Objects and Time Series Data\n",
-      "\n",
-      "Just like before, we will load up yt.  Since we'll be using pylab to plot some data in this notebook, we additionally tell matplotlib to place plots inline inside the notebook."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "%matplotlib inline\n",
-      "import yt\n",
-      "import numpy as np\n",
-      "from matplotlib import pylab\n",
-      "from yt.analysis_modules.halo_finding.api import HaloFinder"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "## Time Series Data\n",
-      "\n",
-      "Unlike before, instead of loading a single dataset, this time we'll load a bunch which we'll examine in sequence.  This command creates a `DatasetSeries` object, which can be iterated over (including in parallel, which is outside the scope of this bootcamp) and analyzed.  There are some other helpful operations it can provide, but we'll stick to the basics here.\n",
-      "\n",
-      "Note that you can specify either a list of filenames, or a glob (i.e., asterisk) pattern in this."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "ts = yt.DatasetSeries(\"enzo_tiny_cosmology/*/*.hierarchy\")"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "### Example 1: Simple Time Series\n",
-      "\n",
-      "As a simple example of how we can use this functionality, let's find the min and max of the density as a function of time in this simulation.  To do this we use the construction `for ds in ts` where `ds` means \"Dataset\" and `ts` is the \"Time Series\" we just loaded up.  For each dataset, we'll create an object (`dd`) that covers the entire domain.  (`all_data` is a shorthand function for this.)  We'll then call the `extrema` Derived Quantity, and append the min and max to our extrema outputs."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "rho_ex = []\n",
-      "times = []\n",
-      "for ds in ts:\n",
-      "    dd = ds.all_data()\n",
-      "    rho_ex.append(dd.quantities.extrema(\"density\"))\n",
-      "    times.append(ds.current_time.in_units(\"Gyr\"))\n",
-      "rho_ex = np.array(rho_ex)"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "Now we plot the minimum and the maximum:"
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "pylab.semilogy(times, rho_ex[:,0], '-xk', label='Minimum')\n",
-      "pylab.semilogy(times, rho_ex[:,1], '-xr', label='Maximum')\n",
-      "pylab.ylabel(\"Density ($g/cm^3$)\")\n",
-      "pylab.xlabel(\"Time (Gyr)\")\n",
-      "pylab.legend()\n",
-      "pylab.ylim(1e-32, 1e-21)\n",
-      "pylab.show()"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "### Example 2: Advanced Time Series\n",
-      "\n",
-      "Let's do something a bit different.  Let's calculate the total mass inside halos and outside halos.\n",
-      "\n",
-      "This actually touches a lot of different pieces of machinery in yt.  For every dataset, we will run the halo finder HOP.  Then, we calculate the total mass in the domain.  Then, for each halo, we calculate the sum of the baryon mass in that halo.  We'll keep running tallies of these two things."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "from yt.units import Msun\n",
-      "\n",
-      "mass = []\n",
-      "zs = []\n",
-      "for ds in ts:\n",
-      "    halos = HaloFinder(ds)\n",
-      "    dd = ds.all_data()\n",
-      "    total_mass = dd.quantities.total_quantity(\"cell_mass\").in_units(\"Msun\")\n",
-      "    total_in_baryons = 0.0*Msun\n",
-      "    for halo in halos:\n",
-      "        sp = halo.get_sphere()\n",
-      "        total_in_baryons += sp.quantities.total_quantity(\"cell_mass\").in_units(\"Msun\")\n",
-      "    mass.append(total_in_baryons/total_mass)\n",
-      "    zs.append(ds.current_redshift)"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "Now let's plot them!"
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "pylab.semilogx(zs, mass, '-xb')\n",
-      "pylab.xlabel(\"Redshift\")\n",
-      "pylab.ylabel(\"Mass in halos / Total mass\")\n",
-      "pylab.xlim(max(zs), min(zs))\n",
-      "pylab.ylim(-0.01, .18)"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "## Data Objects\n",
-      "\n",
-      "Time series data have many applications, but most of them rely on examining the underlying data in some way.  Below, we'll see how to use and manipulate data objects.\n",
-      "\n",
-      "### Ray Queries\n",
-      "\n",
-      "yt provides the ability to examine rays, or lines, through the domain.  Note that these are not periodic, unlike most other data objects.  We create a ray object and can then examine quantities of it.  Rays have the special fields `t` and `dts`, which correspond to the time the ray enters a given cell and the distance it travels through that cell.\n",
-      "\n",
-      "To create a ray, we specify the start and end points.\n",
-      "\n",
-      "Note that we need to convert these arrays to numpy arrays due to a bug in matplotlib 1.3.1."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "ray = ds.ray([0.1, 0.2, 0.3], [0.9, 0.8, 0.7])\n",
-      "pylab.semilogy(np.array(ray[\"t\"]), np.array(ray[\"density\"]))"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "print ray[\"dts\"]"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "print ray[\"t\"]"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "print ray[\"x\"]"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "### Slice Queries\n",
-      "\n",
-      "While slices are often used for visualization, they can be useful for other operations as well.  yt regards slices as multi-resolution objects.  They are an array of cells that are not all the same size; it only returns the cells at the highest resolution that it intersects.  (This is true for all yt data objects.)  Slices and projections have the special fields `px`, `py`, `pdx` and `pdy`, which correspond to the coordinates and half-widths in the pixel plane."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "ds = yt.load(\"IsolatedGalaxy/galaxy0030/galaxy0030\")\n",
-      "v, c = ds.find_max(\"density\")\n",
-      "sl = ds.slice(0, c[0])\n",
-      "print sl[\"index\", \"x\"]\n",
-      "print sl[\"index\", \"z\"]\n",
-      "print sl[\"pdx\"]\n",
-      "print sl[\"gas\", \"density\"].shape"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "If we want to do something interesting with a `Slice`, we can turn it into a `FixedResolutionBuffer`.  This object can be queried and will return a 2D array of values."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "frb = sl.to_frb((50.0, 'kpc'), 1024)\n",
-      "print frb[\"gas\", \"density\"].shape"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "yt provides a few functions for writing arrays to disk, particularly in image form.  Here we'll write out the log of `density`, and then use IPython to display it back here.  Note that for the most part, you will probably want to use a `PlotWindow` for this, but in the case that it is useful you can directly manipulate the data."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "yt.write_image(np.log10(frb[\"gas\", \"density\"]), \"temp.png\")\n",
-      "from IPython.display import Image\n",
-      "Image(filename = \"temp.png\")"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "### Off-Axis Slices\n",
-      "\n",
-      "yt provides not only slices, but off-axis slices that are sometimes called \"cutting planes.\"  These are specified by (in order) a normal vector and a center.  Here we've set the normal vector to `[0.2, 0.3, 0.5]` and the center to be the point of maximum density.\n",
-      "\n",
-      "We can then turn these directly into plot windows using `to_pw`.  Note that the `to_pw` and `to_frb` methods are available on slices, off-axis slices, and projections, and can be used on any of them."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "cp = ds.cutting([0.2, 0.3, 0.5], \"max\")\n",
-      "pw = cp.to_pw(fields = [(\"gas\", \"density\")])"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "Once we have our plot window from our cutting plane, we can show it here."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "pw.show()"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "We can, as noted above, do the same with our slice:"
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "pws = sl.to_pw(fields=[\"density\"])\n",
-      "#pws.show()\n",
-      "print pws.plots.keys()"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "### Covering Grids\n",
-      "\n",
-      "If we want to access a 3D array of data that spans multiple resolutions in our simulation, we can use a covering grid.  This will return a 3D array of data, drawing from up to the resolution level specified when creating the data.  For example, if you create a covering grid that spans two child grids of a single parent grid, it will fill those zones covered by a zone of a child grid with the data from that child grid.  Where it is covered only by the parent grid, the cells from the parent grid will be duplicated (appropriately) to fill the covering grid.\n",
-      "\n",
-      "There are two different types of covering grids: unsmoothed and smoothed.  Smoothed grids will be filled through a cascading interpolation process; they will be filled at level 0, interpolated to level 1, filled at level 1, interpolated to level 2, filled at level 2, etc.  This will help to reduce edge effects.  Unsmoothed covering grids will not be interpolated, but rather values will be duplicated multiple times.\n",
-      "\n",
-      "Here we create an unsmoothed covering grid at level 2, with the left edge at `[0.0, 0.0, 0.0]` and with dimensions equal to those that would cover the entire domain at level 2.  We can then ask for the Density field, which will be a 3D array."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "cg = ds.covering_grid(2, [0.0, 0.0, 0.0], ds.domain_dimensions * 2**2)\n",
-      "print cg[\"density\"].shape"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "In this example, we do exactly the same thing: except we ask for a *smoothed* covering grid, which will reduce edge effects."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "scg = ds.smoothed_covering_grid(2, [0.0, 0.0, 0.0], ds.domain_dimensions * 2**2)\n",
-      "print scg[\"density\"].shape"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    }
-   ],
-   "metadata": {}
-  }
- ]
-}
\ No newline at end of file

diff -r 7ea8927bd64826419637dd928cf17c83ca34682f -r bbb89c51375248e2a98197e62d40c13a335e7db4 doc/source/bootcamp/5)_Derived_Fields_and_Profiles.ipynb
--- a/doc/source/bootcamp/5)_Derived_Fields_and_Profiles.ipynb
+++ /dev/null
@@ -1,254 +0,0 @@
-{
- "metadata": {
-  "name": "",
-  "signature": "sha256:eca573e749829cacda0a8c07c6d5d11d07a5de657563a44b8c4ffff8f735caed"
- },
- "nbformat": 3,
- "nbformat_minor": 0,
- "worksheets": [
-  {
-   "cells": [
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "# Derived Fields and Profiles\n",
-      "\n",
-      "One of the most powerful features in yt is the ability to create derived fields that act and look exactly like fields that exist on disk.  This means that they will be generated on demand and can be used anywhere a field that exists on disk would be used.  Additionally, you can create them by just writing python functions."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "%matplotlib inline\n",
-      "import yt\n",
-      "import numpy as np\n",
-      "from yt import derived_field\n",
-      "from matplotlib import pylab"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "## Derived Fields\n",
-      "\n",
-      "This is an example of the simplest possible way to create a derived field.  All derived fields are defined by a function and some metadata; that metadata can include units, LaTeX-friendly names, conversion factors, and so on.  Fields can be defined in the way in the next cell.  What this does is create a function which accepts two arguments and then provide the units for that field.  In this case, our field is `dinosaurs` and our units are `K*cm/s`.  The function itself can access any fields that are in the simulation, and it does so by requesting data from the object called `data`."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "@derived_field(name = \"dinosaurs\", units = \"K * cm/s\")\n",
-      "def _dinos(field, data):\n",
-      "    return data[\"temperature\"] * data[\"velocity_magnitude\"]"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "One important thing to note is that derived fields must be defined *before* any datasets are loaded.  Let's load up our data and take a look at some quantities."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "ds = yt.load(\"IsolatedGalaxy/galaxy0030/galaxy0030\")\n",
-      "dd = ds.all_data()\n",
-      "print dd.quantities.keys()"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "One interesting question is, what are the minimum and maximum values of dinosaur production rates in our isolated galaxy?  We can do that by examining the `extrema` quantity -- the exact same way that we would for density, temperature, and so on."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "print dd.quantities.extrema(\"dinosaurs\")"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "We can do the same for the average quantities as well."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "print dd.quantities.weighted_average_quantity(\"dinosaurs\", weight=\"temperature\")"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "## A Few Other Quantities\n",
-      "\n",
-      "We can ask other quantities of our data, as well.  For instance, this sequence of operations will find the most dense point, center a sphere on it, calculate the bulk velocity of that sphere, calculate the baryonic angular momentum vector, and then the density extrema.  All of this is done in a memory conservative way: if you have an absolutely enormous dataset, yt will split that dataset into pieces, apply intermediate reductions and then a final reduction to calculate your quantity."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "sp = ds.sphere(\"max\", (10.0, 'kpc'))\n",
-      "bv = sp.quantities.bulk_velocity()\n",
-      "L = sp.quantities.angular_momentum_vector()\n",
-      "rho_min, rho_max = sp.quantities.extrema(\"density\")\n",
-      "print bv\n",
-      "print L\n",
-      "print rho_min, rho_max"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "## Profiles\n",
-      "\n",
-      "yt provides the ability to bin in 1, 2 and 3 dimensions.  This means discretizing in one or more dimensions of phase space (density, temperature, etc) and then calculating either the total value of a field in each bin or the average value of a field in each bin.\n",
-      "\n",
-      "We do this using the objects `Profile1D`, `Profile2D`, and `Profile3D`.  The first two are the most common since they are the easiest to visualize.\n",
-      "\n",
-      "This first set of commands manually creates a profile object the sphere we created earlier, binned in 32 bins according to density between `rho_min` and `rho_max`, and then takes the density-weighted average of the fields `temperature` and (previously-defined) `dinosaurs`.  We then plot it in a loglog plot."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "prof = yt.Profile1D(sp, \"density\", 32, rho_min, rho_max, True, weight_field=\"cell_mass\")\n",
-      "prof.add_fields([\"temperature\",\"dinosaurs\"])\n",
-      "pylab.loglog(np.array(prof.x), np.array(prof[\"temperature\"]), \"-x\")\n",
-      "pylab.xlabel('Density $(g/cm^3)$')\n",
-      "pylab.ylabel('Temperature $(K)$')"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "Now we plot the `dinosaurs` field."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "pylab.loglog(np.array(prof.x), np.array(prof[\"dinosaurs\"]), '-x')\n",
-      "pylab.xlabel('Density $(g/cm^3)$')\n",
-      "pylab.ylabel('Dinosaurs $(K cm / s)$')"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "If we want to see the total mass in every bin, we profile the `cell_mass` field with no weight.  Specifying `weight=None` will simply take the total value in every bin and add that up."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "prof = yt.Profile1D(sp, \"density\", 32, rho_min, rho_max, True, weight_field=None)\n",
-      "prof.add_fields([\"cell_mass\"])\n",
-      "pylab.loglog(np.array(prof.x), np.array(prof[\"cell_mass\"].in_units(\"Msun\")), '-x')\n",
-      "pylab.xlabel('Density $(g/cm^3)$')\n",
-      "pylab.ylabel('Cell mass $(M_\\odot)$')"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "In addition to the low-level `ProfileND` interface, it's also quite straightforward to quickly create plots of profiles using the `ProfilePlot` class.  Let's redo the last plot using `ProfilePlot`"
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "prof = yt.ProfilePlot(sp, 'density', 'cell_mass', weight_field=None)\n",
-      "prof.set_unit('cell_mass', 'Msun')\n",
-      "prof.show()"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "## Field Parameters\n",
-      "\n",
-      "Field parameters are a method of passing information to derived fields.  For instance, you might pass in information about a vector you want to use as a basis for a coordinate transformation.  yt often uses things like `bulk_velocity` to identify velocities that should be subtracted off.  Here we show how that works:"
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "sp_small = ds.sphere(\"max\", (50.0, 'kpc'))\n",
-      "bv = sp_small.quantities.bulk_velocity()\n",
-      "\n",
-      "sp = ds.sphere(\"max\", (0.1, 'Mpc'))\n",
-      "rv1 = sp.quantities.extrema(\"radial_velocity\")\n",
-      "\n",
-      "sp.clear_data()\n",
-      "sp.set_field_parameter(\"bulk_velocity\", bv)\n",
-      "rv2 = sp.quantities.extrema(\"radial_velocity\")\n",
-      "\n",
-      "print bv\n",
-      "print rv1\n",
-      "print rv2"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    }
-   ],
-   "metadata": {}
-  }
- ]
-}
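
The deleted cells show the setting side of field parameters
(`set_field_parameter`); the consuming side inside a derived field is
`data.get_field_parameter`.  A hypothetical sketch (the field name and the
zero-velocity fallback here are illustrative, not yt's built-in
definition):

    import numpy as np
    import yt
    from yt import derived_field

    @derived_field(name="x_velocity_minus_bulk", units="cm/s")
    def _x_velocity_minus_bulk(field, data):
        # Read whatever bulk velocity was attached to the data object;
        # fall back to zero if none was set.
        bv = data.get_field_parameter("bulk_velocity")
        if bv is None:
            bv = data.ds.arr(np.zeros(3), "cm/s")
        return data["velocity_x"] - bv[0]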

diff -r 7ea8927bd64826419637dd928cf17c83ca34682f -r bbb89c51375248e2a98197e62d40c13a335e7db4 doc/source/bootcamp/6)_Volume_Rendering.ipynb
--- a/doc/source/bootcamp/6)_Volume_Rendering.ipynb
+++ /dev/null
@@ -1,96 +0,0 @@
-{
- "metadata": {
-  "name": "",
-  "signature": "sha256:2a24bbe82955f9d948b39cbd1b1302968ff57f62f73afb2c7a5c4953393d00ae"
- },
- "nbformat": 3,
- "nbformat_minor": 0,
- "worksheets": [
-  {
-   "cells": [
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "# A Brief Demo of Volume Rendering\n",
-      "\n",
-      "This shows a small amount of volume rendering.  Really, just enough to get your feet wet!"
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "import yt\n",
-      "ds = yt.load(\"IsolatedGalaxy/galaxy0030/galaxy0030\")"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "To create a volume rendering, we need a camera and a transfer function.  We'll use the `ColorTransferFunction`, which accepts (in log space) the minimum and maximum bounds of our transfer function.  This means behavior for data outside these values is undefined.\n",
-      "\n",
-      "We then add on \"layers\" like an onion.  This function can accept a width (here specified) in data units, and also a color map.  Here we add on four layers.\n",
-      "\n",
-      "Finally, we create a camera.  The focal point is `[0.5, 0.5, 0.5]`, the width is 20 kpc (including front-to-back integration) and we specify a transfer function.  Once we've done that, we call `show` to actually cast our rays and display them inline."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "tf = yt.ColorTransferFunction((-28, -24))\n",
-      "tf.add_layers(4, w=0.01)\n",
-      "cam = ds.camera([0.5, 0.5, 0.5], [1.0, 1.0, 1.0], (20, 'kpc'), 512, tf, fields=[\"density\"])\n",
-      "cam.show()"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "If we want to apply a clipping, we can specify the `clip_ratio`.  This will clip the upper bounds to this value times the standard deviation of the values in the image array."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "cam.show(clip_ratio=4)"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
-      "There are several other options we can specify.  Note that here we have turned on the use of ghost zones, shortened the data interval for the transfer function, and widened our gaussian layers."
-     ]
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "tf = yt.ColorTransferFunction((-28, -25))\n",
-      "tf.add_layers(4, w=0.03)\n",
-      "cam = ds.camera([0.5, 0.5, 0.5], [1.0, 1.0, 1.0], (20.0, 'kpc'), 512, tf, no_ghost=False)\n",
-      "cam.show(clip_ratio=4.0)"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    }
-   ],
-   "metadata": {}
-  }
- ]
-}
\ No newline at end of file
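
The `clip_ratio` behavior described in the deleted cells reduces to a
one-line NumPy operation.  A sketch of the semantics only (not yt's
actual rendering code):

    import numpy as np

    def clip_upper(image, clip_ratio):
        # Clamp pixels above clip_ratio standard deviations so a few
        # very bright pixels cannot dominate the normalization.
        return np.minimum(image, clip_ratio * image.std())

    img = np.random.lognormal(sigma=2.0, size=(64, 64))
    print img.max(), clip_upper(img, 4.0).max()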

diff -r 7ea8927bd64826419637dd928cf17c83ca34682f -r bbb89c51375248e2a98197e62d40c13a335e7db4 doc/source/bootcamp/data_inspection.rst
--- a/doc/source/bootcamp/data_inspection.rst
+++ /dev/null
@@ -1,6 +0,0 @@
-.. _data_inspection:
-
-Data Inspection
----------------
-
-.. notebook:: 2)_Data_Inspection.ipynb

diff -r 7ea8927bd64826419637dd928cf17c83ca34682f -r bbb89c51375248e2a98197e62d40c13a335e7db4 doc/source/bootcamp/data_objects_and_time_series.rst
--- a/doc/source/bootcamp/data_objects_and_time_series.rst
+++ /dev/null
@@ -1,4 +0,0 @@
-Data Objects and Time Series
-----------------------------
-
-.. notebook:: 4)_Data_Objects_and_Time_Series.ipynb

diff -r 7ea8927bd64826419637dd928cf17c83ca34682f -r bbb89c51375248e2a98197e62d40c13a335e7db4 doc/source/bootcamp/derived_fields_and_profiles.rst
--- a/doc/source/bootcamp/derived_fields_and_profiles.rst
+++ /dev/null
@@ -1,4 +0,0 @@
-Derived Fields and Profiles
----------------------------
-
-.. notebook:: 5)_Derived_Fields_and_Profiles.ipynb

diff -r 7ea8927bd64826419637dd928cf17c83ca34682f -r bbb89c51375248e2a98197e62d40c13a335e7db4 doc/source/bootcamp/index.rst
--- a/doc/source/bootcamp/index.rst
+++ /dev/null
@@ -1,59 +0,0 @@
-.. _bootcamp:
-
-yt Bootcamp
-===========
-
-The bootcamp is a series of worked examples of how to use much of the
-functionality of yt.  These are simple, short introductions to give you a taste
-of what the code can do and are not meant to be detailed walkthroughs.
-
-There are two ways in which you can go through the bootcamp: interactively and 
-non-interactively.  We recommend the interactive method, but if you're pressed
-for time, you can read through the linked pages below and view the
-worked examples.
-
-To execute the bootcamp interactively, you need to download the repository and
-start the IPython notebook.  If you do not already have the yt repository, the
-easiest way to get the repository is to clone it using mercurial:
-
-.. code-block:: bash
-
-   hg clone https://bitbucket.org/yt_analysis/yt
-
-Now start the IPython notebook from within the repository:
-
-.. code-block:: bash
-
-   cd yt/doc/source/bootcamp
-   yt notebook
-
-This command will give you information about the notebook server and how to
-access it.  You will pick a password (for security reasons) and then
-point your web browser at the notebook server.
-Once you have done so, choose "Introduction" from the list of
-notebooks, which includes an introduction and information about how to download
-the sample data.
-
-.. warning:: The pre-filled notebooks are *far* less fun than running them
-             yourself!  Check out the repo and give it a try.
-
-Here are the notebooks, which have been filled in for inspection:
-
-.. toctree::
-   :maxdepth: 1
-
-   introduction
-   data_inspection
-   simple_visualization
-   data_objects_and_time_series
-   derived_fields_and_profiles
-   volume_rendering
-
-.. note::
-
-   The notebooks use sample datasets that are available for download at
-   http://yt-project.org/data.  See :ref:`bootcamp-introduction` for more
-   details.
-
-Let us know if you would like to contribute other example notebooks, or have
-any suggestions for how these can be improved.

diff -r 7ea8927bd64826419637dd928cf17c83ca34682f -r bbb89c51375248e2a98197e62d40c13a335e7db4 doc/source/bootcamp/introduction.rst
--- a/doc/source/bootcamp/introduction.rst
+++ /dev/null
@@ -1,6 +0,0 @@
-.. _bootcamp-introduction:
-
-Introduction
-------------
-
-.. notebook:: 1)_Introduction.ipynb

diff -r 7ea8927bd64826419637dd928cf17c83ca34682f -r bbb89c51375248e2a98197e62d40c13a335e7db4 doc/source/bootcamp/simple_visualization.rst
--- a/doc/source/bootcamp/simple_visualization.rst
+++ /dev/null
@@ -1,4 +0,0 @@
-Simple Visualization
---------------------
-
-.. notebook:: 3)_Simple_Visualization.ipynb

diff -r 7ea8927bd64826419637dd928cf17c83ca34682f -r bbb89c51375248e2a98197e62d40c13a335e7db4 doc/source/bootcamp/volume_rendering.rst
--- a/doc/source/bootcamp/volume_rendering.rst
+++ /dev/null
@@ -1,4 +0,0 @@
-Volume Rendering
-----------------
-
-.. notebook:: 6)_Volume_Rendering.ipynb

diff -r 7ea8927bd64826419637dd928cf17c83ca34682f -r bbb89c51375248e2a98197e62d40c13a335e7db4 doc/source/conf.py
--- a/doc/source/conf.py
+++ b/doc/source/conf.py
@@ -122,7 +122,7 @@
     bootswatch_theme = "readable",
     navbar_links = [
         ("How to get help", "help/index"),
-        ("Bootcamp notebooks", "bootcamp/index"),
+        ("Quickstart notebooks", "quickstart/index"),
         ("Cookbook", "cookbook/index"),
         ],
     navbar_sidebarrel = False,

diff -r 7ea8927bd64826419637dd928cf17c83ca34682f -r bbb89c51375248e2a98197e62d40c13a335e7db4 doc/source/cookbook/calculating_information.rst
--- a/doc/source/cookbook/calculating_information.rst
+++ b/doc/source/cookbook/calculating_information.rst
@@ -90,3 +90,14 @@
 See :ref:`filtering-particles` for more information.
 
 .. yt_cookbook:: particle_filter_sfr.py
+
+Making a Turbulent Kinetic Energy Power Spectrum
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+This recipe shows how to use `yt` to read data and put it on a uniform
+grid to interface with the NumPy FFT routines and create a turbulent
+kinetic energy power spectrum.  (Note: the dataset used here is of low
+resolution, so the turbulence is not very well-developed.  The spike
+at high wavenumbers is due to non-periodicity in the z-direction).
+
+.. yt_cookbook:: power_spectrum_example.py

diff -r 7ea8927bd64826419637dd928cf17c83ca34682f -r bbb89c51375248e2a98197e62d40c13a335e7db4 doc/source/cookbook/custom_colorbar_tickmarks.rst
--- a/doc/source/cookbook/custom_colorbar_tickmarks.rst
+++ b/doc/source/cookbook/custom_colorbar_tickmarks.rst
@@ -1,4 +1,4 @@
-Custom Colorabar Tickmarks
---------------------------
+Custom Colorbar Tickmarks
+-------------------------
 
 .. notebook:: custom_colorbar_tickmarks.ipynb

diff -r 7ea8927bd64826419637dd928cf17c83ca34682f -r bbb89c51375248e2a98197e62d40c13a335e7db4 doc/source/cookbook/power_spectrum_example.py
--- /dev/null
+++ b/doc/source/cookbook/power_spectrum_example.py
@@ -0,0 +1,118 @@
+import numpy as np
+import matplotlib.pyplot as plt
+import yt
+
+"""
+Make a turbulent KE power spectrum.  Since we are stratified, we apply
+a rho**(1/3) weighting to the velocity to get something that would
+look Kolmogorov (if the turbulence were fully developed).
+
+Ultimately, we aim to compute:
+
+    E(k) = integral (1/2) Vhat(k) . Vhat*(k) dS
+
+where V = rho**n U is the density-weighted velocity field, Vhat is the
+FFT of V, and Vhat* is its complex conjugate.
+
+(Note: sometimes we normalize by 1/volume to get a spectral
+energy density spectrum.)
+
+
+"""
+ 
+
+def doit(ds):
+
+    # an FFT operates on uniformly gridded data.  We'll use the yt
+    # covering grid for this.
+
+    max_level = ds.index.max_level
+
+    ref = int(np.product(ds.ref_factors[0:max_level]))
+
+    low = ds.domain_left_edge
+    dims = ds.domain_dimensions*ref
+
+    nx, ny, nz = dims
+
+    nindex_rho = 1./3.
+
+    Kk = np.zeros( (nx/2+1, ny/2+1, nz/2+1))
+
+    for vel in [("gas", "velocity_x"), ("gas", "velocity_y"), 
+                ("gas", "velocity_z")]:
+
+        Kk += 0.5*fft_comp(ds, ("gas", "density"), vel,
+                           nindex_rho, max_level, low, dims)
+
+    # wavenumbers
+    L = (ds.domain_right_edge - ds.domain_left_edge).d
+
+    kx = np.fft.rfftfreq(nx)*nx/L[0]
+    ky = np.fft.rfftfreq(ny)*ny/L[1]
+    kz = np.fft.rfftfreq(nz)*nz/L[2]
+    
+    # physical limits to the wavenumbers
+    kmin = np.min(1.0/L)
+    kmax = np.max(0.5*dims/L)
+    
+    kbins = np.arange(kmin, kmax, kmin)
+    N = len(kbins)
+
+    # bin the Fourier KE into radial kbins
+    kx3d, ky3d, kz3d = np.meshgrid(kx, ky, kz, indexing="ij")
+    k = np.sqrt(kx3d**2 + ky3d**2 + kz3d**2)
+
+    whichbin = np.digitize(k.flat, kbins)
+    ncount = np.bincount(whichbin)
+    
+    E_spectrum = np.zeros(len(ncount)-1)
+
+    for n in range(1,len(ncount)):
+        E_spectrum[n-1] = np.sum(Kk.flat[whichbin==n])
+
+    k = 0.5*(kbins[0:N-1] + kbins[1:N])
+    E_spectrum = E_spectrum[1:N]
+
+    index = np.argmax(E_spectrum)
+    kmax = k[index]
+    Emax = E_spectrum[index]
+
+    plt.loglog(k, E_spectrum)
+    plt.loglog(k, Emax*(k/kmax)**(-5./3.), ls=":", color="0.5")
+
+    plt.xlabel(r"$k$")
+    plt.ylabel(r"$E(k)dk$")
+
+    plt.savefig("spectrum.png")
+
+
+def fft_comp(ds, irho, iu, nindex_rho, level, low, delta ):
+
+    cube = ds.covering_grid(level, left_edge=low,
+                            dims=delta,
+                            fields=[irho, iu])
+
+    rho = cube[irho].d
+    u = cube[iu].d
+
+    nx, ny, nz = rho.shape
+
+    # do the FFTs -- note that since our data is real, there will be
+    # too much information here.  fftn puts the positive freq terms in
+    # the first half of the axes -- that's what we keep.  Our
+    # normalization has an '8' to account for this clipping to one
+    # octant.
+    ru = np.fft.fftn(rho**nindex_rho * u)[0:nx/2+1,0:ny/2+1,0:nz/2+1]
+    ru = 8.0*ru/(nx*ny*nz)
+
+    return np.abs(ru)**2
+
+
+if __name__ == "__main__":
+
+    ds = yt.load("maestro_xrb_lores_23437")
+    doit(ds)
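
The digitize-based reduction in doit() above, which collapses the 3-D
Fourier energy onto radial bins, can be sanity-checked on its own with
pure NumPy (no yt required):

    import numpy as np

    # Build the one-octant wavenumber magnitudes as in doit(), here for
    # a unit box so the 1/L factors drop out.
    nx = ny = nz = 16
    kx3d, ky3d, kz3d = np.meshgrid(np.fft.rfftfreq(nx)*nx,
                                   np.fft.rfftfreq(ny)*ny,
                                   np.fft.rfftfreq(nz)*nz, indexing="ij")
    k = np.sqrt(kx3d**2 + ky3d**2 + kz3d**2)

    Kk = np.ones_like(k)              # stand-in for the Fourier KE
    kbins = np.arange(1.0, k.max(), 1.0)
    whichbin = np.digitize(k.flat, kbins)

    # Sum the energy that lands in each radial shell.
    E = np.array([Kk.flat[whichbin == n].sum()
                  for n in range(1, len(kbins))])
    print E[:5]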

diff -r 7ea8927bd64826419637dd928cf17c83ca34682f -r bbb89c51375248e2a98197e62d40c13a335e7db4 doc/source/developing/building_the_docs.rst
--- a/doc/source/developing/building_the_docs.rst
+++ b/doc/source/developing/building_the_docs.rst
@@ -28,7 +28,7 @@
 * Analyzing
 * Examining
 * Cookbook
-* Bootcamp
+* Quickstart
 * Developing
 * Reference
 * Help

diff -r 7ea8927bd64826419637dd928cf17c83ca34682f -r bbb89c51375248e2a98197e62d40c13a335e7db4 doc/source/examining/low_level_inspection.rst
--- a/doc/source/examining/low_level_inspection.rst
+++ b/doc/source/examining/low_level_inspection.rst
@@ -12,7 +12,7 @@
           based simulations.  For now, these are represented as patches, with
           the attendant properties.
 
-For a more basic introduction, see :ref:`bootcamp` and more specifically
+For a more basic introduction, see :ref:`quickstart` and more specifically
 :ref:`data_inspection`.
 
 .. _examining-grid-hierarchies:

diff -r 7ea8927bd64826419637dd928cf17c83ca34682f -r bbb89c51375248e2a98197e62d40c13a335e7db4 doc/source/index.rst
--- a/doc/source/index.rst
+++ b/doc/source/index.rst
@@ -34,7 +34,7 @@
      <tr valign="top"><td width="25%"><p>
-           <a href="bootcamp/index.html">yt Bootcamp</a>
+           <a href="quickstart/index.html">yt Quickstart</a></p></td><td width="75%">
@@ -127,7 +127,7 @@
    :hidden:
 
    installing
-   yt Bootcamp <bootcamp/index>
+   yt Quickstart <quickstart/index>
    yt3differences
    cookbook/index
    visualizing/index

diff -r 7ea8927bd64826419637dd928cf17c83ca34682f -r bbb89c51375248e2a98197e62d40c13a335e7db4 doc/source/quickstart/1)_Introduction.ipynb
--- /dev/null
+++ b/doc/source/quickstart/1)_Introduction.ipynb
@@ -0,0 +1,72 @@
+{
+ "metadata": {
+  "name": "",
+  "signature": "sha256:7c68cdd34ce71c042fa3c4badc4587693f1cc1b6aa0b3c99a4a63a1db6fe57f9"
+ },
+ "nbformat": 3,
+ "nbformat_minor": 0,
+ "worksheets": [
+  {
+   "cells": [
+    {
+     "cell_type": "markdown",
+     "metadata": {},
+     "source": [
+      "# Welcome to the yt quickstart!\n",
+      "\n",
+      "In this brief tutorial, we'll go over how to load up data, analyze things, inspect your data, and make some visualizations.\n",
+      "\n",
+      "Our documentation page can provide information on a variety of the commands that are used here, both in narrative documentation as well as recipes for specific functionality in our cookbook.  The documentation exists at http://yt-project.org/doc/.  If you encounter problems, look for help here: http://yt-project.org/doc/help/index.html.\n",
+      "\n",
+      "## Acquiring the datasets for this tutorial\n",
+      "\n",
+      "If you are executing these tutorials interactively, you need some sample datasets on which to run the code.  You can download these datasets at http://yt-project.org/data/.  The datasets necessary for each lesson are noted next to the corresponding tutorial.\n",
+      "\n",
+      "## What's Next?\n",
+      "\n",
+      "The Notebooks are meant to be explored in this order:\n",
+      "\n",
+      "1. Introduction\n",
+      "2. Data Inspection (IsolatedGalaxy dataset)\n",
+      "3. Simple Visualization (enzo_tiny_cosmology & Enzo_64 datasets)\n",
+      "4. Data Objects and Time Series (IsolatedGalaxy dataset)\n",
+      "5. Derived Fields and Profiles (IsolatedGalaxy dataset)\n",
+      "6. Volume Rendering (IsolatedGalaxy dataset)"
+     ]
+    },
+    {
+     "cell_type": "markdown",
+     "metadata": {},
+     "source": [
+      "The following code will download the data needed for this tutorial automatically using `curl`. It may take some time so please wait when the kernel is busy. You will need to set `download_datasets` to True before using it."
+     ]
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "download_datasets = False\n",
+      "if download_datasets:\n",
+      "    !curl -sSO http://yt-project.org/data/enzo_tiny_cosmology.tar\n",
+      "    print \"Got enzo_tiny_cosmology\"\n",
+      "    !tar xf enzo_tiny_cosmology.tar\n",
+      "    \n",
+      "    !curl -sSO http://yt-project.org/data/Enzo_64.tar\n",
+      "    print \"Got Enzo_64\"\n",
+      "    !tar xf Enzo_64.tar\n",
+      "    \n",
+      "    !curl -sSO http://yt-project.org/data/IsolatedGalaxy.tar\n",
+      "    print \"Got IsolatedGalaxy\"\n",
+      "    !tar xf IsolatedGalaxy.tar\n",
+      "    \n",
+      "    print \"All done!\""
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": []
+    }
+   ],
+   "metadata": {}
+  }
+ ]
+}
\ No newline at end of file

This diff is so big that we needed to truncate the remainder.

https://bitbucket.org/yt_analysis/yt/commits/c796979108a0/
Changeset:   c796979108a0
Branch:      yt
User:        drudd
Date:        2014-09-05 21:35:26
Summary:     Fixed probable bug in WindowedVariableMeshPanner
Affected #:  1 file

diff -r bbb89c51375248e2a98197e62d40c13a335e7db4 -r c796979108a0dccf744f127875cc7f64cda6d81c yt/visualization/image_panner/vm_panner.py
--- a/yt/visualization/image_panner/vm_panner.py
+++ b/yt/visualization/image_panner/vm_panner.py
@@ -215,8 +215,8 @@
         dy = (self.ylim[1] - self.ylim[0])/self.size[1]
         my_lim = (self.xlim[0] + dx*self.start_indices[0],
                   self.xlim[0] + dx*(self.start_indices[0] + self.my_size[0]),
-                  self.ylim[0] + dx*self.start_indices[1],
-                  self.ylim[0] + dx*(self.start_indices[1] + self.my_size[1]))
+                  self.ylim[0] + dy*self.start_indices[1],
+                  self.ylim[0] + dy*(self.start_indices[1] + self.my_size[1]))
         new_buffer = FixedResolutionBuffer(self.source, my_lim, self.my_size)
         self._buffer = new_buffer
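
The fix is the classic transposed-axis slip: the y limits were being
built with the x pixel scale, which silently skews the sub-window
whenever the image is not square.  The corrected arithmetic in
isolation (a plain-Python sketch of the expressions in the diff):

    def window_limits(xlim, ylim, size, start_indices, my_size):
        dx = (xlim[1] - xlim[0]) / size[0]   # per-pixel width along x
        dy = (ylim[1] - ylim[0]) / size[1]   # per-pixel width along y
        return (xlim[0] + dx * start_indices[0],
                xlim[0] + dx * (start_indices[0] + my_size[0]),
                ylim[0] + dy * start_indices[1],
                ylim[0] + dy * (start_indices[1] + my_size[1]))

    # A 512x256 image over a 1x2 domain: the old dx-everywhere code gave
    # an upper y bound of 0.25 here instead of the correct 1.0.
    print window_limits((0.0, 1.0), (0.0, 2.0), (512, 256), (0, 0), (256, 128))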
 


https://bitbucket.org/yt_analysis/yt/commits/3e1834adf8a0/
Changeset:   3e1834adf8a0
Branch:      yt
User:        drudd
Date:        2014-09-05 21:37:27
Summary:     Removed unused variable stack in insert_ipython
Affected #:  1 file

diff -r c796979108a0dccf744f127875cc7f64cda6d81c -r 3e1834adf8a05197902c140b85ae5e8af60636f9 yt/funcs.py
--- a/yt/funcs.py
+++ b/yt/funcs.py
@@ -270,7 +270,6 @@
 
     api_version = get_ipython_api_version()
 
-    stack = inspect.stack()
     frame = inspect.stack()[num_up]
     loc = frame[0].f_locals.copy()
     glo = frame[0].f_globals


https://bitbucket.org/yt_analysis/yt/commits/48a860428bab/
Changeset:   48a860428bab
Branch:      yt
User:        drudd
Date:        2014-09-05 21:38:47
Summary:     Removed unused variable stack in get_script_contents
Affected #:  1 file

diff -r 3e1834adf8a05197902c140b85ae5e8af60636f9 -r 48a860428bab92c09b3315719384404dfafbbc04 yt/funcs.py
--- a/yt/funcs.py
+++ b/yt/funcs.py
@@ -536,7 +536,6 @@
     return version_info
 
 def get_script_contents():
-    stack = inspect.stack()
     top_frame = inspect.stack()[-1]
     finfo = inspect.getframeinfo(top_frame[0])
     if finfo[2] != "<module>": return None


https://bitbucket.org/yt_analysis/yt/commits/5fc41aff2aea/
Changeset:   5fc41aff2aea
Branch:      yt
User:        drudd
Date:        2014-09-05 21:42:10
Summary:     Ensure variable names match in setup_halo_analysis_fields
Affected #:  1 file

diff -r 48a860428bab92c09b3315719384404dfafbbc04 -r 5fc41aff2aeae97d3ab190ca5b5991f176e66843 yt/analysis_modules/halo_analysis/fields.py
--- a/yt/analysis_modules/halo_analysis/fields.py
+++ b/yt/analysis_modules/halo_analysis/fields.py
@@ -28,7 +28,7 @@
     if slice_info is None:
         sl_left = slice(None, -2, None)
         sl_right = slice(2, None, None)
-        div_fac = 2.0
+        div_face = 2.0
     else:
         sl_left, sl_right, div_face = slice_info
 


https://bitbucket.org/yt_analysis/yt/commits/db66e4acb2ff/
Changeset:   db66e4acb2ff
Branch:      yt
User:        drudd
Date:        2014-09-05 21:43:34
Summary:     Remove unused variable hds
Affected #:  1 file

diff -r 5fc41aff2aeae97d3ab190ca5b5991f176e66843 -r db66e4acb2ff8f063c0c0db3dbc00188dcd283da yt/analysis_modules/halo_analysis/halo_callbacks.py
--- a/yt/analysis_modules/halo_analysis/halo_callbacks.py
+++ b/yt/analysis_modules/halo_analysis/halo_callbacks.py
@@ -80,7 +80,6 @@
     """
 
     dds = halo.halo_catalog.data_ds
-    hds = halo.halo_catalog.halos_ds
     center = dds.arr([halo.quantities["particle_position_%s" % axis] \
                       for axis in "xyz"])
     radius = factor * halo.quantities[radius_field]


https://bitbucket.org/yt_analysis/yt/commits/a38b408b0e8d/
Changeset:   a38b408b0e8d
Branch:      yt
User:        drudd
Date:        2014-09-05 21:46:39
Summary:     Removed unused variable area_final
Affected #:  1 file

diff -r db66e4acb2ff8f063c0c0db3dbc00188dcd283da -r a38b408b0e8d5d9d03af361fc80f6b32169c1255 yt/analysis_modules/halo_mass_function/halo_mass_function.py
--- a/yt/analysis_modules/halo_mass_function/halo_mass_function.py
+++ b/yt/analysis_modules/halo_mass_function/halo_mass_function.py
@@ -832,7 +832,6 @@
     area1 = np.sum(areas)
     # Now we refine until the error is smaller than *error*.
     diff = area1 - area0
-    area_final = area1
     area_last = area1
     one_pow = 3
     while diff > error:


https://bitbucket.org/yt_analysis/yt/commits/957779f11006/
Changeset:   957779f11006
Branch:      yt
User:        drudd
Date:        2014-09-05 21:47:29
Summary:     Commented out unused variable tf_cbnu but kept it in the source as an example
Affected #:  1 file

diff -r a38b408b0e8d5d9d03af361fc80f6b32169c1255 -r 957779f11006910c7a57922a97d8eddf4652c479 yt/analysis_modules/halo_mass_function/halo_mass_function.py
--- a/yt/analysis_modules/halo_mass_function/halo_mass_function.py
+++ b/yt/analysis_modules/halo_mass_function/halo_mass_function.py
@@ -788,7 +788,7 @@
     
         # Now compute the CDM+HDM+baryon transfer functions
         tf_cb = self.tf_master*self.growth_cb/self.growth_k0;
-        tf_cbnu = self.tf_master*self.growth_cbnu/self.growth_k0;
+        #tf_cbnu = self.tf_master*self.growth_cbnu/self.growth_k0;
         return tf_cb
 
 


https://bitbucket.org/yt_analysis/yt/commits/f66372ebf4bd/
Changeset:   f66372ebf4bd
Branch:      yt
User:        drudd
Date:        2014-09-05 21:49:58
Summary:     Removed unused variable tau1
Affected #:  1 file

diff -r 957779f11006910c7a57922a97d8eddf4652c479 -r f66372ebf4bd86b824455e07ef3943d560b16965 yt/analysis_modules/absorption_spectrum/absorption_line.py
--- a/yt/analysis_modules/absorption_spectrum/absorption_line.py
+++ b/yt/analysis_modules/absorption_spectrum/absorption_line.py
@@ -195,7 +195,6 @@
     ## tau_0
     tau_X = np.sqrt(np.pi) * e**2 / (me * ccgs) * \
         column_density * fval / vdop
-    tau1 = tau_X * lam1cgs
     tau0 = tau_X * lam0cgs
 
     # dimensionless frequency offset in units of doppler freq


https://bitbucket.org/yt_analysis/yt/commits/110f51d68d0e/
Changeset:   110f51d68d0e
Branch:      yt
User:        drudd
Date:        2014-09-05 21:53:19
Summary:     Removed unused variable total_vol
Affected #:  1 file

diff -r f66372ebf4bd86b824455e07ef3943d560b16965 -r 110f51d68d0ef6221f7fa97290f3852e5267159e yt/analysis_modules/level_sets/contour_finder.py
--- a/yt/analysis_modules/level_sets/contour_finder.py
+++ b/yt/analysis_modules/level_sets/contour_finder.py
@@ -32,7 +32,6 @@
     contours = {}
     node_ids = []
     DLE = data_source.ds.domain_left_edge
-    total_vol = None
     selector = getattr(data_source, "base_object", data_source).selector
     masks = dict((g.id, m) for g, m in data_source.blocks)
     for (g, node, (sl, dims, gi)) in data_source.tiles.slice_traverse():


https://bitbucket.org/yt_analysis/yt/commits/146936a31ea1/
Changeset:   146936a31ea1
Branch:      yt
User:        drudd
Date:        2014-09-05 21:54:48
Summary:     Fixed apparent bug in variable name pixel_area in project_light_cone
Affected #:  1 file

diff -r 110f51d68d0ef6221f7fa97290f3852e5267159e -r 146936a31ea14fe897a82794174a2070bb680e1c yt/analysis_modules/cosmological_observation/light_cone/light_cone.py
--- a/yt/analysis_modules/cosmological_observation/light_cone/light_cone.py
+++ b/yt/analysis_modules/cosmological_observation/light_cone/light_cone.py
@@ -328,7 +328,7 @@
                                                         output["redshift"])
                 proper_box_size = self.simulation.box_size / \
                   (1.0 + output["redshift"])
-                pixel_xarea = (proper_box_size.in_cgs() / pixels)**2 #in proper cm^2
+                pixel_area = (proper_box_size.in_cgs() / pixels)**2 #in proper cm^2
                 factor = pixel_area / (4.0 * np.pi * dL.in_cgs()**2)
                 mylog.info("Distance to slice = %s" % dL)
                 frb[field] *= factor #in erg/s/cm^2/Hz on observer"s image plane.


https://bitbucket.org/yt_analysis/yt/commits/71c07ed31373/
Changeset:   71c07ed31373
Branch:      yt
User:        drudd
Date:        2014-09-05 21:55:56
Summary:     Removed unused variable ndomains_finished
Affected #:  1 file

diff -r 146936a31ea14fe897a82794174a2070bb680e1c -r 71c07ed313737e2c5477cf3202a1f9fa95cc9fe4 yt/analysis_modules/sunrise_export/sunrise_exporter.py
--- a/yt/analysis_modules/sunrise_export/sunrise_exporter.py
+++ b/yt/analysis_modules/sunrise_export/sunrise_exporter.py
@@ -128,7 +128,6 @@
     if fni.endswith('.fits'):
         fni = fni.replace('.fits','')
 
-    ndomains_finished = 0
     for (num_halos, domain, halos) in domains_list:
         dle,dre = domain
         print 'exporting: '
@@ -154,7 +153,6 @@
             fh.write("%6.6e \n"%(halo.Rvir*ds['kpc']))
         fh.close()
         export_to_sunrise(ds, fnf, star_particle_type, dle*1.0/dn, dre*1.0/dn)
-        ndomains_finished +=1
 
 def domains_from_halos(ds,halo_list,frvir=0.15):
     domains = {}


https://bitbucket.org/yt_analysis/yt/commits/3eeaadac6ab0/
Changeset:   3eeaadac6ab0
Branch:      yt
User:        drudd
Date:        2014-09-05 21:57:01
Summary:     Removed unused variables domains_limits and domains_halos
Affected #:  1 file

diff -r 71c07ed313737e2c5477cf3202a1f9fa95cc9fe4 -r 3eeaadac6ab0ee4387ce4ba5c17be323db6232d7 yt/analysis_modules/sunrise_export/sunrise_exporter.py
--- a/yt/analysis_modules/sunrise_export/sunrise_exporter.py
+++ b/yt/analysis_modules/sunrise_export/sunrise_exporter.py
@@ -170,8 +170,6 @@
     domains_list = [(len(v),k,v) for k,v in domains.iteritems()]
     domains_list.sort() 
     domains_list.reverse() #we want the most populated domains first
-    domains_limits = [d[1] for d in domains_list]
-    domains_halos  = [d[2] for d in domains_list]
     return domains_list
 
 def prepare_octree(ds,ile,start_level=0,debug=True,dd=None,center=None):


https://bitbucket.org/yt_analysis/yt/commits/b226ad2570e9/
Changeset:   b226ad2570e9
Branch:      yt
User:        drudd
Date:        2014-09-05 21:58:34
Summary:     Removed unused variable c
Affected #:  1 file

diff -r 3eeaadac6ab0ee4387ce4ba5c17be323db6232d7 -r b226ad2570e919b45bc5f2b57c2cc3d50f741cdd yt/analysis_modules/sunrise_export/sunrise_exporter.py
--- a/yt/analysis_modules/sunrise_export/sunrise_exporter.py
+++ b/yt/analysis_modules/sunrise_export/sunrise_exporter.py
@@ -241,10 +241,6 @@
     hs       = hilbert_state()
     start_time = time.time()
     if debug:
-        if center is not None: 
-            c = center*ds['kpc']
-        else:
-            c = ile*1.0/ds.domain_dimensions*ds['kpc']
         printing = lambda x: print_oct(x)
     else:
         printing = None


https://bitbucket.org/yt_analysis/yt/commits/5ac2ee90902b/
Changeset:   5ac2ee90902b
Branch:      yt
User:        drudd
Date:        2014-09-05 21:59:34
Summary:     Removed unnecessary enumerate
Affected #:  1 file

diff -r b226ad2570e919b45bc5f2b57c2cc3d50f741cdd -r 5ac2ee90902be6db1aabfc21e7400dac7d92654f yt/analysis_modules/sunrise_export/sunrise_exporter.py
--- a/yt/analysis_modules/sunrise_export/sunrise_exporter.py
+++ b/yt/analysis_modules/sunrise_export/sunrise_exporter.py
@@ -324,7 +324,7 @@
         #then translate onto the subgrid integer index 
         parent_fle  = grid.left_edges + cell_index*grid.dx
         subgrid_ile = np.floor((parent_fle - subgrid.left_edges)/subgrid.dx)
-        for i, (vertex,hilbert_child) in enumerate(hilbert):
+        for (vertex, hilbert_child) in hilbert:
             #vertex is a combination of three 0s and 1s to 
             #denote each of the 8 octs
             if level < 0:


https://bitbucket.org/yt_analysis/yt/commits/ba64592a4b5e/
Changeset:   ba64592a4b5e
Branch:      yt
User:        drudd
Date:        2014-09-05 22:01:03
Summary:     Removed unused variable dl
Affected #:  1 file

diff -r 5ac2ee90902be6db1aabfc21e7400dac7d92654f -r ba64592a4b5ee38ff51a27d81da0cefce8fa382b yt/analysis_modules/sunyaev_zeldovich/tests/test_projection.py
--- a/yt/analysis_modules/sunyaev_zeldovich/tests/test_projection.py
+++ b/yt/analysis_modules/sunyaev_zeldovich/tests/test_projection.py
@@ -89,8 +89,6 @@
     L = 2 * R * cm_per_kpc
     bbox = np.array([[-0.5,0.5],[-0.5,0.5],[-0.5,0.5]]) * L
 
-    dl = L/nz
-
     ds = load_uniform_grid(data, ddims, length_unit='cm', bbox=bbox)
     ds.index
 


https://bitbucket.org/yt_analysis/yt/commits/edc22aacc0df/
Changeset:   edc22aacc0df
Branch:      yt
User:        drudd
Date:        2014-09-05 22:03:07
Summary:     Removed unused variables ncells and cell_vol
Affected #:  1 file

diff -r ba64592a4b5ee38ff51a27d81da0cefce8fa382b -r edc22aacc0dfd3fa62082f8c4fbde302c8b93385 yt/analysis_modules/photon_simulator/photon_models.py
--- a/yt/analysis_modules/photon_simulator/photon_models.py
+++ b/yt/analysis_modules/photon_simulator/photon_models.py
@@ -128,7 +128,6 @@
         energy = self.spectral_model.ebins
     
         cell_em = EM[idxs]*vol_scale
-        cell_vol = vol[idxs]*vol_scale
     
         number_of_photons = np.zeros(dshape, dtype='uint64')
         energies = []
@@ -139,7 +138,6 @@
 
         for i, ikT in enumerate(kT_idxs):
 
-            ncells = int(bcounts[i])
             ibegin = bcell[i]
             iend = ecell[i]
             kT = kT_bins[ikT] + 0.5*dkT


https://bitbucket.org/yt_analysis/yt/commits/6b88d9afe851/
Changeset:   6b88d9afe851
Branch:      yt
User:        drudd
Date:        2014-09-05 22:03:49
Summary:     Removed unused variable num_cells
Affected #:  1 file

diff -r edc22aacc0dfd3fa62082f8c4fbde302c8b93385 -r 6b88d9afe8513b095cf3387622d522332773bb79 yt/analysis_modules/photon_simulator/photon_simulator.py
--- a/yt/analysis_modules/photon_simulator/photon_simulator.py
+++ b/yt/analysis_modules/photon_simulator/photon_simulator.py
@@ -490,7 +490,6 @@
         z_hat = orient.unit_vectors[2]
 
         n_ph = self.photons["NumberOfPhotons"]
-        num_cells = len(n_ph)
         n_ph_tot = n_ph.sum()
         
         eff_area = None


https://bitbucket.org/yt_analysis/yt/commits/bffd5536eb07/
Changeset:   bffd5536eb07
Branch:      yt
User:        drudd
Date:        2014-09-05 22:03:58
Summary:     Removed unused variable pindex
Affected #:  1 file

diff -r 6b88d9afe8513b095cf3387622d522332773bb79 -r bffd5536eb07a2d8ff8b229c6c346f4f951eb958 yt/analysis_modules/photon_simulator/photon_simulator.py
--- a/yt/analysis_modules/photon_simulator/photon_simulator.py
+++ b/yt/analysis_modules/photon_simulator/photon_simulator.py
@@ -681,7 +681,6 @@
         phYY = events["ypix"][eidxs]
 
         detectedChannels = []
-        pindex = 0
 
         # run through all photon energies and find which bin they go in
         k = 0


https://bitbucket.org/yt_analysis/yt/commits/1057ee84fd9f/
Changeset:   1057ee84fd9f
Branch:      yt
User:        drudd
Date:        2014-09-05 22:05:20
Summary:     Removed unused variable de
Affected #:  1 file

diff -r bffd5536eb07a2d8ff8b229c6c346f4f951eb958 -r 1057ee84fd9f5ed7471f5310e23e5596666effa4 yt/analysis_modules/photon_simulator/photon_simulator.py
--- a/yt/analysis_modules/photon_simulator/photon_simulator.py
+++ b/yt/analysis_modules/photon_simulator/photon_simulator.py
@@ -666,7 +666,6 @@
         tblhdu = hdulist["MATRIX"]
         n_de = len(tblhdu.data["ENERG_LO"])
         mylog.info("Number of energy bins in RMF: %d" % (n_de))
-        de = tblhdu.data["ENERG_HI"] - tblhdu.data["ENERG_LO"]
         mylog.info("Energy limits: %g %g" % (min(tblhdu.data["ENERG_LO"]),
                                              max(tblhdu.data["ENERG_HI"])))
 


https://bitbucket.org/yt_analysis/yt/commits/43ed9c8c1004/
Changeset:   43ed9c8c1004
Branch:      yt
User:        drudd
Date:        2014-09-05 22:26:41
Summary:     Removed unused import coordinates_from_header
Affected #:  1 file

diff -r 1057ee84fd9f5ed7471f5310e23e5596666effa4 -r 43ed9c8c100470734fffc28ecbb5d0e4b2fb1f99 yt/data_objects/data_containers.py
--- a/yt/data_objects/data_containers.py
+++ b/yt/data_objects/data_containers.py
@@ -418,7 +418,6 @@
         otherwise Glue will be started.
         """
         from glue.core import DataCollection, Data
-        from glue.core.coordinates import coordinates_from_header
         from glue.qt.glue_application import GlueApplication
         
         gdata = Data(label=label)


https://bitbucket.org/yt_analysis/yt/commits/6d59438ab0ea/
Changeset:   6d59438ab0ea
Branch:      yt
User:        drudd
Date:        2014-09-05 22:41:10
Summary:     Fixed radial_absolute vector field definition
Affected #:  1 file

diff -r 43ed9c8c100470734fffc28ecbb5d0e4b2fb1f99 -r 6d59438ab0eaf7974b17401856a1327a88a07eb0 yt/fields/vector_operations.py
--- a/yt/fields/vector_operations.py
+++ b/yt/fields/vector_operations.py
@@ -131,7 +131,7 @@
     registry.add_field((ftype, "radial_%s" % basename),
                        function = _radial, units = field_units)
     registry.add_field((ftype, "radial_%s_absolute" % basename),
-                       function = _radial, units = field_units)
+                       function = _radial_absolute, units = field_units)
     registry.add_field((ftype, "tangential_%s" % basename),
                        function=_tangential, units = field_units)
 


https://bitbucket.org/yt_analysis/yt/commits/52e04e462741/
Changeset:   52e04e462741
Branch:      yt
User:        drudd
Date:        2014-09-05 22:58:27
Summary:     Fixed variable naming convention to div_fac
Affected #:  1 file

diff -r 6d59438ab0eaf7974b17401856a1327a88a07eb0 -r 52e04e4627414db75441891e63a0c27e34b47f0f yt/analysis_modules/halo_analysis/fields.py
--- a/yt/analysis_modules/halo_analysis/fields.py
+++ b/yt/analysis_modules/halo_analysis/fields.py
@@ -28,9 +28,9 @@
     if slice_info is None:
         sl_left = slice(None, -2, None)
         sl_right = slice(2, None, None)
-        div_face = 2.0
+        div_fac = 2.0
     else:
-        sl_left, sl_right, div_face = slice_info
+        sl_left, sl_right, div_fac = slice_info
 
     def _virial_radius(field, data):
         virial_radius = data.get_field_parameter("virial_radius")
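
For context on why the matching names matter: sl_left, sl_right, and
div_fac together encode a centered-difference stencil over a field
array.  A standalone sketch of the idea (assuming a 1-D array with
uniform spacing dx):

    import numpy as np

    sl_left = slice(None, -2, None)    # selects f[i-1] over the interior
    sl_right = slice(2, None, None)    # selects f[i+1] over the interior
    div_fac = 2.0

    def centered_difference(f, dx):
        # (f[i+1] - f[i-1]) / (2*dx); exact for quadratics
        return (f[sl_right] - f[sl_left]) / (div_fac * dx)

    x = np.linspace(0.0, 1.0, 11)
    print centered_difference(x**2, x[1] - x[0])   # ~2x on the interior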


https://bitbucket.org/yt_analysis/yt/commits/873acca93e9e/
Changeset:   873acca93e9e
Branch:      yt
User:        drudd
Date:        2014-09-05 23:02:01
Summary:     Removed unused function _particle_angular_momentum
Affected #:  1 file

diff -r 52e04e4627414db75441891e63a0c27e34b47f0f -r 873acca93e9eda5cb5de9e3ed445a648d8bf3c17 yt/fields/particle_fields.py
--- a/yt/fields/particle_fields.py
+++ b/yt/fields/particle_fields.py
@@ -322,10 +322,6 @@
     create_magnitude_field(registry, "particle_specific_angular_momentum",
                            "cm**2/s", ftype=ptype, particle_type=True)
     
-    def _particle_angular_momentum(field, data):
-        return data[ptype, "particle_mass"] \
-             * data[ptype, "particle_specific_angular_momentum"]
-
     def _particle_angular_momentum_x(field, data):
         return data[ptype, "particle_mass"] * \
                data[ptype, "particle_specific_angular_momentum_x"]


https://bitbucket.org/yt_analysis/yt/commits/c92c03b6370a/
Changeset:   c92c03b6370a
Branch:      yt
User:        drudd
Date:        2014-09-05 23:08:22
Summary:     decompose_array is actually just a wrapper around split_array
Affected #:  1 file

diff -r 873acca93e9eda5cb5de9e3ed445a648d8bf3c17 -r c92c03b6370ae152a82f3c2df66201e1a5acff2c yt/utilities/decompose.py
--- a/yt/utilities/decompose.py
+++ b/yt/utilities/decompose.py
@@ -34,13 +34,7 @@
     """ Calculate list of product(psize) subarrays of arr, along with their
         left and right edges
     """
-    grid_left_edges = np.empty([np.product(psize), 3], dtype=np.float64)
-    grid_right_edges = np.empty([np.product(psize), 3], dtype=np.float64)
-    n_d = shape
-    d_s = (bbox[:, 1] - bbox[:, 0]) / n_d
-    grid_left_edges, grid_right_edges, shapes, slices = \
-            split_array(bbox[:, 0], bbox[:, 1], shape, psize)
-    return grid_left_edges, grid_right_edges, shapes, slices
+    return split_array(bbox[:, 0], bbox[:, 1], shape, psize)
 
 
 def evaluate_domain_decomposition(n_d, pieces, ldom):


https://bitbucket.org/yt_analysis/yt/commits/5b1b7f4b11c4/
Changeset:   5b1b7f4b11c4
Branch:      yt
User:        drudd
Date:        2014-09-05 23:08:58
Summary:     Removed unused variable sp_list
Affected #:  1 file

diff -r c92c03b6370ae152a82f3c2df66201e1a5acff2c -r 5b1b7f4b11c46e6dc885c814b6c38cb191240874 yt/utilities/linear_interpolators.py
--- a/yt/utilities/linear_interpolators.py
+++ b/yt/utilities/linear_interpolators.py
@@ -240,7 +240,6 @@
     Return an iterator over EnzoSphere objects generated from the appropriate 
     columns in *filename*.  Optionally specify the *unit* radius is in.
     """
-    sp_list = []
     for line in open(filename):
         if line.startswith("#"): continue
         vals = line.split()


https://bitbucket.org/yt_analysis/yt/commits/503173139c9a/
Changeset:   503173139c9a
Branch:      yt
User:        drudd
Date:        2014-09-05 23:10:27
Summary:     Removed unused variable grid_ends
Affected #:  1 file

diff -r 5b1b7f4b11c46e6dc885c814b6c38cb191240874 -r 503173139c9ada60253724b63f0e7cd41adfe647 yt/utilities/flagging_methods.py
--- a/yt/utilities/flagging_methods.py
+++ b/yt/utilities/flagging_methods.py
@@ -147,7 +147,6 @@
         for dim in range(3):
             sig = self.sigs[dim]
             sd = sig[:-2] - 2.0*sig[1:-1] + sig[2:]
-            grid_ends = np.zeros((sig.size, 2))
             ng = 0
             center = int((self.flagged.shape[dim] - 1) / 2)
             strength = zero_strength = 0


https://bitbucket.org/yt_analysis/yt/commits/62af92929989/
Changeset:   62af92929989
Branch:      yt
User:        drudd
Date:        2014-09-05 23:11:16
Summary:     Removed unused variable ng
Affected #:  1 file

diff -r 503173139c9ada60253724b63f0e7cd41adfe647 -r 62af92929989cf76740b3c7b39fbe25cb1d71818 yt/utilities/flagging_methods.py
--- a/yt/utilities/flagging_methods.py
+++ b/yt/utilities/flagging_methods.py
@@ -147,7 +147,6 @@
         for dim in range(3):
             sig = self.sigs[dim]
             sd = sig[:-2] - 2.0*sig[1:-1] + sig[2:]
-            ng = 0
             center = int((self.flagged.shape[dim] - 1) / 2)
             strength = zero_strength = 0
             for i in range(1, sig.size-2):


https://bitbucket.org/yt_analysis/yt/commits/3e73098ac08e/
Changeset:   3e73098ac08e
Branch:      yt
User:        drudd
Date:        2014-09-05 23:12:32
Summary:     Removed unused ftell call in skip
Affected #:  1 file

diff -r 62af92929989cf76740b3c7b39fbe25cb1d71818 -r 3e73098ac08edc3680702edc61ec6eab6c3d85ba yt/utilities/fortran_utils.py
--- a/yt/utilities/fortran_utils.py
+++ b/yt/utilities/fortran_utils.py
@@ -208,7 +208,6 @@
     >>> skip(f, 3)
     """
     skipped = []
-    pos = f.tell()
     for i in range(n):
         fmt = endian+"I"
         size = f.read(struct.calcsize(fmt))


https://bitbucket.org/yt_analysis/yt/commits/be9014a5fa15/
Changeset:   be9014a5fa15
Branch:      yt
User:        drudd
Date:        2014-09-05 23:14:01
Summary:     Removed unused variable chunks
Affected #:  1 file

diff -r 3e73098ac08edc3680702edc61ec6eab6c3d85ba -r be9014a5fa15c2f29af14464196fe501ea165025 yt/utilities/minimal_representation.py
--- a/yt/utilities/minimal_representation.py
+++ b/yt/utilities/minimal_representation.py
@@ -207,7 +207,6 @@
 
     def _generate_post(self):
         metadata = self._attrs
-        chunks = []
         return (metadata, ("chunks", []))
 
 class MinimalNotebook(MinimalRepresentation):


https://bitbucket.org/yt_analysis/yt/commits/c1bacbd14b6f/
Changeset:   c1bacbd14b6f
Branch:      yt
User:        drudd
Date:        2014-09-05 23:15:24
Summary:     Added missing variable data to the global statement
Affected #:  1 file

diff -r be9014a5fa15c2f29af14464196fe501ea165025 -r c1bacbd14b6f7e2e710617331824ec2927eac634 yt/utilities/lib/tests/test_alt_ray_tracers.py
--- a/yt/utilities/lib/tests/test_alt_ray_tracers.py
+++ b/yt/utilities/lib/tests/test_alt_ray_tracers.py
@@ -14,7 +14,7 @@
 
 def setup():
     # set up some sample cylindrical grid data, radiating out from center
-    global left_grid, right_grid, amr_levels, center_grid
+    global left_grid, right_grid, amr_levels, center_grid, data
     np.seterr(all='ignore')
     l1, r1, lvl1 = amrspace([0.0, 1.0, 0.0, -1.0, 0.0, 2*np.pi], levels=(7,7,0))
     l2, r2, lvl2 = amrspace([0.0, 1.0, 0.0,  1.0, 0.0, 2*np.pi], levels=(7,7,0))
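
The missing name matters because of Python's scoping rules: an
assignment inside setup() that is not declared global binds a
function-local name, so the module-level `data` that the tests read
would never be filled in.  A minimal illustration:

    data = None

    def setup_without_global():
        data = {"filled": True}    # binds a new local; module-level untouched

    def setup_with_global():
        global data
        data = {"filled": True}    # rebinds the module-level name

    setup_without_global()
    print data                     # None
    setup_with_global()
    print data                     # {'filled': True}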


https://bitbucket.org/yt_analysis/yt/commits/b0aa0b325b53/
Changeset:   b0aa0b325b53
Branch:      yt
User:        drudd
Date:        2014-09-05 23:17:08
Summary:     Removed unnecessary enumerate in run_all_tests
Affected #:  1 file

diff -r c1bacbd14b6f7e2e710617331824ec2927eac634 -r b0aa0b325b53f01677b40b20bec8505657c04ed9 yt/utilities/answer_testing/runner.py
--- a/yt/utilities/answer_testing/runner.py
+++ b/yt/utilities/answer_testing/runner.py
@@ -90,7 +90,7 @@
 
     def run_all_tests(self):
         plot_list = []
-        for i,name in enumerate(sorted(test_registry)):
+        for name in sorted(test_registry):
             self.run_test(name)
         return self.passed_tests
 


https://bitbucket.org/yt_analysis/yt/commits/9209535f5452/
Changeset:   9209535f5452
Branch:      yt
User:        drudd
Date:        2014-09-05 23:18:16
Summary:     Removed unused list variable plot_list
Affected #:  1 file

diff -r b0aa0b325b53f01677b40b20bec8505657c04ed9 -r 9209535f545230bd1a014f1afd6e15edf25b259c yt/utilities/answer_testing/runner.py
--- a/yt/utilities/answer_testing/runner.py
+++ b/yt/utilities/answer_testing/runner.py
@@ -89,7 +89,6 @@
         self.plot_tests = plot_tests
 
     def run_all_tests(self):
-        plot_list = []
         for name in sorted(test_registry):
             self.run_test(name)
         return self.passed_tests
@@ -98,7 +97,6 @@
         # We'll also need to call the "compare" operation,
         # but for that we'll need a data store.
         test = test_registry[name]
-        plot_list = []
         if test.output_type == 'single':
             mot = MultipleOutputTest(self.io_log)
             for i,fn in enumerate(mot):


https://bitbucket.org/yt_analysis/yt/commits/d1ec974e4004/
Changeset:   d1ec974e4004
Branch:      yt
User:        drudd
Date:        2014-09-05 23:21:00
Summary:     Corrected typo in ParentageRelationshipsTest compare
Affected #:  1 file

diff -r 9209535f545230bd1a014f1afd6e15edf25b259c -r d1ec974e40042ec810dac77d4a7c96cefada542d yt/utilities/answer_testing/framework.py
--- a/yt/utilities/answer_testing/framework.py
+++ b/yt/utilities/answer_testing/framework.py
@@ -574,7 +574,7 @@
         for newp, oldp in zip(new_result["parents"], old_result["parents"]):
             assert(newp == oldp)
         for newc, oldc in zip(new_result["children"], old_result["children"]):
-            assert(newp == oldp)
+            assert(newc == oldc)
 
 class SimulatedHaloMassFunctionTest(AnswerTestingTest):
     _type_name = "SimulatedHaloMassFunction"


https://bitbucket.org/yt_analysis/yt/commits/64220b4b012a/
Changeset:   64220b4b012a
Branch:      yt
User:        drudd
Date:        2014-09-05 23:24:07
Summary:     Commented out imports that are only needed by commented-out code
Affected #:  1 file

diff -r d1ec974e40042ec810dac77d4a7c96cefada542d -r 64220b4b012a5d5c033fc4ee91935bab227da68c yt/utilities/spatial/setup.py
--- a/yt/utilities/spatial/setup.py
+++ b/yt/utilities/spatial/setup.py
@@ -4,8 +4,8 @@
 
 def configuration(parent_package='', top_path=None):
     from numpy.distutils.misc_util import Configuration, get_numpy_include_dirs
-    from numpy.distutils.system_info import get_info
-    from distutils.sysconfig import get_python_inc
+#    from numpy.distutils.system_info import get_info
+#    from distutils.sysconfig import get_python_inc
 
     config = Configuration('spatial', parent_package, top_path)
 


https://bitbucket.org/yt_analysis/yt/commits/69929d5f168a/
Changeset:   69929d5f168a
Branch:      yt
User:        drudd
Date:        2014-09-05 23:24:43
Summary:     Removed unused import
Affected #:  1 file

diff -r 64220b4b012a5d5c033fc4ee91935bab227da68c -r 69929d5f168aea0dd030a8bd48ee2f6afd8896ab yt/utilities/spatial/setupscons.py
--- a/yt/utilities/spatial/setupscons.py
+++ b/yt/utilities/spatial/setupscons.py
@@ -3,7 +3,7 @@
 from os.path import join
 
 def configuration(parent_package = '', top_path = None):
-    from numpy.distutils.misc_util import Configuration, get_numpy_include_dirs
+    from numpy.distutils.misc_util import Configuration
     config = Configuration('spatial', parent_package, top_path)
 
     config.add_data_dir('tests')


https://bitbucket.org/yt_analysis/yt/commits/a5724c496bb6/
Changeset:   a5724c496bb6
Branch:      yt
User:        drudd
Date:        2014-09-05 23:27:57
Summary:     Corrected typo in IOCommunicator.initialize_data
Affected #:  1 file

diff -r 69929d5f168aea0dd030a8bd48ee2f6afd8896ab -r a5724c496bb675ed807b4f6136cd97823cb03beb yt/utilities/parallel_tools/io_runner.py
--- a/yt/utilities/parallel_tools/io_runner.py
+++ b/yt/utilities/parallel_tools/io_runner.py
@@ -57,7 +57,7 @@
         ds = self.ds
         fields = [f for f in ds.field_list
                   if not ds.field_info[f].particle_type]
-        dsields = [f for f in ds.field_list
+        ds.fields = [f for f in ds.field_list
                    if ds.field_info[f].particle_type]
         # Preload is only defined for Enzo ...
         if ds.index.io._dataset_type == "enzo_packed_3d":


https://bitbucket.org/yt_analysis/yt/commits/26792610d84b/
Changeset:   26792610d84b
Branch:      yt
User:        drudd
Date:        2014-09-05 23:29:36
Summary:     Removed unused variable gids
Affected #:  1 file

diff -r a5724c496bb675ed807b4f6136cd97823cb03beb -r 26792610d84bde85edd50ddc9c2cc6bef860dda9 yt/utilities/amr_kdtree/amr_kdtree.py
--- a/yt/utilities/amr_kdtree/amr_kdtree.py
+++ b/yt/utilities/amr_kdtree/amr_kdtree.py
@@ -81,8 +81,6 @@
         for lvl in lvl_range:
             #grids = self.data_source.select_grids(lvl)
             grids = np.array([b for b, mask in self.data_source.blocks if b.Level == lvl])
-            gids = np.array([g.id for g in grids if g.Level == lvl],
-                            dtype="int64")
             if len(grids) == 0: continue
             self.add_grids(grids)
 


https://bitbucket.org/yt_analysis/yt/commits/53bed3c60004/
Changeset:   53bed3c60004
Branch:      yt
User:        drudd
Date:        2014-09-05 23:30:13
Summary:     Removed unused variable gre
Affected #:  1 file

diff -r 26792610d84bde85edd50ddc9c2cc6bef860dda9 -r 53bed3c60004bd12b30170895f2baa4fedcde508 yt/utilities/amr_kdtree/amr_kdtree.py
--- a/yt/utilities/amr_kdtree/amr_kdtree.py
+++ b/yt/utilities/amr_kdtree/amr_kdtree.py
@@ -91,7 +91,6 @@
             grid = self.ds.index.grids[node.grid - self._id_offset]
             dds = grid.dds
             gle = grid.LeftEdge
-            gre = grid.RightEdge
             nle = self.ds.arr(get_left_edge(node), input_units="code_length")
             nre = self.ds.arr(get_right_edge(node), input_units="code_length")
             li = np.rint((nle-gle)/dds).astype('int32')


https://bitbucket.org/yt_analysis/yt/commits/67549f027df1/
Changeset:   67549f027df1
Branch:      yt
User:        drudd
Date:        2014-09-05 23:32:21
Summary:     Removed unused variable count
Affected #:  1 file

diff -r 53bed3c60004bd12b30170895f2baa4fedcde508 -r 67549f027df1e42d0c8958184ca389d663078533 yt/visualization/eps_writer.py
--- a/yt/visualization/eps_writer.py
+++ b/yt/visualization/eps_writer.py
@@ -1013,7 +1013,6 @@
             for i in range(npanels): ylabels.append("")
 
     d = DualEPS(figsize=figsize)
-    count = 0
     for j in range(nrow):
         invj = nrow - j - 1
         ypos = invj*(figsize[1] + margins[1])


https://bitbucket.org/yt_analysis/yt/commits/01cb1525b41e/
Changeset:   01cb1525b41e
Branch:      yt
User:        drudd
Date:        2014-09-05 23:32:55
Summary:     Removed unused variable shape
Affected #:  1 file

diff -r 67549f027df1e42d0c8958184ca389d663078533 -r 01cb1525b41e1b1249c956d9af0677fc19547a43 yt/visualization/image_writer.py
--- a/yt/visualization/image_writer.py
+++ b/yt/visualization/image_writer.py
@@ -257,7 +257,6 @@
             raise KeyError(cmap_name)
 
     x = np.mgrid[0.0:1.0:lut[0].shape[0]*1j]
-    shape = buff.shape
     mapped = np.dstack(
             [(np.interp(buff, x, v)*255) for v in lut ]).astype("uint8")
     return mapped.copy("C")


https://bitbucket.org/yt_analysis/yt/commits/67e717688635/
Changeset:   67e717688635
Branch:      yt
User:        drudd
Date:        2014-09-05 23:34:10
Summary:     Removed unused variable max_nu
Affected #:  1 file

diff -r 01cb1525b41e1b1249c956d9af0677fc19547a43 -r 67e717688635cebbf703c6ddbb71068a9b75d3d1 yt/visualization/plot_modifications.py
--- a/yt/visualization/plot_modifications.py
+++ b/yt/visualization/plot_modifications.py
@@ -558,7 +558,6 @@
         plot._axes.set_ylabel(self.label)
 
 def get_smallest_appropriate_unit(v, ds):
-    max_nu = 1e30
     good_u = None
     for unit in ['Mpc', 'kpc', 'pc', 'au', 'rsun', 'km', 'cm']:
         uq = YTQuantity(1.0, unit)


https://bitbucket.org/yt_analysis/yt/commits/43769f18d89a/
Changeset:   43769f18d89a
Branch:      yt
User:        drudd
Date:        2014-09-05 23:34:48
Summary:     Removed unused variables DomainWidth, DomainLeft, DomainRight
Affected #:  1 file

diff -r 67e717688635cebbf703c6ddbb71068a9b75d3d1 -r 43769f18d89a26a876bb6e4fa7b06faf91dc42f9 yt/visualization/plot_modifications.py
--- a/yt/visualization/plot_modifications.py
+++ b/yt/visualization/plot_modifications.py
@@ -707,10 +707,6 @@
         dxf = "d%s" % xf
         dyf = "d%s" % yf
 
-        DomainRight = plot.data.ds.domain_right_edge
-        DomainLeft = plot.data.ds.domain_left_edge
-        DomainWidth = DomainRight - DomainLeft
-
         nx, ny = plot.image._A.shape
         buff = np.zeros((nx,ny),dtype='float64')
         for i,clump in enumerate(reversed(self.clumps)):


https://bitbucket.org/yt_analysis/yt/commits/2f73db094b63/
Changeset:   2f73db094b63
Branch:      yt
User:        drudd
Date:        2014-09-05 23:35:26
Summary:     Removed unused variables width, height
Affected #:  1 file

diff -r 43769f18d89a26a876bb6e4fa7b06faf91dc42f9 -r 2f73db094b6391b5e4a57a24e6dc059da64f789b yt/visualization/plot_modifications.py
--- a/yt/visualization/plot_modifications.py
+++ b/yt/visualization/plot_modifications.py
@@ -785,7 +785,6 @@
                         plot.data.ds.coordinates.y_axis[ax])
             pos = self.pos[xi], self.pos[yi]
         else: pos = self.pos
-        width,height = plot.image._A.shape
         x,y = self.convert_to_plot(plot, pos)
         
         plot._axes.text(x, y, self.text, **self.text_args)


https://bitbucket.org/yt_analysis/yt/commits/a023ccef9113/
Changeset:   a023ccef9113
Branch:      yt
User:        drudd
Date:        2014-09-05 23:39:07
Summary:     Removed unnecessary outer loop over profiles
Affected #:  1 file

diff -r 2f73db094b6391b5e4a57a24e6dc059da64f789b -r a023ccef9113b64bc41839a971708b6a5f75e9b4 yt/visualization/profile_plotter.py
--- a/yt/visualization/profile_plotter.py
+++ b/yt/visualization/profile_plotter.py
@@ -544,18 +544,17 @@
         >>> pp.save()
 
         """
-        for i, p in enumerate(self.profiles):
-            if field is 'all':
-                fields = self.axes.keys()
-            else:
-                fields = ensure_list(field)
-            for profile in self.profiles:
-                for field in profile.data_source._determine_fields(fields):
-                    if field in profile.field_map:
-                        field = profile.field_map[field]
-                    self.axes.ylim[field] = (ymin, ymax)
-                    # Continue on to the next profile.
-                    break
+        if field is 'all':
+            fields = self.axes.keys()
+        else:
+            fields = ensure_list(field)
+        for profile in self.profiles:
+            for field in profile.data_source._determine_fields(fields):
+                if field in profile.field_map:
+                    field = profile.field_map[field]
+                self.axes.ylim[field] = (ymin, ymax)
+                # Continue on to the next profile.
+                break
         return self
 
     def _get_field_log(self, field_y, profile):

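One caveat worth noting in the surviving code above (untouched by this
commit): `field is 'all'` compares object identity and only behaves like
equality because CPython happens to intern short string literals;
`field == 'all'` is the portable spelling. A small illustration:

    field = "".join(["al", "l"])  # built at runtime, so not interned
    print(field == 'all')         # True: compares values
    print(field is 'all')         # usually False: compares object identity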

https://bitbucket.org/yt_analysis/yt/commits/0a7c3135bda3/
Changeset:   0a7c3135bda3
Branch:      yt
User:        drudd
Date:        2014-09-05 23:40:15
Summary:     Removed unused variable program
Affected #:  1 file

diff -r a023ccef9113b64bc41839a971708b6a5f75e9b4 -r 0a7c3135bda32cadff2c4aa4e9bc64197f178612 yt/visualization/volume_rendering/multi_texture.py
--- a/yt/visualization/volume_rendering/multi_texture.py
+++ b/yt/visualization/volume_rendering/multi_texture.py
@@ -65,7 +65,7 @@
         self._colormap = vvt.Colormap()
         
         # create glsl program for this texture...
-        self._program1 = program =  vvt.GlslProgram()
+        self._program1 = vvt.GlslProgram()
         
         # scale and translation transforms
         self._trafo_scale = vv.Transform_Scale()


https://bitbucket.org/yt_analysis/yt/commits/103161afe4c7/
Changeset:   103161afe4c7
Branch:      yt
User:        drudd
Date:        2014-09-05 23:40:56
Summary:     Removed unnecessary enumerate
Affected #:  1 file

diff -r 0a7c3135bda32cadff2c4aa4e9bc64197f178612 -r 103161afe4c74415b7ee84edce7d8f27f75450cb yt/visualization/volume_rendering/multi_texture.py
--- a/yt/visualization/volume_rendering/multi_texture.py
+++ b/yt/visualization/volume_rendering/multi_texture.py
@@ -287,7 +287,7 @@
 
     ax = vv.gca()
 
-    for i,g in enumerate(gs):
+    for g in gs:
         ss = ((g.RightEdge - g.LeftEdge) / (np.array(g.my_data[0].shape)-1)).tolist()
         origin = g.LeftEdge.astype("float32").tolist()
         dd = (g.my_data[0].astype("float32") - mi)/(ma - mi)


https://bitbucket.org/yt_analysis/yt/commits/fe8ee5f5b47a/
Changeset:   fe8ee5f5b47a
Branch:      yt
User:        drudd
Date:        2014-09-05 23:42:23
Summary:     Removed unused variable alpha
Affected #:  1 file

diff -r 103161afe4c74415b7ee84edce7d8f27f75450cb -r fe8ee5f5b47af53fc1ba8b0af3e71c4d368285af yt/visualization/volume_rendering/transfer_functions.py
--- a/yt/visualization/volume_rendering/transfer_functions.py
+++ b/yt/visualization/volume_rendering/transfer_functions.py
@@ -403,7 +403,6 @@
         >>> tf = ColorTransferFunction( (-10.0, -5.0) )
         >>> tf.add_gaussian(-9.0, 0.01, [1.0, 0.0, 0.0, 1.0])
         """
-        alpha = height[3]
         for tf, v in zip(self.funcs, height):
             tf.add_gaussian(location, width, v)
 


https://bitbucket.org/yt_analysis/yt/commits/936d2b166508/
Changeset:   936d2b166508
Branch:      yt
User:        drudd
Date:        2014-09-05 23:44:42
Summary:     Removed unused variable norm
Affected #:  1 file

diff -r fe8ee5f5b47af53fc1ba8b0af3e71c4d368285af -r 936d2b16650874f1f2b0f405528bb61f3cc9f142 yt/visualization/volume_rendering/transfer_functions.py
--- a/yt/visualization/volume_rendering/transfer_functions.py
+++ b/yt/visualization/volume_rendering/transfer_functions.py
@@ -550,7 +550,6 @@
             label = ''
         alpha = self.alpha.y 
         max_alpha = alpha.max()
-        norm = max_alpha
         i_data = np.zeros((self.alpha.x.size, self.funcs[0].y.size, 3))
         i_data[:,:,0] = np.outer(self.funcs[0].y, np.ones(self.alpha.x.size))
         i_data[:,:,1] = np.outer(self.funcs[1].y, np.ones(self.alpha.x.size))


https://bitbucket.org/yt_analysis/yt/commits/e8929fed2c8d/
Changeset:   e8929fed2c8d
Branch:      yt
User:        drudd
Date:        2014-09-06 00:44:16
Summary:     Restored _particle_angular_momentum function and added add_field call
Affected #:  1 file

diff -r 936d2b16650874f1f2b0f405528bb61f3cc9f142 -r e8929fed2c8d3a38206ac8ed1f07aa3bb2fc273f yt/fields/particle_fields.py
--- a/yt/fields/particle_fields.py
+++ b/yt/fields/particle_fields.py
@@ -346,6 +346,15 @@
              units="g*cm**2/s", particle_type=True,
              validators=[ValidateParameter('center')])
 
+    def _particle_angular_momentum(field, data):
+        return data[ptype, "particle_mass"] \
+            * data[ptype, "particle_specific_angular_momentum"]
+    registry.add_field((ptype, "particle_angular_momentum"),
+              function=_particle_angular_momentum,
+              particle_type=True,
+              units="g*cm**2/s",
+              validators=[ValidateParameter("center")])
+
     create_magnitude_field(registry, "particle_angular_momentum",
                            "g*cm**2/s", ftype=ptype, particle_type=True)
     

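The restored field is just L = m * l, with l the specific angular momentum
(r x v) already provided per particle. For anyone wanting the same pattern
outside the registry machinery, a hedged sketch of registering a comparable
derived particle field on a loaded dataset (the dataset path and field name
below are illustrative, not part of this commit):

    import yt

    ds = yt.load("Enzo_64/DD0043/data0043")  # illustrative dataset

    def _particle_momentum_x(field, data):
        # momentum p_x = m * v_x, composed from two stock particle fields
        return data["all", "particle_mass"] * data["all", "particle_velocity_x"]

    ds.add_field(("all", "particle_momentum_x"),
                 function=_particle_momentum_x,
                 units="g*cm/s", particle_type=True)

    ad = ds.all_data()
    print(ad["all", "particle_momentum_x"])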

https://bitbucket.org/yt_analysis/yt/commits/d9f11b1049d6/
Changeset:   d9f11b1049d6
Branch:      yt
User:        drudd
Date:        2014-09-08 23:57:57
Summary:     Fixed variable name from dsields to pfields
Affected #:  1 file

diff -r e8929fed2c8d3a38206ac8ed1f07aa3bb2fc273f -r d9f11b1049d65a4f141b4d4eabd84fcaf424f429 yt/utilities/parallel_tools/io_runner.py
--- a/yt/utilities/parallel_tools/io_runner.py
+++ b/yt/utilities/parallel_tools/io_runner.py
@@ -57,7 +57,7 @@
         ds = self.ds
         fields = [f for f in ds.field_list
                   if not ds.field_info[f].particle_type]
-        ds.fields = [f for f in ds.field_list
+        pfields = [f for f in ds.field_list
                    if ds.field_info[f].particle_type]
         # Preload is only defined for Enzo ...
         if ds.index.io._dataset_type == "enzo_packed_3d":


https://bitbucket.org/yt_analysis/yt/commits/9b28df7c4d71/
Changeset:   9b28df7c4d71
Branch:      yt
User:        drudd
Date:        2014-09-09 17:07:13
Summary:     Merge with upstream
Affected #:  17 files

diff -r d9f11b1049d65a4f141b4d4eabd84fcaf424f429 -r 9b28df7c4d71482b3759c6dcbdbc3d57e395b3a5 doc/source/analyzing/analysis_modules/halo_catalogs.rst
--- a/doc/source/analyzing/analysis_modules/halo_catalogs.rst
+++ b/doc/source/analyzing/analysis_modules/halo_catalogs.rst
@@ -129,7 +129,14 @@
 are center_of_mass and bulk_velocity. Their definitions are available in 
 ``yt/analysis_modules/halo_analysis/halo_quantities.py``. If you think that 
 your quantity may be of use to the general community, add it to 
-``halo_quantities.py`` and issue a pull request.
+``halo_quantities.py`` and issue a pull request.  Default halo quantities are:
+
+* ``particle_identifier`` -- Halo ID (e.g. 0 to N)
+* ``particle_mass`` -- Mass of halo
+* ``particle_position_x`` -- Location of halo
+* ``particle_position_y`` -- Location of halo
+* ``particle_position_z`` -- Location of halo
+* ``virial_radius`` -- Virial radius of halo
 
 An example of adding a quantity:
 
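For reference, the registration pattern used in ``halo_quantities.py`` looks
roughly like the sketch below; the quantity name and body are illustrative,
not the doc's own example:

    from yt.analysis_modules.halo_analysis.api import add_quantity

    def _mass_msun(halo):
        # hypothetical quantity: the halo's particle_mass re-expressed in Msun
        return halo.quantities["particle_mass"].in_units("Msun")

    add_quantity("mass_msun", _mass_msun)
    # afterwards: hc.add_quantity("mass_msun") on a HaloCatalog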

diff -r d9f11b1049d65a4f141b4d4eabd84fcaf424f429 -r 9b28df7c4d71482b3759c6dcbdbc3d57e395b3a5 doc/source/analyzing/analysis_modules/halo_finders.rst
--- a/doc/source/analyzing/analysis_modules/halo_finders.rst
+++ b/doc/source/analyzing/analysis_modules/halo_finders.rst
@@ -75,7 +75,8 @@
   mass. In simulations where the highest-resolution particles all have the 
   same mass (ie: zoom-in grid based simulations), one can set up a particle
   filter to select the lowest mass particles and perform the halo finding
-  only on those.
+  only on those.  See this cookbook recipe for an example: 
+  :ref:`cookbook-rockstar-nested-grid`.
 
 To run the Rockstar Halo finding, you must launch python with MPI and 
 parallelization enabled. While Rockstar itself does not require MPI to run, 

diff -r d9f11b1049d65a4f141b4d4eabd84fcaf424f429 -r 9b28df7c4d71482b3759c6dcbdbc3d57e395b3a5 doc/source/cookbook/cosmological_analysis.rst
--- a/doc/source/cookbook/cosmological_analysis.rst
+++ b/doc/source/cookbook/cosmological_analysis.rst
@@ -14,6 +14,22 @@
 
 .. yt_cookbook:: halo_plotting.py
 
+.. _cookbook-rockstar-nested-grid:
+
+Running Rockstar to Find Halos on Multi-Resolution-Particle Datasets
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The version of Rockstar installed with yt does not have the capability
+to work on datasets with particles of different masses.  Unfortunately,
+many simulations possess particles of different masses, notably cosmological 
+zoom datasets.  This recipe uses Rockstar in two different ways to generate a 
+HaloCatalog from the highest resolution dark matter particles (the ones 
+inside the zoom region).  It then overlays some of those halos on a projection
+as a demonstration.  See :ref:`halo-analysis` and :ref:`annotate-halos` for
+more information.
+
+.. yt_cookbook:: rockstar_nest.py
+
 .. _cookbook-halo_finding:
 
 Halo Profiling and Custom Analysis

diff -r d9f11b1049d65a4f141b4d4eabd84fcaf424f429 -r 9b28df7c4d71482b3759c6dcbdbc3d57e395b3a5 doc/source/cookbook/power_spectrum_example.py
--- a/doc/source/cookbook/power_spectrum_example.py
+++ b/doc/source/cookbook/power_spectrum_example.py
@@ -57,7 +57,7 @@
     
     # physical limits to the wavenumbers
     kmin = np.min(1.0/L)
-    kmax = np.max(0.5*dims/L)
+    kmax = np.min(0.5*dims/L)
     
     kbins = np.arange(kmin, kmax, kmin)
     N = len(kbins)
@@ -112,7 +112,6 @@
     return np.abs(ru)**2
 
 
-if __name__ == "__main__":
 
-    ds = yt.load("maestro_xrb_lores_23437")
-    doit(ds)
+ds = yt.load("maestro_xrb_lores_23437")
+doit(ds)

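The kmax change above is the substantive fix in this file: on a non-cubic
grid the power spectrum can only be binned out to the smallest per-axis
Nyquist wavenumber, so np.min is correct where np.max over-extended the bins.
A worked example with illustrative numbers:

    import numpy as np

    dims = np.array([128, 128, 64])  # illustrative grid shape
    L = np.array([1.0, 1.0, 1.0])    # box length per axis
    # per-axis Nyquist limits: [64., 64., 32.]
    kmax = np.min(0.5 * dims / L)    # 32.0: beyond this, the z-axis has no modes
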
diff -r d9f11b1049d65a4f141b4d4eabd84fcaf424f429 -r 9b28df7c4d71482b3759c6dcbdbc3d57e395b3a5 doc/source/cookbook/rockstar_nest.py
--- /dev/null
+++ b/doc/source/cookbook/rockstar_nest.py
@@ -0,0 +1,74 @@
+# You must run this job in parallel.  
+# There are several mpi flags which can be useful in order for it to work OK.
+# It requires at least 3 processors in order to run because of the way in which 
+# rockstar divides up the work.  Make sure you have mpi4py installed as per 
+# http://yt-project.org/docs/dev/analyzing/parallel_computation.html#setting-up-parallel-yt
+    
+# Usage: mpirun -np <num_procs> --mca btl ^openib python this_script.py
+
+import yt
+from yt.analysis_modules.halo_analysis.halo_catalog import HaloCatalog
+from yt.data_objects.particle_filters import add_particle_filter
+from yt.analysis_modules.halo_finding.rockstar.api import RockstarHaloFinder
+yt.enable_parallelism() # rockstar halofinding requires parallelism
+
+# Create a dark matter particle filter
+# This will be code dependent, but this function here is true for enzo
+
+def DarkMatter(pfilter, data):
+    filter = data[("all", "particle_type")] == 1 # DM = 1, Stars = 2
+    return filter
+
+add_particle_filter("dark_matter", function=DarkMatter, filtered_type='all', \
+                    requires=["particle_type"])
+
+# First, we make sure that this script is being run using mpirun with
+# at least 3 processors as indicated in the comments above.
+assert(yt.communication_system.communicators[-1].size >= 3)
+
+# Load the dataset and apply dark matter filter
+fn = "Enzo_64/DD0043/data0043"
+ds = yt.load(fn)
+ds.add_particle_filter('dark_matter')
+
+# Determine highest resolution DM particle mass in sim by looking
+# at the extrema of the dark_matter particle_mass field.
+ad = ds.all_data()
+min_dm_mass = ad.quantities.extrema(('dark_matter','particle_mass'))[0]
+
+# Define a new particle filter to isolate all highest resolution DM particles
+# and apply it to dataset
+def MaxResDarkMatter(pfilter, data):
+    return data["particle_mass"] <= 1.01 * min_dm_mass
+
+add_particle_filter("max_res_dark_matter", function=MaxResDarkMatter, \
+                    filtered_type='dark_matter', requires=["particle_mass"])
+ds.add_particle_filter('max_res_dark_matter')
+
+# If desired, we can see the total number of DM and High-res DM particles
+#if yt.is_root():
+#    print "Simulation has %d DM particles." % ad['dark_matter','particle_type'].shape
+#    print "Simulation has %d Highest Res DM particles." % ad['max_res_dark_matter', 'particle_type'].shape
+
+# Run the halo catalog on the dataset only on the highest resolution dark matter 
+# particles
+hc = HaloCatalog(data_ds=ds, finder_method='rockstar', \
+                 finder_kwargs={'dm_only':True, 'particle_type':'max_res_dark_matter'})
+hc.create()
+
+# Or alternatively, just run the RockstarHaloFinder and later import the 
+# output file as necessary.  You can skip this step if you've already run it
+# once, but be careful since subsequent halo finds will overwrite this data.
+#rhf = RockstarHaloFinder(ds, particle_type="max_res_dark_matter")
+#rhf.run()
+# Load the halo list from a rockstar output for this dataset
+# Create a projection with the halos overplot on top
+#halos = yt.load('rockstar_halos/halos_0.0.bin')
+#hc = HaloCatalog(halos_ds=halos)
+#hc.load()
+
+# Regardless of your method of creating the halo catalog, use it to overplot the
+# halos on a projection.
+p = yt.ProjectionPlot(ds, "x", "density")
+p.annotate_halos(hc, annotate_field = 'particle_identifier', width=(10,'Mpc'), factor=2)
+p.save()

diff -r d9f11b1049d65a4f141b4d4eabd84fcaf424f429 -r 9b28df7c4d71482b3759c6dcbdbc3d57e395b3a5 doc/source/cookbook/tests/test_cookbook.py
--- /dev/null
+++ b/doc/source/cookbook/tests/test_cookbook.py
@@ -0,0 +1,29 @@
+# -*- coding: utf-8 -*-
+"""Module for cookbook testing
+
+
+This test should be run from main yt directory.
+
+Example:
+
+      $ sed -e '/where/d' -i nose.cfg setup.cfg
+      $ nosetests doc/source/cookbook/tests/test_cookbook.py -P -v
+"""
+import glob
+import os
+import sys
+
+sys.path.append(os.path.join(os.getcwd(), "doc/source/cookbook"))
+
+
+def test_recipe():
+    '''Dummy test grabbing all cookbook's recipes'''
+    for fname in glob.glob("doc/source/cookbook/*.py"):
+        module_name = os.path.splitext(os.path.basename(fname))[0]
+        yield check_recipe, module_name
+
+
+def check_recipe(module_name):
+    '''Run single recipe'''
+    __import__(module_name)
+    assert True

diff -r d9f11b1049d65a4f141b4d4eabd84fcaf424f429 -r 9b28df7c4d71482b3759c6dcbdbc3d57e395b3a5 doc/source/cookbook/thin_slice_projection.py
--- a/doc/source/cookbook/thin_slice_projection.py
+++ b/doc/source/cookbook/thin_slice_projection.py
@@ -4,7 +4,7 @@
 ds = yt.load("Enzo_64/DD0030/data0030")
 
 # Make a projection that is the full width of the domain,
-# but only 10 Mpc in depth.  This is done by creating a
+# but only 5 Mpc in depth.  This is done by creating a
 # region object with this exact geometry and providing it
 # as a data_source for the projection.
 
@@ -17,12 +17,12 @@
 right_corner = ds.domain_right_edge
 
 # Now adjust the size of the region along the line of sight (x axis).
-depth = ds.quan(10.0,'Mpc')
+depth = ds.quan(5.0,'Mpc')
 left_corner[0] = center[0] - 0.5 * depth
-left_corner[0] = center[0] + 0.5 * depth
+right_corner[0] = center[0] + 0.5 * depth
 
 # Create the region
-region = ds.region(center, left_corner, right_corner)
+region = ds.box(left_corner, right_corner)
 
 # Create a density projection and supply the region we have just created.
 # Only cells within the region will be included in the projection.

diff -r d9f11b1049d65a4f141b4d4eabd84fcaf424f429 -r 9b28df7c4d71482b3759c6dcbdbc3d57e395b3a5 doc/source/visualizing/_cb_docstrings.inc
--- a/doc/source/visualizing/_cb_docstrings.inc
+++ b/doc/source/visualizing/_cb_docstrings.inc
@@ -151,19 +151,28 @@
 Overplot Halo Annotations
 ~~~~~~~~~~~~~~~~~~~~~~~~~
 
-.. function:: annotate_halos(self, halo_catalog, col='white', alpha=1, \
-                             width=None):
+.. function:: annotate_halos(self, halo_catalog, circle_kwargs=None, width=None, \ 
+                             annotate_field=False, font_kwargs=None, factor=1.0):
 
    (This is a proxy for
    :class:`~yt.visualization.plot_modifications.HaloCatalogCallback`.)
 
    Accepts a :class:`~yt.analysis_modules.halo_analysis.halo_catalog.HaloCatalog` 
-   and plots a circle at the location of each
-   halo with the radius of the circle corresponding to the virial radius of the
-   halo.  If ``width`` is set to None (default) all halos are plotted.
-   Otherwise, only halos that fall within a slab with width ``width`` centered
-   on the center of the plot data. The color and transparency of the circles can
-   be controlled with ``col`` and ``alpha`` respectively.
+   and plots a circle at the location of each halo with the radius of the 
+   circle corresponding to the virial radius of the halo.  If ``width`` is set 
+   to None (default) all halos are plotted, otherwise it accepts a tuple in 
+   the form (1.0, 'Mpc') to only display halos that fall within a slab with 
+   width ``width`` centered on the center of the plot data.  The appearance of 
+   the circles can be changed with the circle_kwargs dictionary, which is 
+   supplied to the Matplotlib patch Circle.  One can label each of the halos 
+   with the annotate_field, which accepts a field contained in the halo catalog 
+   to add text to the plot near the halo (example: annotate_field = 
+   ``particle_mass`` will write the halo mass next to each halo, whereas 
+   ``particle_identifier`` shows the halo number).  font_kwargs contains the 
+   arguments controlling the text appearance of the annotated field.
+   Factor is the number the virial radius is multiplied by for plotting the 
+   circles. Ex: factor = 2.0 will plot circles with twice the radius of each 
+   halo virial radius.
 
 .. python-script::
 
@@ -177,7 +186,7 @@
    hc.create()
 
    prj = yt.ProjectionPlot(data_ds, 'z', 'density')
-   prj.annotate_halos(hc)
+   prj.annotate_halos(hc, annotate_field='particle_identifier')
    prj.save()
 
 Overplot a Straight Line

diff -r d9f11b1049d65a4f141b4d4eabd84fcaf424f429 -r 9b28df7c4d71482b3759c6dcbdbc3d57e395b3a5 doc/source/visualizing/colormaps/index.rst
--- a/doc/source/visualizing/colormaps/index.rst
+++ b/doc/source/visualizing/colormaps/index.rst
@@ -6,12 +6,17 @@
 There are several colormaps available for yt.  yt includes all of the 
 matplotlib colormaps as well for nearly all functions.  Individual visualization
 functions usually allow you to specify a colormap with the ``cmap`` flag.
-There are a small number of functions (mostly contained in the image_writer 
-module; e.g. write_bitmap, write_image, write_projection, etc.), which do 
-not load the matplotlib infrastructure and can only access the colormaps 
-native to yt.  
 
-Here is a chart of all of the colormaps available.  In addition to each 
+If you have installed `brewer2mpl`
+(`pip install brewer2mpl` or see `https://github.com/jiffyclub/brewer2mpl`_),
+you can also access the discrete colormaps available on
+`http://colorbrewer2.org`_. Instead of supplying the colormap name, specify
+a tuple of the form (name, type, number), for example `('RdBu', 'Diverging', 9)`.
+These discrete colormaps will not be interpolated, and can be useful for
+creating colorblind/printer/grayscale-friendly plots. For more information,
+visit `http://colorbrewer2.org`_.
+
+Here is a chart of all of the yt and matplotlib colormaps available.  In addition to each 
 colormap displayed here, you can access its "reverse" by simply appending a 
 ``"_r"`` to the end of the colormap name.
 

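As the revised page says, the (name, type, number) tuple slots in wherever a
colormap name is accepted, e.g. set_cmap. A hedged usage sketch (the dataset
path is illustrative, and brewer2mpl must be installed):

    import yt

    ds = yt.load("IsolatedGalaxy/galaxy0030/galaxy0030")  # illustrative
    slc = yt.SlicePlot(ds, 'z', 'density')
    # a 9-class diverging ColorBrewer palette, applied without interpolation
    slc.set_cmap('density', ('RdBu', 'Diverging', 9))
    slc.save()
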
diff -r d9f11b1049d65a4f141b4d4eabd84fcaf424f429 -r 9b28df7c4d71482b3759c6dcbdbc3d57e395b3a5 yt/data_objects/tests/test_spheres.py
--- a/yt/data_objects/tests/test_spheres.py
+++ b/yt/data_objects/tests/test_spheres.py
@@ -6,10 +6,11 @@
     from yt.config import ytcfg
     ytcfg["yt","__withintesting"] = "True"
 
+_fields_to_compare = ("spherical_r", "cylindrical_r",
+                      "spherical_theta", "cylindrical_theta",
+                      "spherical_phi", "cylindrical_z")
+
 def test_domain_sphere():
-    ds = fake_random_ds(16, fields = ("density"))
-    sp = ds.sphere(ds.domain_center, ds.domain_width[0])
-
     # Now we test that we can get different radial velocities based on field
     # parameters.
 
@@ -51,3 +52,12 @@
     yield assert_equal, np.any(rp0["radial_velocity"][rp0.used] ==
                                rp1["radial_velocity"][rp1.used]), \
                                False
+
+    ref_sp = ds.sphere("c", 0.25)
+    for f in _fields_to_compare:
+        ref_sp[f].sort()
+    for center in periodicity_cases(ds):
+        sp = ds.sphere(center, 0.25)
+        for f in _fields_to_compare:
+            sp[f].sort()
+            yield assert_equal, sp[f], ref_sp[f]

diff -r d9f11b1049d65a4f141b4d4eabd84fcaf424f429 -r 9b28df7c4d71482b3759c6dcbdbc3d57e395b3a5 yt/frontends/boxlib/data_structures.py
--- a/yt/frontends/boxlib/data_structures.py
+++ b/yt/frontends/boxlib/data_structures.py
@@ -410,6 +410,8 @@
     def _is_valid(cls, *args, **kwargs):
         # fill our args
         output_dir = args[0]
+        # boxlib datasets are always directories
+        if not os.path.isdir(output_dir): return False
         header_filename = os.path.join(output_dir, "Header")
         jobinfo_filename = os.path.join(output_dir, "job_info")
         if not os.path.exists(header_filename):
@@ -704,15 +706,17 @@
           
     @classmethod
     def _is_valid(cls, *args, **kwargs):
-        # fill our args                                                                               
+        # fill our args
         output_dir = args[0]
+        # boxlib datasets are always directories
+        if not os.path.isdir(output_dir): return False
         header_filename = os.path.join(output_dir, "Header")
         jobinfo_filename = os.path.join(output_dir, "job_info")
         if not os.path.exists(header_filename):
-            # We *know* it's not boxlib if Header doesn't exist.                                      
+            # We *know* it's not boxlib if Header doesn't exist.
             return False
         args = inspect.getcallargs(cls.__init__, args, kwargs)
-        # This might need to be localized somehow                                                     
+        # This might need to be localized somehow
         inputs_filename = os.path.join(
                             os.path.dirname(os.path.abspath(output_dir)),
                             args['cparam_filename'])
@@ -720,7 +724,7 @@
             return False
         if os.path.exists(jobinfo_filename):
             return False
-        # Now we check for all the others                                                             
+        # Now we check for all the others
         lines = open(inputs_filename).readlines()
         if any(("castro." in line for line in lines)): return False
         if any(("nyx." in line for line in lines)): return False
@@ -736,6 +740,8 @@
     def _is_valid(cls, *args, **kwargs):
         # fill our args
         output_dir = args[0]
+        # boxlib datasets are always directories
+        if not os.path.isdir(output_dir): return False
         header_filename = os.path.join(output_dir, "Header")
         jobinfo_filename = os.path.join(output_dir, "job_info")
         if not os.path.exists(header_filename):
@@ -756,6 +762,8 @@
     def _is_valid(cls, *args, **kwargs):
         # fill our args
         output_dir = args[0]
+        # boxlib datasets are always directories
+        if not os.path.isdir(output_dir): return False
         header_filename = os.path.join(output_dir, "Header")
         jobinfo_filename = os.path.join(output_dir, "job_info")
         if not os.path.exists(header_filename):
@@ -852,6 +860,8 @@
     def _is_valid(cls, *args, **kwargs):
         # fill our args
         pname = args[0].rstrip("/")
+        # boxlib datasets are always directories
+        if not os.path.isdir(pname): return False
         dn = os.path.dirname(pname)
         if len(args) > 1:
             kwargs['paramFilename'] = args[1]
@@ -862,7 +872,7 @@
         # We check for the job_info file's existence because this is currently
         # what distinguishes Nyx data from MAESTRO data.
         pfn = os.path.join(pfname)
-        if not os.path.exists(pfn): return False
+        if not os.path.exists(pfn) or os.path.isdir(pfn): return False
         nyx = any(("nyx." in line for line in open(pfn)))
         maestro = os.path.exists(os.path.join(pname, "job_info"))
         orion = (not nyx) and (not maestro)

diff -r d9f11b1049d65a4f141b4d4eabd84fcaf424f429 -r 9b28df7c4d71482b3759c6dcbdbc3d57e395b3a5 yt/geometry/oct_container.pyx
--- a/yt/geometry/oct_container.pyx
+++ b/yt/geometry/oct_container.pyx
@@ -418,7 +418,7 @@
         cdef np.ndarray[np.uint8_t, ndim=1] coords
         cdef OctVisitorData data
         self.setup_data(&data, domain_id)
-        coords = np.zeros((num_cells*8), dtype="uint8")
+        coords = np.zeros((num_cells*data.nz), dtype="uint8")
         data.array = <void *> coords.data
         self.visit_all_octs(selector, oct_visitors.mask_octs, &data)
         return coords.astype("bool")

diff -r d9f11b1049d65a4f141b4d4eabd84fcaf424f429 -r 9b28df7c4d71482b3759c6dcbdbc3d57e395b3a5 yt/testing.py
--- a/yt/testing.py
+++ b/yt/testing.py
@@ -624,6 +624,18 @@
         return _func
     return compare_results(func)
 
+def periodicity_cases(ds):
+    # This is a generator that yields things near the corners.  It's good for
+    # getting different places to check periodicity.
+    yield (ds.domain_left_edge + ds.domain_right_edge)/2.0
+    dx = ds.domain_width / ds.domain_dimensions
+    # We start one dx in, and only go to one in as well.
+    for i in (1, ds.domain_dimensions[0] - 2):
+        for j in (1, ds.domain_dimensions[1] - 2):
+            for k in (1, ds.domain_dimensions[2] - 2):
+                center = dx * np.array([i,j,k]) + ds.domain_left_edge
+                yield center
+
 def run_nose(verbose=False, run_answer_tests=False, answer_big_data=False):
     import nose, os, sys, yt
     from yt.funcs import mylog

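periodicity_cases yields nine centers in total: the domain midpoint, then the
eight points one cell in from each corner, which is what the new sphere test
above iterates over. A quick sketch of consuming it directly:

    from yt.testing import fake_random_ds, periodicity_cases

    ds = fake_random_ds(16)
    centers = list(periodicity_cases(ds))
    assert len(centers) == 9  # midpoint + 2*2*2 near-corner points
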
diff -r d9f11b1049d65a4f141b4d4eabd84fcaf424f429 -r 9b28df7c4d71482b3759c6dcbdbc3d57e395b3a5 yt/visualization/base_plot_types.py
--- a/yt/visualization/base_plot_types.py
+++ b/yt/visualization/base_plot_types.py
@@ -19,6 +19,12 @@
 from yt.funcs import \
     get_image_suffix, mylog, iterable
 import numpy as np
+try:
+    import brewer2mpl
+    has_brewer = True
+except:
+    has_brewer = False
+
 
 class CallbackWrapper(object):
     def __init__(self, viewer, window_plot, frb, field):
@@ -110,6 +116,13 @@
         elif (cbnorm == 'linear'):
             norm = matplotlib.colors.Normalize()
         extent = [float(e) for e in extent]
+        if isinstance(cmap, tuple):
+            if has_brewer:
+                bmap = brewer2mpl.get_map(*cmap)
+                cmap = bmap.get_mpl_colormap(N=cmap[2])
+            else:
+                raise RuntimeError("Please install brewer2mpl to use colorbrewer colormaps")
+
         self.image = self.axes.imshow(data.to_ndarray(), origin='lower',
                                       extent=extent, norm=norm, vmin=self.zmin,
                                       aspect=aspect, vmax=self.zmax, cmap=cmap)

diff -r d9f11b1049d65a4f141b4d4eabd84fcaf424f429 -r 9b28df7c4d71482b3759c6dcbdbc3d57e395b3a5 yt/visualization/image_writer.py
--- a/yt/visualization/image_writer.py
+++ b/yt/visualization/image_writer.py
@@ -23,6 +23,12 @@
 import yt.utilities.lib.image_utilities as au
 import yt.utilities.png_writer as pw
 from yt.extern.six.moves import builtins
+try:
+    import brewer2mpl
+    has_brewer = True
+except:
+    has_brewer = False
+
 
 def scale_image(image, mi=None, ma=None):
     r"""Scale an image ([NxNxM] where M = 1-4) to be uint8 and values scaled 
@@ -248,7 +254,14 @@
         lut = cmd.color_map_luts[cmap_name]
     except KeyError:
         try:
-            cmap = mcm.get_cmap(cmap_name)
+            if isinstance(cmap_name, tuple):
+                if has_brewer:
+                    bmap = brewer2mpl.get_map(*cmap_name)
+                    cmap = bmap.get_mpl_colormap(N=cmap_name[2])
+                else:
+                    raise RuntimeError("Please install brewer2mpl to use colorbrewer colormaps")
+            else:
+                cmap = mcm.get_cmap(cmap_name)
             dummy = cmap(0.0)
             lut = cmap._lut.T
         except ValueError:
@@ -256,9 +269,19 @@
                 " colormap file or matplotlib colormaps"
             raise KeyError(cmap_name)
 
-    x = np.mgrid[0.0:1.0:lut[0].shape[0]*1j]
-    mapped = np.dstack(
-            [(np.interp(buff, x, v)*255) for v in lut ]).astype("uint8")
+    if isinstance(cmap_name, tuple) and has_brewer:
+        # If we are using the colorbrewer maps, don't interpolate
+        shape = buff.shape
+        # We add float_eps so that digitize doesn't go out of bounds
+        x = np.mgrid[0.0:1.0+np.finfo(np.float32).eps:lut[0].shape[0]*1j]
+        inds = np.digitize(buff.ravel(), x)
+        inds.shape = (shape[0], shape[1])
+        mapped = np.dstack([(v[inds]*255).astype('uint8') for v in lut])
+        del inds
+    else:
+        x = np.mgrid[0.0:1.0:lut[0].shape[0]*1j]
+        mapped = np.dstack(
+                [(np.interp(buff, x, v)*255).astype('uint8') for v in lut ])
     return mapped.copy("C")
 
 def strip_colormap_data(fn = "color_map_data.py",

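The digitize branch above snaps each pixel to its nearest LUT entry instead
of interpolating, which is what keeps a ColorBrewer palette discrete. A
reduced sketch of the same indexing trick on one color channel (the 4-entry
LUT is illustrative):

    import numpy as np

    v = np.array([0.0, 0.33, 0.66, 1.0])        # one channel of a 4-entry LUT
    buff = np.array([[0.1, 0.4], [0.7, 0.95]])  # normalized image buffer
    eps = np.finfo(np.float32).eps
    x = np.mgrid[0.0:1.0 + eps:v.shape[0]*1j]   # bin edges, padded so 1.0 stays in range
    inds = np.digitize(buff.ravel(), x)         # bin index per pixel, no interpolation
    inds.shape = buff.shape
    channel = (v[inds] * 255).astype('uint8')   # discrete channel values
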
diff -r d9f11b1049d65a4f141b4d4eabd84fcaf424f429 -r 9b28df7c4d71482b3759c6dcbdbc3d57e395b3a5 yt/visualization/plot_container.py
--- a/yt/visualization/plot_container.py
+++ b/yt/visualization/plot_container.py
@@ -35,6 +35,7 @@
 from yt.utilities.exceptions import \
     YTNotInsideNotebook
 
+
 def invalidate_data(f):
     @wraps(f)
     def newfunc(*args, **kwargs):
@@ -198,7 +199,7 @@
         return self
 
     @invalidate_plot
-    def set_cmap(self, field, cmap_name):
+    def set_cmap(self, field, cmap):
         """set the colormap for one of the fields
 
         Parameters
@@ -206,8 +207,11 @@
         field : string
             the field to set the colormap
             if field == 'all', applies to all plots.
-        cmap_name : string
-            name of the colormap
+        cmap : string or tuple
+            If a string, will be interpreted as name of the colormap.
+            If a tuple, it is assumed to be of the form (name, type, number)
+            to be used for brewer2mpl functionality. (name, type, number, bool)
+            can be used to specify if a reverse colormap is to be used.
 
         """
 
@@ -217,7 +221,7 @@
             fields = [field]
         for field in self.data_source._determine_fields(fields):
             self._colorbar_valid = False
-            self._colormaps[field] = cmap_name
+            self._colormaps[field] = cmap
         return self
 
     @invalidate_plot
@@ -384,37 +388,6 @@
         return self.set_font({'size': size})
 
     @invalidate_plot
-    def set_cmap(self, field, cmap):
-        """set the colormap for one of the fields
-
-        Parameters
-        ----------
-        field : string
-            the field to set a transform
-            if field == 'all', applies to all plots.
-        cmap : string
-            name of the colormap
-
-        """
-        if field == 'all':
-            fields = self.plots.keys()
-        else:
-            fields = [field]
-
-        for field in self.data_source._determine_fields(fields):
-            self._colorbar_valid = False
-            self._colormaps[field] = cmap
-            if isinstance(cmap, types.StringTypes):
-                if str(cmap) in yt_colormaps:
-                    cmap = yt_colormaps[str(cmap)]
-                elif hasattr(matplotlib.cm, cmap):
-                    cmap = getattr(matplotlib.cm, cmap)
-            if not is_colormap(cmap) and cmap is not None:
-                raise RuntimeError("Colormap '%s' does not exist!" % str(cmap))
-            self.plots[field].image.set_cmap(cmap)
-        return self
-
-    @invalidate_plot
     @invalidate_figure
     def set_figure_size(self, size):
         """Sets a new figure size for the plot

diff -r d9f11b1049d65a4f141b4d4eabd84fcaf424f429 -r 9b28df7c4d71482b3759c6dcbdbc3d57e395b3a5 yt/visualization/plot_modifications.py
--- a/yt/visualization/plot_modifications.py
+++ b/yt/visualization/plot_modifications.py
@@ -901,8 +901,8 @@
 class HaloCatalogCallback(PlotCallback):
     """
     annotate_halos(halo_catalog, circle_kwargs=None,
-        width = None, annotate_field=False,
-        font_kwargs = None, factor = 1.0)
+        width = None, annotate_field = False,
+        font_kwargs=None, factor = 1.0)
 
     Plots circles at the locations of all the halos
     in a halo catalog with radii corresponding to the
@@ -929,14 +929,16 @@
     region = None
     _descriptor = None
 
-    def __init__(self, halo_catalog, circle_kwargs = None, 
+    def __init__(self, halo_catalog, circle_kwargs=None, 
             width = None, annotate_field = False,
-            font_kwargs = None, factor = 1.0):
+            font_kwargs=None, factor = 1.0):
 
         PlotCallback.__init__(self)
         self.halo_catalog = halo_catalog
         self.width = width
         self.annotate_field = annotate_field
+        if font_kwargs is None:
+            font_kwargs = {'color':'white'}
         self.font_kwargs = font_kwargs
         self.factor = factor
         if circle_kwargs is None:
@@ -999,7 +1001,7 @@
 
         if self.annotate_field:
             annotate_dat = halo_data[self.annotate_field]
-            texts = ['{0}'.format(dat) for dat in annotate_dat]
+            texts = ['{:g}'.format(float(dat)) for dat in annotate_dat]
             for pos_x, pos_y, t in zip(px, py, texts): 
                 plot._axes.text(pos_x, pos_y, t, **self.font_kwargs)

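The label change at the end is worth spelling out: halo catalogs store
unit-bearing quantities, and the old '{0}' formatting printed them units and
all; float() strips the units and '{:g}' keeps the label compact. A small
sketch (assuming a yt 3.x YTQuantity):

    from yt.units.yt_array import YTQuantity

    m = YTQuantity(1.234567e14, "Msun")
    print('{0}'.format(m))           # '1.234567e+14 Msun' -- units leak into the label
    print('{:g}'.format(float(m)))   # '1.23457e+14' -- compact, unit-free
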
Repository URL: https://bitbucket.org/yt_analysis/yt/

--

This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.


More information about the yt-svn mailing list