[yt-svn] commit/yt: 4 new changesets

commits-noreply at bitbucket.org commits-noreply at bitbucket.org
Sat Jul 19 06:42:17 PDT 2014


4 new commits in yt:

https://bitbucket.org/yt_analysis/yt/commits/02b81db113ec/
Changeset:   02b81db113ec
Branch:      yt-3.0
User:        jzuhone
Date:        2014-07-10 06:20:34
Summary:     More options for center coordinates of the slice.
Affected #:  1 file

diff -r 3fceec20bf2ce03a9771e97e69d3087a204f7eda -r 02b81db113ec602d61ea05889526e92711c9d900 yt/utilities/fits_image.py
--- a/yt/utilities/fits_image.py
+++ b/yt/utilities/fits_image.py
@@ -288,15 +288,20 @@
         The axis of the slice. One of "x","y","z", or 0,1,2.
     fields : string or list of strings
         The fields to slice
-    coord : float, tuple, or YTQuantity
+    coord : float, tuple, YTQuantity, or string, optional
         The coordinate of the slice along *axis*. Can be a (value,
         unit) tuple, a YTQuantity, or a float. If a float, it will be
         interpreted as in units of code_length.
     """
-    def __init__(self, ds, axis, fields, coord, **kwargs):
+    def __init__(self, ds, axis, fields, coord="c", **kwargs):
         fields = ensure_list(fields)
         axis = fix_axis(axis, ds)
-        if isinstance(coord, tuple):
+        if coord == "c":
+            coord = ds.domain_center[axis].value
+        elif coord == "max":
+            v, c = ds.find_max("density")
+            coord = c[axis].value
+        elif isinstance(coord, tuple):
             coord = ds.quan(coord[0], coord[1]).in_units("code_length").value
         elif isinstance(coord, YTQuantity):
             coord = coord.in_units("code_length").value


https://bitbucket.org/yt_analysis/yt/commits/000b21eeddcf/
Changeset:   000b21eeddcf
Branch:      yt-3.0
User:        jzuhone
Date:        2014-07-18 17:43:32
Summary:     Use get_sanitized_center instead
Affected #:  1 file

diff -r 02b81db113ec602d61ea05889526e92711c9d900 -r 000b21eeddcffd4406a5938ab09bfca341bdf60c yt/utilities/fits_image.py
--- a/yt/utilities/fits_image.py
+++ b/yt/utilities/fits_image.py
@@ -13,6 +13,7 @@
 import numpy as np
 from yt.funcs import mylog, iterable, fix_axis, ensure_list
 from yt.visualization.fixed_resolution import FixedResolutionBuffer
+from yt.visualization.plot_window import get_sanitized_center
 from yt.data_objects.construction_data_containers import YTCoveringGridBase
 from yt.utilities.on_demand_imports import _astropy
 from yt.units.yt_array import YTQuantity
@@ -288,24 +289,20 @@
         The axis of the slice. One of "x","y","z", or 0,1,2.
     fields : string or list of strings
         The fields to slice
-    coord : float, tuple, YTQuantity, or string, optional
-        The coordinate of the slice along *axis*. Can be a (value,
-        unit) tuple, a YTQuantity, or a float. If a float, it will be
-        interpreted as in units of code_length.
+    center : A sequence floats, a string, or a tuple.
+         The coordinate of the center of the image. If set to 'c', 'center' or
+         left blank, the plot is centered on the middle of the domain. If set to
+         'max' or 'm', the center will be located at the maximum of the
+         ('gas', 'density') field. Units can be specified by passing in center
+         as a tuple containing a coordinate and string unit name or by passing
+         in a YTArray.  If a list or unitless array is supplied, code units are
+         assumed.
     """
-    def __init__(self, ds, axis, fields, coord="c", **kwargs):
+    def __init__(self, ds, axis, fields, center="c", **kwargs):
         fields = ensure_list(fields)
         axis = fix_axis(axis, ds)
-        if coord == "c":
-            coord = ds.domain_center[axis].value
-        elif coord == "max":
-            v, c = ds.find_max("density")
-            coord = c[axis].value
-        elif isinstance(coord, tuple):
-            coord = ds.quan(coord[0], coord[1]).in_units("code_length").value
-        elif isinstance(coord, YTQuantity):
-            coord = coord.in_units("code_length").value
-        slc = ds.slice(axis, coord, **kwargs)
+        center = get_sanitized_center(center, ds)
+        slc = ds.slice(axis, center[axis], **kwargs)
         w, frb = construct_image(slc)
         super(FITSSlice, self).__init__(frb, fields=fields, wcs=w)
         for i, field in enumerate(fields):


https://bitbucket.org/yt_analysis/yt/commits/b7479711ae9f/
Changeset:   b7479711ae9f
Branch:      yt-3.0
User:        jzuhone
Date:        2014-07-18 19:19:12
Summary:     Merge
Affected #:  235 files

diff -r 000b21eeddcffd4406a5938ab09bfca341bdf60c -r b7479711ae9f0dd2e8854713638419dd44f7622a .hgignore
--- a/.hgignore
+++ b/.hgignore
@@ -41,6 +41,7 @@
 yt/utilities/lib/PointsInVolume.c
 yt/utilities/lib/QuadTree.c
 yt/utilities/lib/RayIntegrators.c
+yt/utilities/lib/ragged_arrays.c
 yt/utilities/lib/VolumeIntegrator.c
 yt/utilities/lib/grid_traversal.c
 yt/utilities/lib/GridTree.c

diff -r 000b21eeddcffd4406a5938ab09bfca341bdf60c -r b7479711ae9f0dd2e8854713638419dd44f7622a doc/README
--- a/doc/README
+++ b/doc/README
@@ -5,6 +5,6 @@
 http://sphinx.pocoo.org/
 
 Because the documentation requires a number of dependencies, we provide
-pre-build versions online, accessible here:
+pre-built versions online, accessible here:
 
-http://yt-project.org/docs/
+http://yt-project.org/docs/dev-3.0/

diff -r 000b21eeddcffd4406a5938ab09bfca341bdf60c -r b7479711ae9f0dd2e8854713638419dd44f7622a doc/install_script.sh
--- a/doc/install_script.sh
+++ b/doc/install_script.sh
@@ -567,8 +567,10 @@
 
 mkdir -p ${DEST_DIR}/data
 cd ${DEST_DIR}/data
-echo 'de6d8c6ea849f0206d219303329a0276b3cce7c051eec34377d42aacbe0a4f47ac5145eb08966a338ecddd2b83c8f787ca9956508ad5c39ee2088ad875166410  xray_emissivity.h5' > xray_emissivity.h5.sha512
-get_ytdata xray_emissivity.h5
+echo 'de6d8c6ea849f0206d219303329a0276b3cce7c051eec34377d42aacbe0a4f47ac5145eb08966a338ecddd2b83c8f787ca9956508ad5c39ee2088ad875166410  cloudy_emissivity.h5' > cloudy_emissivity.h5.sha512
+[ ! -e cloudy_emissivity.h5 ] && get_ytdata cloudy_emissivity.h5
+echo '0f714ae2eace0141b1381abf1160dc8f8a521335e886f99919caf3beb31df1fe271d67c7b2a804b1467949eb16b0ef87a3d53abad0e8160fccac1e90d8d9e85f  apec_emissivity.h5' > apec_emissivity.h5.sha512
+[ ! -e apec_emissivity.h5 ] && get_ytdata apec_emissivity.h5
 
 # Set paths to what they should be when yt is activated.
 export PATH=${DEST_DIR}/bin:$PATH
@@ -608,7 +610,6 @@
 echo '3f53d0b474bfd79fea2536d0a9197eaef6c0927e95f2f9fd52dbd6c1d46409d0e649c21ac418d8f7767a9f10fe6114b516e06f2be4b06aec3ab5bdebc8768220  Forthon-0.8.11.tar.gz' > Forthon-0.8.11.tar.gz.sha512
 echo '4941f5aa21aff3743546495fb073c10d2657ff42b2aff401903498638093d0e31e344cce778980f28a7170c6d29eab72ac074277b9d4088376e8692dc71e55c1  PyX-0.12.1.tar.gz' > PyX-0.12.1.tar.gz.sha512
 echo '3df0ba4b1cfef5f02fb27925de4c2ca414eca9000af6a3d475d39063720afe987287c3d51377e0a36b88015573ef699f700782e1749c7a357b8390971d858a79  Python-2.7.6.tgz' > Python-2.7.6.tgz.sha512
-echo '172f2bc671145ebb0add2669c117863db35851fb3bdb192006cd710d4d038e0037497eb39a6d01091cb923f71a7e8982a77b6e80bf71d6275d5d83a363c8d7e5  rockstar-0.99.6.tar.gz' > rockstar-0.99.6.tar.gz.sha512
 echo '276bd9c061ec9a27d478b33078a86f93164ee2da72210e12e2c9da71dcffeb64767e4460b93f257302b09328eda8655e93c4b9ae85e74472869afbeae35ca71e  blas.tar.gz' > blas.tar.gz.sha512
 echo '00ace5438cfa0c577e5f578d8a808613187eff5217c35164ffe044fbafdfec9e98f4192c02a7d67e01e5a5ccced630583ad1003c37697219b0f147343a3fdd12  bzip2-1.0.6.tar.gz' > bzip2-1.0.6.tar.gz.sha512
 echo 'a296dfcaef7e853e58eed4e24b37c4fa29cfc6ac688def048480f4bb384b9e37ca447faf96eec7b378fd764ba291713f03ac464581d62275e28eb2ec99110ab6  reason-js-20120623.zip' > reason-js-20120623.zip.sha512
@@ -624,7 +625,6 @@
 echo 'd58177f3971b6d07baf6f81a2088ba371c7e43ea64ee7ada261da97c6d725b4bd4927122ac373c55383254e4e31691939276dab08a79a238bfa55172a3eff684  numpy-1.7.1.tar.gz' > numpy-1.7.1.tar.gz.sha512
 echo '9c0a61299779aff613131aaabbc255c8648f0fa7ab1806af53f19fbdcece0c8a68ddca7880d25b926d67ff1b9201954b207919fb09f6a290acb078e8bbed7b68  python-hglib-1.0.tar.gz' > python-hglib-1.0.tar.gz.sha512
 echo 'c65013293dd4049af5db009fdf7b6890a3c6b1e12dd588b58fb5f5a5fef7286935851fb7a530e03ea16f28de48b964e50f48bbf87d34545fd23b80dd4380476b  pyzmq-13.1.0.tar.gz' > pyzmq-13.1.0.tar.gz.sha512
-echo '172f2bc671145ebb0add2669c117863db35851fb3bdb192006cd710d4d038e0037497eb39a6d01091cb923f71a7e8982a77b6e80bf71d6275d5d83a363c8d7e5  rockstar-0.99.6.tar.gz' > rockstar-0.99.6.tar.gz.sha512
 echo '80c8e137c3ccba86575d4263e144ba2c4684b94b5cd620e200f094c92d4e118ea6a631d27bdb259b0869771dfaeeae68c0fdd37fdd740b9027ee185026e921d4  scipy-0.12.0.tar.gz' > scipy-0.12.0.tar.gz.sha512
 echo '96f3e51b46741450bc6b63779c10ebb4a7066860fe544385d64d1eda52592e376a589ef282ace2e1df73df61c10eab1a0d793abbdaf770e60289494d4bf3bcb4  sqlite-autoconf-3071700.tar.gz' > sqlite-autoconf-3071700.tar.gz.sha512
 echo '2992baa3edfb4e1842fb642abf0bf0fc0bf56fc183aab8fed6b3c42fbea928fa110ede7fdddea2d63fc5953e8d304b04da433dc811134fadefb1eecc326121b8  sympy-0.7.3.tar.gz' > sympy-0.7.3.tar.gz.sha512
@@ -657,7 +657,6 @@
 get_ytproject $NOSE.tar.gz
 get_ytproject $PYTHON_HGLIB.tar.gz
 get_ytproject $SYMPY.tar.gz
-get_ytproject $ROCKSTAR.tar.gz
 if [ $INST_BZLIB -eq 1 ]
 then
     if [ ! -e $BZLIB/done ]
@@ -816,6 +815,7 @@
         YT_DIR=`dirname $ORIG_PWD`
     elif [ ! -e yt-hg ]
     then
+        echo "Cloning yt"
         YT_DIR="$PWD/yt-hg/"
         ( ${HG_EXEC} --debug clone https://bitbucket.org/yt_analysis/yt-supplemental/ 2>&1 ) 1>> ${LOG_FILE}
         # Recently the hg server has had some issues with timeouts.  In lieu of
@@ -824,9 +824,9 @@
         ( ${HG_EXEC} --debug clone https://bitbucket.org/yt_analysis/yt/ ./yt-hg 2>&1 ) 1>> ${LOG_FILE}
         # Now we update to the branch we're interested in.
         ( ${HG_EXEC} -R ${YT_DIR} up -C ${BRANCH} 2>&1 ) 1>> ${LOG_FILE}
-    elif [ -e yt-3.0-hg ] 
+    elif [ -e yt-hg ]
     then
-        YT_DIR="$PWD/yt-3.0-hg/"
+        YT_DIR="$PWD/yt-hg/"
     fi
     echo Setting YT_DIR=${YT_DIR}
 fi
@@ -943,14 +943,19 @@
 # Now we build Rockstar and set its environment variable.
 if [ $INST_ROCKSTAR -eq 1 ]
 then
-    if [ ! -e Rockstar/done ]
+    if [ ! -e rockstar/done ]
     then
-        [ ! -e Rockstar ] && tar xfz $ROCKSTAR.tar.gz
         echo "Building Rockstar"
-        cd Rockstar
+        if [ ! -e rockstar ]
+        then
+            ( hg clone http://bitbucket.org/MatthewTurk/rockstar 2>&1 ) 1>> ${LOG_FILE}
+        fi
+        cd rockstar
+        ( hg pull 2>&1 ) 1>> ${LOG_FILE}
+        ( hg up -C tip 2>&1 ) 1>> ${LOG_FILE}
         ( make lib 2>&1 ) 1>> ${LOG_FILE} || do_exit
         cp librockstar.so ${DEST_DIR}/lib
-        ROCKSTAR_DIR=${DEST_DIR}/src/Rockstar
+        ROCKSTAR_DIR=${DEST_DIR}/src/rockstar
         echo $ROCKSTAR_DIR > ${YT_DIR}/rockstar.cfg
         touch done
         cd ..

diff -r 000b21eeddcffd4406a5938ab09bfca341bdf60c -r b7479711ae9f0dd2e8854713638419dd44f7622a doc/source/analyzing/analysis_modules/Halo_Analysis.ipynb
--- a/doc/source/analyzing/analysis_modules/Halo_Analysis.ipynb
+++ b/doc/source/analyzing/analysis_modules/Halo_Analysis.ipynb
@@ -36,10 +36,15 @@
      "input": [
       "from yt.mods import *\n",
       "from yt.analysis_modules.halo_analysis.api import *\n",
-      "path = ytcfg.get(\"yt\", \"test_data_dir\")\n",
+      "import tempfile\n",
+      "import shutil\n",
+      "import os\n",
+      "\n",
+      "# Create temporary directory for storing files\n",
+      "tmpdir = tempfile.mkdtemp()\n",
       "\n",
       "# Load the data set with the full simulation information\n",
-      "data_pf = load(path+'Enzo_64/RD0006/RedshiftOutput0006')"
+      "data_pf = load('Enzo_64/RD0006/RedshiftOutput0006')"
      ],
      "language": "python",
      "metadata": {},
@@ -57,7 +62,7 @@
      "collapsed": false,
      "input": [
       "# Load the rockstar data files\n",
-      "halos_pf = load(path+'rockstar_halos/halos_0.0.bin')"
+      "halos_pf = load('rockstar_halos/halos_0.0.bin')"
      ],
      "language": "python",
      "metadata": {},
@@ -76,7 +81,7 @@
      "input": [
       "# Instantiate a catalog using those two paramter files\n",
       "hc = HaloCatalog(data_pf=data_pf, halos_pf=halos_pf, \n",
-      "                 output_dir = path+'halo_catalog')"
+      "                 output_dir=os.path.join(tmpdir, 'halo_catalog'))"
      ],
      "language": "python",
      "metadata": {},
@@ -202,8 +207,8 @@
      "cell_type": "code",
      "collapsed": false,
      "input": [
-      "hc.add_callback('sphere', radius_field='radius_200', factor = 5,\n",
-      "        field_parameters = dict(virial_radius=('quantity','radius_200')))"
+      "hc.add_callback('sphere', radius_field='radius_200', factor=5,\n",
+      "                field_parameters=dict(virial_radius=('quantity', 'radius_200')))"
      ],
      "language": "python",
      "metadata": {},
@@ -221,9 +226,9 @@
      "collapsed": false,
      "input": [
       "hc.add_callback('profile', 'virial_radius', [('gas','temperature')],\n",
-      "        storage = 'virial_profiles',\n",
-      "        weight_field = 'cell_mass', \n",
-      "        accumulation=False, output_dir='profiles')\n"
+      "                storage='virial_profiles',\n",
+      "                weight_field='cell_mass', \n",
+      "                accumulation=False, output_dir='profiles')\n"
      ],
      "language": "python",
      "metadata": {},
@@ -290,9 +295,10 @@
      "cell_type": "code",
      "collapsed": false,
      "input": [
-      "halos_pf =  load(path+'halo_catalog/halo_catalog.0.h5')\n",
+      "halos_pf =  load(os.path.join(tmpdir, 'halo_catalog/halo_catalog.0.h5'))\n",
       "\n",
-      "hc_reloaded = HaloCatalog(halos_pf=halos_pf, output_dir=path+'halo_catalog')"
+      "hc_reloaded = HaloCatalog(halos_pf=halos_pf,\n",
+      "                          output_dir=os.path.join(tmpdir, 'halo_catalog'))"
      ],
      "language": "python",
      "metadata": {},
@@ -309,8 +315,8 @@
      "cell_type": "code",
      "collapsed": false,
      "input": [
-      "hc_reloaded.add_callback('load_profiles',storage='virial_profiles',\n",
-      "        output_dir='profiles')"
+      "hc_reloaded.add_callback('load_profiles', storage='virial_profiles',\n",
+      "                         output_dir='profiles')"
      ],
      "language": "python",
      "metadata": {},
@@ -362,7 +368,10 @@
       "halo = hc_reloaded.halo_list[0]\n",
       "\n",
       "radius = halo.virial_profiles['virial_radius']\n",
-      "temperature = halo.virial_profiles[u\"('gas', 'temperature')\"]"
+      "temperature = halo.virial_profiles[u\"('gas', 'temperature')\"]\n",
+      "\n",
+      "# Remove output files, that are no longer needed\n",
+      "shutil.rmtree(tmpdir)"
      ],
      "language": "python",
      "metadata": {},
@@ -382,7 +391,7 @@
       "%matplotlib inline\n",
       "import matplotlib.pyplot as plt\n",
       "\n",
-      "plt.plot(radius,temperature)\n",
+      "plt.plot(radius, temperature)\n",
       "\n",
       "plt.semilogy()\n",
       "plt.xlabel('$\\mathrm{R/R_{vir}}$')\n",

diff -r 000b21eeddcffd4406a5938ab09bfca341bdf60c -r b7479711ae9f0dd2e8854713638419dd44f7622a doc/source/analyzing/analysis_modules/PPVCube.ipynb
--- a/doc/source/analyzing/analysis_modules/PPVCube.ipynb
+++ b/doc/source/analyzing/analysis_modules/PPVCube.ipynb
@@ -1,7 +1,7 @@
 {
  "metadata": {
   "name": "",
-  "signature": "sha256:3a720e0a18272564522f9fc23553431908d6f2b4f3e3e7dfe5b3e690e2e37677"
+  "signature": "sha256:3f810954006851303837edb8fd85ee6583a883122b0f4867903562546c4f19d2"
  },
  "nbformat": 3,
  "nbformat_minor": 0,
@@ -16,6 +16,18 @@
      ]
     },
     {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "%matplotlib inline\n",
+      "from yt.mods import *\n",
+      "from yt.analysis_modules.api import PPVCube"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": []
+    },
+    {
      "cell_type": "markdown",
      "metadata": {},
      "source": [
@@ -44,30 +56,40 @@
      ]
     },
     {
+     "cell_type": "markdown",
+     "metadata": {},
+     "source": [
+      "First, we'll set up the grid and the parameters of the profiles:"
+     ]
+    },
+    {
      "cell_type": "code",
      "collapsed": false,
      "input": [
-      "%matplotlib inline\n",
-      "from yt.mods import *\n",
-      "from yt.analysis_modules.api import PPVCube"
+      "nx,ny,nz = (256,256,256) # domain dimensions\n",
+      "R = 10. # outer radius of disk, kpc\n",
+      "r_0 = 3. # scale radius, kpc\n",
+      "beta = 1.4 # for the tangential velocity profile\n",
+      "alpha = -1. # for the radial density profile\n",
+      "x, y = np.mgrid[-R:R:nx*1j,-R:R:ny*1j] # cartesian coordinates of x-y plane of disk\n",
+      "r = np.sqrt(x*x+y*y) # polar coordinates\n",
+      "theta = np.arctan2(y, x) # polar coordinates"
      ],
      "language": "python",
      "metadata": {},
      "outputs": []
     },
     {
+     "cell_type": "markdown",
+     "metadata": {},
+     "source": [
+      "Second, we'll construct the data arrays for the density and the velocity of the disk. Since we have the tangential velocity profile, we have to use the polar coordinates we derived earlier to compute `velx` and `vely`. Everywhere outside the disk, all fields are set to zero.  "
+     ]
+    },
+    {
      "cell_type": "code",
      "collapsed": false,
      "input": [
-      "data = {}\n",
-      "nx,ny,nz = (256,256,256)\n",
-      "R = 10. # kpc\n",
-      "r_0 = 3. # kpc\n",
-      "beta = 1.4\n",
-      "alpha = -1.\n",
-      "x, y = np.mgrid[-R:R:nx*1j,-R:R:ny*1j] # cartesian coordinates\n",
-      "r = np.sqrt(x*x+y*y) # polar coordinates\n",
-      "theta = np.arctan2(y, x) # polar coordinates\n",
       "dens = np.zeros((nx,ny,nz))\n",
       "dens[:,:,nz/2-3:nz/2+3] = (r**alpha).reshape(nx,ny,1) # the density profile of the disk\n",
       "vel_theta = r/(1.+(r/r_0)**beta) # the azimuthal velocity profile of the disk\n",
@@ -75,11 +97,31 @@
       "vely = np.zeros((nx,ny,nz))\n",
       "velx[:,:,nz/2-3:nz/2+3] = (-vel_theta*np.sin(theta)).reshape(nx,ny,1) # convert polar to cartesian\n",
       "vely[:,:,nz/2-3:nz/2+3] = (vel_theta*np.cos(theta)).reshape(nx,ny,1) # convert polar to cartesian\n",
+      "dens[r > R] = 0.0\n",
+      "velx[r > R] = 0.0\n",
+      "vely[r > R] = 0.0"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": []
+    },
+    {
+     "cell_type": "markdown",
+     "metadata": {},
+     "source": [
+      "Finally, we'll package these data arrays up into a dictionary, which will then be shipped off to `load_uniform_grid`. We'll define the width of the grid to be `2*R` kpc, which will be equal to 1  `code_length`. "
+     ]
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "data = {}\n",
       "data[\"density\"] = (dens,\"g/cm**3\")\n",
       "data[\"velocity_x\"] = (velx, \"km/s\")\n",
       "data[\"velocity_y\"] = (vely, \"km/s\")\n",
       "data[\"velocity_z\"] = (np.zeros((nx,ny,nz)), \"km/s\") # zero velocity in the z-direction\n",
-      "bbox = np.array([[-0.5,0.5],[-0.5,0.5],[-0.5,0.5]])\n",
+      "bbox = np.array([[-0.5,0.5],[-0.5,0.5],[-0.5,0.5]]) # bbox of width 1 on a side with center (0,0,0)\n",
       "ds = load_uniform_grid(data, (nx,ny,nz), length_unit=(2*R,\"kpc\"), nprocs=1, bbox=bbox)"
      ],
      "language": "python",
@@ -146,7 +188,7 @@
      "cell_type": "code",
      "collapsed": false,
      "input": [
-      "cube = PPVCube(ds, L, \"density\", dims=(200,100,50), velocity_bounds=(-0.5,0.5,\"km/s\"))"
+      "cube = PPVCube(ds, L, \"density\", dims=(200,100,50), velocity_bounds=(-1.5,1.5,\"km/s\"))"
      ],
      "language": "python",
      "metadata": {},
@@ -180,8 +222,18 @@
      "cell_type": "code",
      "collapsed": false,
      "input": [
-      "ds = load(\"cube.fits\")\n",
-      "slc = SlicePlot(ds, \"z\", [\"density\"], center=\"c\") # sliced at the center of the domain\n",
+      "pf = load(\"cube.fits\")"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": []
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "# Specifying no center gives us the center slice\n",
+      "slc = SlicePlot(pf, \"z\", [\"density\"])\n",
       "slc.show()"
      ],
      "language": "python",
@@ -192,19 +244,11 @@
      "cell_type": "code",
      "collapsed": false,
      "input": [
-      "# To figure out what the domain center and width is in pixel (code length) units:\n",
-      "print ds.domain_center\n",
-      "print ds.domain_width"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "slc = SlicePlot(ds, \"z\", [\"density\"], center=[100.5,50.5,-250.0]) # \"z\" slice is in m/s\n",
+      "import yt.units as u\n",
+      "# Picking different velocities for the slices\n",
+      "new_center = pf.domain_center\n",
+      "new_center[2] = pf.spec2pixel(-1.0*u.km/u.s)\n",
+      "slc = SlicePlot(pf, \"z\", [\"density\"], center=new_center)\n",
       "slc.show()"
      ],
      "language": "python",
@@ -215,7 +259,8 @@
      "cell_type": "code",
      "collapsed": false,
      "input": [
-      "slc = SlicePlot(ds, \"z\", [\"density\"], center=[100.5,50.5,300.0])\n",
+      "new_center[2] = pf.spec2pixel(0.7*u.km/u.s)\n",
+      "slc = SlicePlot(pf, \"z\", [\"density\"], center=new_center)\n",
       "slc.show()"
      ],
      "language": "python",
@@ -225,7 +270,31 @@
     {
      "cell_type": "code",
      "collapsed": false,
-     "input": [],
+     "input": [
+      "new_center[2] = pf.spec2pixel(-0.3*u.km/u.s)\n",
+      "slc = SlicePlot(pf, \"z\", [\"density\"], center=new_center)\n",
+      "slc.show()"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": []
+    },
+    {
+     "cell_type": "markdown",
+     "metadata": {},
+     "source": [
+      "If we project all the emission at all the different velocities along the z-axis, we recover the entire disk:"
+     ]
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "prj = ProjectionPlot(pf, \"z\", [\"density\"], proj_style=\"sum\")\n",
+      "prj.set_log(\"density\", True)\n",
+      "prj.set_zlim(\"density\", 1.0e-3, 0.2)\n",
+      "prj.show()"
+     ],
      "language": "python",
      "metadata": {},
      "outputs": []

diff -r 000b21eeddcffd4406a5938ab09bfca341bdf60c -r b7479711ae9f0dd2e8854713638419dd44f7622a doc/source/analyzing/analysis_modules/SZ_projections.ipynb
--- a/doc/source/analyzing/analysis_modules/SZ_projections.ipynb
+++ b/doc/source/analyzing/analysis_modules/SZ_projections.ipynb
@@ -1,7 +1,7 @@
 {
  "metadata": {
   "name": "",
-  "signature": "sha256:e5d3c629592c8aacbabf2e3fab2660703298886b8de6f36eb7cdc1f60b726496"
+  "signature": "sha256:7fc053480ba7896bfa5905bd69f7b3dd326364fbab324975b76f79640f2e0adf"
  },
  "nbformat": 3,
  "nbformat_minor": 0,
@@ -18,7 +18,7 @@
       "projection of the pressure field of a cluster. However, the *full* S-Z signal is a combination of thermal and kinetic\n",
       "contributions, and for large frequencies and high temperatures\n",
       "relativistic effects are important. For computing the full S-Z signal\n",
-      "incorporating all of these effects, Jens Chluba has written a library:\n",
+      "incorporating all of these effects, there is a library:\n",
       "SZpack ([Chluba et al 2012](http://adsabs.harvard.edu/abs/2012MNRAS.426..510C)). \n",
       "\n",
       "The `sunyaev_zeldovich` analysis module in `yt` makes it possible\n",
@@ -93,10 +93,10 @@
       "from yt.mods import *\n",
       "from yt.analysis_modules.api import SZProjection\n",
       "\n",
-      "pf = load(\"enzo_tiny_cosmology/DD0046/DD0046\")\n",
+      "ds = load(\"enzo_tiny_cosmology/DD0046/DD0046\")\n",
       "\n",
       "freqs = [90.,180.,240.]\n",
-      "szprj = SZProjection(pf, freqs)"
+      "szprj = SZProjection(ds, freqs)"
      ],
      "language": "python",
      "metadata": {},
@@ -108,8 +108,8 @@
      "source": [
       "`freqs` is a list or array of frequencies in GHz at which the signal\n",
       "is to be computed. The `SZProjection` constructor also accepts the\n",
-      "optional keywords, **mue** (mean molecular weight for computing the\n",
-      "electron number density, 1.143 is the default) and **high_order** (set\n",
+      "optional keywords, `mue` (mean molecular weight for computing the\n",
+      "electron number density, 1.143 is the default) and `high_order` (set\n",
       "to True to compute terms in the S-Z signal expansion up to\n",
       "second-order in $T_{e,SZ}$ and $\\beta$). "
      ]
@@ -127,7 +127,7 @@
      "collapsed": false,
      "input": [
       "# An on-axis projection along the z-axis with width 10 Mpc, centered on the gas density maximum\n",
-      "szprj.on_axis(\"z\", center=\"max\", width=(10.0, \"mpc\"), nx=400)"
+      "szprj.on_axis(\"z\", center=\"max\", width=(10.0, \"Mpc\"), nx=400)"
      ],
      "language": "python",
      "metadata": {},
@@ -144,7 +144,7 @@
       "which can be accessed dict-like from the projection object (e.g.,\n",
       "`szprj[\"90_GHz\"]`). Projections of other quantities may also be\n",
       "accessed; to see what fields are available call `szprj.keys()`. The methods also accept standard ``yt``\n",
-      "keywords for projections such as **center**, **width**, and **source**. The image buffer size can be controlled by setting **nx**.  \n"
+      "keywords for projections such as `center`, `width`, and `source`. The image buffer size can be controlled by setting `nx`.  \n"
      ]
     },
     {
@@ -216,8 +216,16 @@
      "source": [
       "which would write all of the projections to a single FITS file,\n",
       "including coordinate information in kpc. The optional keyword\n",
-      "**clobber** allows a previous file to be overwritten. \n"
+      "`clobber` allows a previous file to be overwritten. \n"
      ]
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [],
+     "language": "python",
+     "metadata": {},
+     "outputs": []
     }
    ],
    "metadata": {}

diff -r 000b21eeddcffd4406a5938ab09bfca341bdf60c -r b7479711ae9f0dd2e8854713638419dd44f7622a doc/source/analyzing/analysis_modules/ellipsoid_analysis.rst
--- a/doc/source/analyzing/analysis_modules/ellipsoid_analysis.rst
+++ b/doc/source/analyzing/analysis_modules/ellipsoid_analysis.rst
@@ -58,7 +58,7 @@
   from yt.mods import *
   from yt.analysis_modules.halo_finding.api import *
 
-  pf=load('RD0006/RD0006')
+  pf=load('Enzo_64/RD0006/RedshiftOutput0006')
   halo_list = parallelHF(pf)
   halo_list.dump('MyHaloList')
 
@@ -69,7 +69,7 @@
   from yt.mods import *
   from yt.analysis_modules.halo_finding.api import *
 
-  pf=load('RD0006/RD0006')
+  pf=load('Enzo_64/RD0006/RedshiftOutput0006')
   haloes = LoadHaloes(pf, 'MyHaloList')
 
 Once the halo information is saved you can load it into the data

diff -r 000b21eeddcffd4406a5938ab09bfca341bdf60c -r b7479711ae9f0dd2e8854713638419dd44f7622a doc/source/analyzing/analysis_modules/halo_catalogs.rst
--- a/doc/source/analyzing/analysis_modules/halo_catalogs.rst
+++ b/doc/source/analyzing/analysis_modules/halo_catalogs.rst
@@ -226,4 +226,4 @@
 =======
 
 For a full example of how to use these methods together see 
-:ref:`halo_analysis_example`.
+:doc:`halo_analysis_example`.

diff -r 000b21eeddcffd4406a5938ab09bfca341bdf60c -r b7479711ae9f0dd2e8854713638419dd44f7622a doc/source/analyzing/analysis_modules/photon_simulator.rst
--- a/doc/source/analyzing/analysis_modules/photon_simulator.rst
+++ b/doc/source/analyzing/analysis_modules/photon_simulator.rst
@@ -1,6 +1,11 @@
 Constructing Mock X-ray Observations
 ------------------------------------
 
+.. note::
+
+  If you just want to create derived fields for X-ray emission,
+  you should go `here <xray_emission_fields.html>`_ instead.
+
 The ``photon_simulator`` analysis module enables the creation of
 simulated X-ray photon lists of events from datasets that ``yt`` is able
 to read. The simulated events then can be exported to X-ray telescope
@@ -36,20 +41,20 @@
 .. code:: python
 
     from yt.mods import *
-    from yt.analysis_modules.api import *
+    from yt.analysis_modules.photon_simulator.api import *
     from yt.utilities.cosmology import Cosmology
 
 We're going to load up an Athena dataset of a galaxy cluster core:
 
 .. code:: python
 
-    pf = load("MHDSloshing/virgo_low_res.0054.vtk", 
-              parameters={"TimeUnits":3.1557e13,
-                          "LengthUnits":3.0856e24,
-                          "DensityUnits":6.770424595218825e-27})
+    pf = load("MHDSloshing/virgo_low_res.0054.vtk",
+              parameters={"time_unit":(1.0,"Myr"),
+                          "length_unit":(1.0,"Mpc"),
+                          "mass_unit":(1.0e14,"Msun")}) 
 
 First, to get a sense of what the resulting image will look like, let's
-make a new ``yt`` field called ``"DensitySquared"``, since the X-ray
+make a new ``yt`` field called ``"density_squared"``, since the X-ray
 emission is proportional to :math:`\rho^2`, and a weak function of
 temperature and metallicity.
 
@@ -57,14 +62,14 @@
 
     def _density_squared(field, data):
         return data["density"]**2
-    add_field("DensitySquared", function=_density_squared)
+    add_field("density_squared", function=_density_squared)
 
 Then we'll project this field along the z-axis.
 
 .. code:: python
 
-    prj = ProjectionPlot(pf, "z", ["DensitySquared"], width=(500., "kpc"))
-    prj.set_cmap("DensitySquared", "gray_r")
+    prj = ProjectionPlot(ds, "z", ["density_squared"], width=(500., "kpc"))
+    prj.set_cmap("density_squared", "gray_r")
     prj.show()
 
 .. image:: _images/dsquared.png
@@ -89,7 +94,7 @@
 
 .. code:: python
 
-    sp = pf.sphere("c", (250., "kpc"))
+    sp = ds.sphere("c", (250., "kpc"))
 
 This will serve as our ``data_source`` that we will use later. Next, we
 need to create the ``SpectralModel`` instance that will determine how
@@ -258,11 +263,6 @@
     events = photons.project_photons(L, exp_time_new=2.0e5, redshift_new=0.07, absorb_model=abs_model,
                                      sky_center=(187.5,12.333), responses=[ARF,RMF])
 
-.. parsed-literal::
-
-    WARNING:yt:This routine has not been tested to work with all RMFs. YMMV.
-
-
 Also, the optional keyword ``psf_sigma`` specifies a Gaussian standard
 deviation to scatter the photon sky positions around with, providing a
 crude representation of a PSF.
@@ -282,17 +282,17 @@
 
 .. code:: python
 
-    {'eobs': array([  0.32086522,   0.32271389,   0.32562708, ...,   8.90600621,
-             9.73534237,  10.21614256]), 
-     'xsky': array([ 187.5177707 ,  187.4887825 ,  187.50733609, ...,  187.5059345 ,
-            187.49897546,  187.47307048]), 
-     'ysky': array([ 12.33519996,  12.3544496 ,  12.32750903, ...,  12.34907707,
-            12.33327653,  12.32955225]), 
-     'ypix': array([ 133.85374195,  180.68583074,  115.14110561, ...,  167.61447493,
-            129.17278711,  120.11508562]), 
+    {'eobs': YTArray([  0.32086522,   0.32271389,   0.32562708, ...,   8.90600621,
+             9.73534237,  10.21614256]) keV, 
+     'xsky': YTArray([ 187.5177707 ,  187.4887825 ,  187.50733609, ...,  187.5059345 ,
+            187.49897546,  187.47307048]) degree, 
+     'ysky': YTArray([ 12.33519996,  12.3544496 ,  12.32750903, ...,  12.34907707,
+            12.33327653,  12.32955225]) degree, 
+     'ypix': YTArray([ 133.85374195,  180.68583074,  115.14110561, ...,  167.61447493,
+            129.17278711,  120.11508562]) (dimensionless), 
      'PI': array([ 27,  15,  25, ..., 609, 611, 672]), 
-     'xpix': array([  86.26331108,  155.15934197,  111.06337043, ...,  114.39586907,
-            130.93509652,  192.50639633])}
+     'xpix': YTArray([  86.26331108,  155.15934197,  111.06337043, ...,  114.39586907,
+            130.93509652,  192.50639633]) (dimensionless)}
 
 
 We can bin up the events into an image and save it to a FITS file. The
@@ -436,7 +436,7 @@
 
    bbox = np.array([[-0.5,0.5],[-0.5,0.5],[-0.5,0.5]])
 
-   pf = load_uniform_grid(data, ddims, 2*R*cm_per_kpc, bbox=bbox)
+   ds = load_uniform_grid(data, ddims, 2*R*cm_per_kpc, bbox=bbox)
 
 where for simplicity we have set the velocities to zero, though we
 could have created a realistic velocity field as well. Now, we
@@ -445,7 +445,7 @@
 
 .. code:: python
 
-   sphere = pf.sphere(pf.domain_center, 1.0/pf["mpc"])
+   sphere = ds.sphere(ds.domain_center, (1.0,"Mpc"))
        
    A = 6000.
    exp_time = 2.0e5

diff -r 000b21eeddcffd4406a5938ab09bfca341bdf60c -r b7479711ae9f0dd2e8854713638419dd44f7622a doc/source/analyzing/analysis_modules/xray_emission_fields.rst
--- a/doc/source/analyzing/analysis_modules/xray_emission_fields.rst
+++ b/doc/source/analyzing/analysis_modules/xray_emission_fields.rst
@@ -2,41 +2,46 @@
 
 X-ray Emission Fields
 =====================
-.. sectionauthor:: Britton Smith <brittonsmith at gmail.com>
+.. sectionauthor:: Britton Smith <brittonsmith at gmail.com>, John ZuHone <jzuhone at gmail.com>
+
+.. note::
+
+  If you came here trying to figure out how to create simulated X-ray photons and observations,
+  you should go `here <photon_simulator.html>`_ instead.
 
 This functionality provides the ability to create metallicity-dependent 
-X-ray luminosity, emissivity, and photo emissivity fields for a given 
+X-ray luminosity, emissivity, and photon emissivity fields for a given
 photon energy range.  This works by interpolating from emission tables 
-created with the photoionization code, `Cloudy <http://nublado.org/>`_.  
-If you installed yt with the install script, the data should be located in 
-the *data* directory inside the installation directory.  Emission fields can 
-be made for any interval between 0.1 keV and 100 keV.
+created from the photoionization code `Cloudy <http://nublado.org/>`_ or
+the collisional ionization database `AtomDB <http://www.atomdb.org>`_. If
+you installed yt with the install script, these data files should be located in
+the *data* directory inside the installation directory, or can be downloaded
+from `<http://yt-project.org/data>`_. Emission fields can be made for any
+interval between 0.1 keV and 100 keV.
 
 Adding Emission Fields
 ----------------------
 
-Fields can be created for luminosity (erg/s), emissivity (erg/s/cm^3), 
-and photon emissivity (photons/s/cm^3).  The only required arguments are 
-the minimum and maximum energies.
+Fields will be created for luminosity :math:`{\rm (erg~s^{-1})}`, emissivity :math:`{\rm (erg~s^{-1}~cm^{-3})}`,
+and photon emissivity :math:`{\rm (photons~s^{-1}~cm^{-3})}`.  The only required arguments are the
+dataset object, and the minimum and maximum energies of the energy band.
 
 .. code-block:: python
 
-  from yt.mods import *
+  import yt
   from yt.analysis_modules.spectral_integrator.api import \
-       add_xray_luminosity_field, \
-       add_xray_emissivity_field, \
-       add_xray_photon_emissivity_field
+       add_xray_emissivity_field
 
-  add_xray_luminosity_field(0.5, 7)
-  add_xray_emissivity_field(0.5, 7)
-  add_xray_photon_emissivity_field(0.5, 7)
+  xray_fields = add_xray_emissivity_field(0.5, 7.0)
 
 Additional keyword arguments are:
 
- * **filename**  (*string*): Path to data file containing emissivity 
-   values.  If None, a file called xray_emissivity.h5 is used.  This file 
-   contains emissivity tables for primordial elements and for metals at 
-   solar metallicity for the energy range 0.1 to 100 keV.  Default: None.
+ * **filename** (*string*): Path to data file containing emissivity values. If None,
+   a file called "cloudy_emissivity.h5" is used, for photoionized plasmas. A second
+   option, for collisionally ionized plasmas, is in the file "apec_emissivity.h5",
+   available at http://yt-project.org/data. These files contain emissivity tables
+   for primordial elements and for metals at solar metallicity for the energy range
+   0.1 to 100 keV. Default: None.
 
  * **with_metals** (*bool*): If True, use the metallicity field to add the 
    contribution from metals.  If False, only the emission from H/He is 
@@ -46,24 +51,27 @@
    metallicity for the emission from metals.  The *with_metals* keyword 
    must be set to False to use this.  Default: None.
 
-The resulting fields can be used like all normal fields.
+The resulting fields can be used like all normal fields. The function will return the names of
+the created fields in a Python list.
 
-.. python-script::
+.. code-block:: python
 
-  from yt.mods import *
+  import yt
   from yt.analysis_modules.spectral_integrator.api import \
-       add_xray_luminosity_field, \
-       add_xray_emissivity_field, \
-       add_xray_photon_emissivity_field
+       add_xray_emissivity_field
 
-  add_xray_luminosity_field(0.5, 7)
-  add_xray_emissivity_field(0.5, 7)
-  add_xray_photon_emissivity_field(0.5, 7)
+  xray_fields = add_xray_emissivity_field(0.5, 7.0, filename="apec_emissivity.h5")
 
-  pf = load("enzo_tiny_cosmology/DD0046/DD0046")
-  plot = SlicePlot(pf, 'x', 'Xray_Luminosity_0.5_7keV')
+  ds = yt.load("enzo_tiny_cosmology/DD0046/DD0046")
+  plot = yt.SlicePlot(ds, 'x', 'xray_luminosity_0.5_7.0_keV')
   plot.save()
-  plot = ProjectionPlot(pf, 'x', 'Xray_Emissivity_0.5_7keV')
+  plot = yt.ProjectionPlot(ds, 'x', 'xray_emissivity_0.5_7.0_keV')
   plot.save()
-  plot = ProjectionPlot(pf, 'x', 'Xray_Photon_Emissivity_0.5_7keV')
+  plot = yt.ProjectionPlot(ds, 'x', 'xray_photon_emissivity_0.5_7.0_keV')
   plot.save()
+
+.. warning::
+
+  The X-ray fields depend on the number density of hydrogen atoms, in the yt field
+  ``H_number_density``. If this field is not defined (either in the dataset or by the user),
+  the primordial hydrogen mass fraction (X = 0.76) will be used to construct it.
\ No newline at end of file

diff -r 000b21eeddcffd4406a5938ab09bfca341bdf60c -r b7479711ae9f0dd2e8854713638419dd44f7622a doc/source/analyzing/objects.rst
--- a/doc/source/analyzing/objects.rst
+++ b/doc/source/analyzing/objects.rst
@@ -236,7 +236,7 @@
 -------------------------------
 
 Data objects can be cut by their field values using the ``cut_region`` 
-method.  For example, this could be used to compute the total mass within 
+method.  For example, this could be used to compute the total gas mass within
 a certain temperature range, as in the following example.
 
 .. notebook-cell::
@@ -244,11 +244,11 @@
    from yt.mods import *
    ds = load("enzo_tiny_cosmology/DD0046/DD0046")
    ad = ds.all_data()
-   total_mass = ad.quantities.total_mass()
+   total_mass = ad.quantities.total_quantity('cell_mass')
    # now select only gas with 1e5 K < T < 1e7 K.
    new_region = ad.cut_region(['obj["temperature"] > 1e5',
                                'obj["temperature"] < 1e7'])
-   cut_mass = new_region.quantities.total_mass()
+   cut_mass = new_region.quantities.total_quantity('cell_mass')
    print "The fraction of mass in this temperature range is %f." % \
      (cut_mass / total_mass)
 

diff -r 000b21eeddcffd4406a5938ab09bfca341bdf60c -r b7479711ae9f0dd2e8854713638419dd44f7622a doc/source/analyzing/units/2)_Data_Selection_and_fields.ipynb
--- a/doc/source/analyzing/units/2)_Data_Selection_and_fields.ipynb
+++ b/doc/source/analyzing/units/2)_Data_Selection_and_fields.ipynb
@@ -1,7 +1,7 @@
 {
  "metadata": {
   "name": "",
-  "signature": "sha256:b7541e0167001c6dd74306c8490385ace7bdb0533a829286f0505c0b24c67f16"
+  "signature": "sha256:882b31591c60bfe6ad4cb0f8842953d2e94fb8a12ce742be831a65642eea72c9"
  },
  "nbformat": 3,
  "nbformat_minor": 0,
@@ -325,8 +325,7 @@
      "input": [
       "from astropy import units as u\n",
       "x = 42.0 * u.meter\n",
-      "y = YTQuantity(x)\n",
-      "y2 = YTQuantity.from_astropy(x) # Another way to create the quantity"
+      "y = YTQuantity.from_astropy(x) "
      ],
      "language": "python",
      "metadata": {},
@@ -337,8 +336,7 @@
      "collapsed": false,
      "input": [
       "print x, type(x)\n",
-      "print y, type(y)\n",
-      "print y2, type(y2)"
+      "print y, type(y)"
      ],
      "language": "python",
      "metadata": {},
@@ -349,8 +347,7 @@
      "collapsed": false,
      "input": [
       "a = np.random.random(size=10) * u.km/u.s\n",
-      "b = YTArray(a)\n",
-      "b2 = YTArray.from_astropy(a) # Another way to create the quantity"
+      "b = YTArray.from_astropy(a)"
      ],
      "language": "python",
      "metadata": {},
@@ -361,8 +358,7 @@
      "collapsed": false,
      "input": [
       "print a, type(a)\n",
-      "print b, type(b)\n",
-      "print b2, type(b2)"
+      "print b, type(b)"
      ],
      "language": "python",
      "metadata": {},
@@ -438,7 +434,7 @@
      "collapsed": false,
      "input": [
       "k1 = kboltz.to_astropy()\n",
-      "k2 = YTQuantity(kb)\n",
+      "k2 = YTQuantity.from_astropy(kb)\n",
       "print k1 == k2"
      ],
      "language": "python",
@@ -449,7 +445,7 @@
      "cell_type": "code",
      "collapsed": false,
      "input": [
-      "c = YTArray(a)\n",
+      "c = YTArray.from_astropy(a)\n",
       "d = c.to_astropy()\n",
       "print a == d"
      ],

diff -r 000b21eeddcffd4406a5938ab09bfca341bdf60c -r b7479711ae9f0dd2e8854713638419dd44f7622a doc/source/bootcamp/1)_Introduction.ipynb
--- a/doc/source/bootcamp/1)_Introduction.ipynb
+++ b/doc/source/bootcamp/1)_Introduction.ipynb
@@ -1,6 +1,7 @@
 {
  "metadata": {
-  "name": ""
+  "name": "",
+  "signature": "sha256:39620670ce7751b23f30d2123fd3598de1c7843331f65de13e29f4ae9f759e0f"
  },
  "nbformat": 3,
  "nbformat_minor": 0,
@@ -32,9 +33,40 @@
       "5. Derived Fields and Profiles (IsolatedGalaxy dataset)\n",
       "6. Volume Rendering (IsolatedGalaxy dataset)"
      ]
+    },
+    {
+     "cell_type": "markdown",
+     "metadata": {},
+     "source": [
+      "The following code will download the data needed for this tutorial automatically using `curl`. It may take some time so please wait when the kernel is busy. You will need to set `download_datasets` to True before using it."
+     ]
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "download_datasets = False\n",
+      "if download_datasets:\n",
+      "    !curl -sSO http://yt-project.org/data/enzo_tiny_cosmology.tar\n",
+      "    print \"Got enzo_tiny_cosmology\"\n",
+      "    !tar xf enzo_tiny_cosmology.tar\n",
+      "    \n",
+      "    !curl -sSO http://yt-project.org/data/Enzo_64.tar\n",
+      "    print \"Got Enzo_64\"\n",
+      "    !tar xf Enzo_64.tar\n",
+      "    \n",
+      "    !curl -sSO http://yt-project.org/data/IsolatedGalaxy.tar\n",
+      "    print \"Got IsolatedGalaxy\"\n",
+      "    !tar xf IsolatedGalaxy.tar\n",
+      "    \n",
+      "    print \"All done!\""
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": []
     }
    ],
    "metadata": {}
   }
  ]
-}
+}
\ No newline at end of file

diff -r 000b21eeddcffd4406a5938ab09bfca341bdf60c -r b7479711ae9f0dd2e8854713638419dd44f7622a doc/source/bootcamp/2)_Data_Inspection.ipynb
--- a/doc/source/bootcamp/2)_Data_Inspection.ipynb
+++ b/doc/source/bootcamp/2)_Data_Inspection.ipynb
@@ -1,7 +1,7 @@
 {
  "metadata": {
   "name": "",
-  "signature": "sha256:15cdc35ddb8b1b938967237e17534149f734f4e7a61ebd37d74b675f8059da20"
+  "signature": "sha256:9d67e9e4ca5ce92dcd0658025dbfbd28be47b47ca8d4531fdac16cc2c2fa038b"
  },
  "nbformat": 3,
  "nbformat_minor": 0,
@@ -21,7 +21,7 @@
      "cell_type": "code",
      "collapsed": false,
      "input": [
-      "from yt.mods import *"
+      "import yt"
      ],
      "language": "python",
      "metadata": {},
@@ -38,7 +38,7 @@
      "cell_type": "code",
      "collapsed": false,
      "input": [
-      "ds = load(\"IsolatedGalaxy/galaxy0030/galaxy0030\")"
+      "ds = yt.load(\"IsolatedGalaxy/galaxy0030/galaxy0030\")"
      ],
      "language": "python",
      "metadata": {},

diff -r 000b21eeddcffd4406a5938ab09bfca341bdf60c -r b7479711ae9f0dd2e8854713638419dd44f7622a doc/source/bootcamp/3)_Simple_Visualization.ipynb
--- a/doc/source/bootcamp/3)_Simple_Visualization.ipynb
+++ b/doc/source/bootcamp/3)_Simple_Visualization.ipynb
@@ -1,7 +1,7 @@
 {
  "metadata": {
   "name": "",
-  "signature": "sha256:eb5fbf5eb55a9c8997c687f072c8c6030e74bef0048a72b4f74a06893c11b80a"
+  "signature": "sha256:c00ba7fdbbd9ea957d06060ad70f06f629b1fd4ebf5379c1fdad2697ab0a4cd6"
  },
  "nbformat": 3,
  "nbformat_minor": 0,
@@ -21,7 +21,7 @@
      "cell_type": "code",
      "collapsed": false,
      "input": [
-      "from yt.mods import *"
+      "import yt"
      ],
      "language": "python",
      "metadata": {},
@@ -38,7 +38,7 @@
      "cell_type": "code",
      "collapsed": false,
      "input": [
-      "ds = load(\"enzo_tiny_cosmology/DD0046/DD0046\")\n",
+      "ds = yt.load(\"enzo_tiny_cosmology/DD0046/DD0046\")\n",
       "print \"Redshift =\", ds.current_redshift"
      ],
      "language": "python",
@@ -58,7 +58,7 @@
      "cell_type": "code",
      "collapsed": false,
      "input": [
-      "p = ProjectionPlot(ds, \"y\", \"density\")\n",
+      "p = yt.ProjectionPlot(ds, \"y\", \"density\")\n",
       "p.show()"
      ],
      "language": "python",
@@ -135,7 +135,7 @@
      "cell_type": "code",
      "collapsed": false,
      "input": [
-      "p = ProjectionPlot(ds, \"z\", [\"density\", \"temperature\"], weight_field=\"density\")\n",
+      "p = yt.ProjectionPlot(ds, \"z\", [\"density\", \"temperature\"], weight_field=\"density\")\n",
       "p.show()"
      ],
      "language": "python",
@@ -189,8 +189,8 @@
      "cell_type": "code",
      "collapsed": false,
      "input": [
-      "ds = load(\"Enzo_64/DD0043/data0043\")\n",
-      "s = SlicePlot(ds, \"z\", [\"density\", \"velocity_magnitude\"], center=\"max\")\n",
+      "ds = yt.load(\"Enzo_64/DD0043/data0043\")\n",
+      "s = yt.SlicePlot(ds, \"z\", [\"density\", \"velocity_magnitude\"], center=\"max\")\n",
       "s.set_cmap(\"velocity_magnitude\", \"kamae\")\n",
       "s.zoom(10.0)"
      ],
@@ -243,7 +243,7 @@
      "cell_type": "code",
      "collapsed": false,
      "input": [
-      "s = SlicePlot(ds, \"x\", [\"density\"], center=\"max\")\n",
+      "s = yt.SlicePlot(ds, \"x\", [\"density\"], center=\"max\")\n",
       "s.annotate_contour(\"temperature\")\n",
       "s.zoom(2.5)"
      ],
@@ -272,4 +272,4 @@
    "metadata": {}
   }
  ]
-}
+}
\ No newline at end of file

diff -r 000b21eeddcffd4406a5938ab09bfca341bdf60c -r b7479711ae9f0dd2e8854713638419dd44f7622a doc/source/bootcamp/4)_Data_Objects_and_Time_Series.ipynb
--- a/doc/source/bootcamp/4)_Data_Objects_and_Time_Series.ipynb
+++ b/doc/source/bootcamp/4)_Data_Objects_and_Time_Series.ipynb
@@ -1,7 +1,7 @@
 {
  "metadata": {
   "name": "",
-  "signature": "sha256:41293a66cd6fd5eae6da2d0343549144dc53d72e83286999faab3cf21d801f51"
+  "signature": "sha256:a46e1baa90d32045c2b524100f28bad41b3665249612c9a275ee0375a6f4be20"
  },
  "nbformat": 3,
  "nbformat_minor": 0,
@@ -22,8 +22,10 @@
      "collapsed": false,
      "input": [
       "%matplotlib inline\n",
-      "from yt.mods import *\n",
-      "from matplotlib import pylab"
+      "import yt\n",
+      "import numpy as np\n",
+      "from matplotlib import pylab\n",
+      "from yt.analysis_modules.halo_finding.api import HaloFinder"
      ],
      "language": "python",
      "metadata": {},
@@ -44,7 +46,7 @@
      "cell_type": "code",
      "collapsed": false,
      "input": [
-      "ts = DatasetSeries(\"enzo_tiny_cosmology/*/*.hierarchy\")"
+      "ts = yt.DatasetSeries(\"enzo_tiny_cosmology/*/*.hierarchy\")"
      ],
      "language": "python",
      "metadata": {},
@@ -86,8 +88,13 @@
      "cell_type": "code",
      "collapsed": false,
      "input": [
-      "pylab.semilogy(times, rho_ex[:,0], '-xk')\n",
-      "pylab.semilogy(times, rho_ex[:,1], '-xr')"
+      "pylab.semilogy(times, rho_ex[:,0], '-xk', label='Minimum')\n",
+      "pylab.semilogy(times, rho_ex[:,1], '-xr', label='Maximum')\n",
+      "pylab.ylabel(\"Density ($g/cm^3$)\")\n",
+      "pylab.xlabel(\"Time (Gyr)\")\n",
+      "pylab.legend()\n",
+      "pylab.ylim(1e-32, 1e-21)\n",
+      "pylab.show()"
      ],
      "language": "python",
      "metadata": {},
@@ -108,13 +115,15 @@
      "cell_type": "code",
      "collapsed": false,
      "input": [
+      "from yt.units import Msun\n",
+      "\n",
       "mass = []\n",
       "zs = []\n",
       "for ds in ts:\n",
       "    halos = HaloFinder(ds)\n",
       "    dd = ds.all_data()\n",
       "    total_mass = dd.quantities.total_quantity(\"cell_mass\").in_units(\"Msun\")\n",
-      "    total_in_baryons = 0.0\n",
+      "    total_in_baryons = 0.0*Msun\n",
       "    for halo in halos:\n",
       "        sp = halo.get_sphere()\n",
       "        total_in_baryons += sp.quantities.total_quantity(\"cell_mass\").in_units(\"Msun\")\n",
@@ -136,7 +145,11 @@
      "cell_type": "code",
      "collapsed": false,
      "input": [
-      "pylab.loglog(zs, mass, '-xb')"
+      "pylab.semilogx(zs, mass, '-xb')\n",
+      "pylab.xlabel(\"Redshift\")\n",
+      "pylab.ylabel(\"Mass in halos / Total mass\")\n",
+      "pylab.xlim(max(zs), min(zs))\n",
+      "pylab.ylim(-0.01, .18)"
      ],
      "language": "python",
      "metadata": {},
@@ -154,7 +167,9 @@
       "\n",
       "yt provides the ability to examine rays, or lines, through the domain.  Note that these are not periodic, unlike most other data objects.  We create a ray object and can then examine quantities of it.  Rays have the special fields `t` and `dts`, which correspond to the time the ray enters a given cell and the distance it travels through that cell.\n",
       "\n",
-      "To create a ray, we specify the start and end points."
+      "To create a ray, we specify the start and end points.\n",
+      "\n",
+      "Note that we need to convert these arrays to numpy arrays due to a bug in matplotlib 1.3.1."
      ]
     },
     {
@@ -162,7 +177,7 @@
      "collapsed": false,
      "input": [
       "ray = ds.ray([0.1, 0.2, 0.3], [0.9, 0.8, 0.7])\n",
-      "pylab.semilogy(ray[\"t\"], ray[\"density\"])"
+      "pylab.semilogy(np.array(ray[\"t\"]), np.array(ray[\"density\"]))"
      ],
      "language": "python",
      "metadata": {},
@@ -211,10 +226,12 @@
      "cell_type": "code",
      "collapsed": false,
      "input": [
-      "ds = load(\"IsolatedGalaxy/galaxy0030/galaxy0030\")\n",
+      "ds = yt.load(\"IsolatedGalaxy/galaxy0030/galaxy0030\")\n",
       "v, c = ds.find_max(\"density\")\n",
       "sl = ds.slice(0, c[0])\n",
-      "print sl[\"index\", \"x\"], sl[\"index\", \"z\"], sl[\"pdx\"]\n",
+      "print sl[\"index\", \"x\"]\n",
+      "print sl[\"index\", \"z\"]\n",
+      "print sl[\"pdx\"]\n",
       "print sl[\"gas\", \"density\"].shape"
      ],
      "language": "python",
@@ -250,8 +267,8 @@
      "cell_type": "code",
      "collapsed": false,
      "input": [
-      "write_image(np.log10(frb[\"gas\", \"density\"]), \"temp.png\")\n",
-      "from IPython.core.display import Image\n",
+      "yt.write_image(np.log10(frb[\"gas\", \"density\"]), \"temp.png\")\n",
+      "from IPython.display import Image\n",
       "Image(filename = \"temp.png\")"
      ],
      "language": "python",
@@ -274,7 +291,7 @@
      "collapsed": false,
      "input": [
       "cp = ds.cutting([0.2, 0.3, 0.5], \"max\")\n",
-      "pw = cp.to_pw(fields = [\"density\"])"
+      "pw = cp.to_pw(fields = [(\"gas\", \"density\")])"
      ],
      "language": "python",
      "metadata": {},
@@ -309,7 +326,8 @@
      "collapsed": false,
      "input": [
       "pws = sl.to_pw(fields=[\"density\"])\n",
-      "pws.show()"
+      "#pws.show()\n",
+      "print pws.plots.keys()"
      ],
      "language": "python",
      "metadata": {},
@@ -361,4 +379,4 @@
    "metadata": {}
   }
  ]
-}
+}
\ No newline at end of file

diff -r 000b21eeddcffd4406a5938ab09bfca341bdf60c -r b7479711ae9f0dd2e8854713638419dd44f7622a doc/source/bootcamp/5)_Derived_Fields_and_Profiles.ipynb
--- a/doc/source/bootcamp/5)_Derived_Fields_and_Profiles.ipynb
+++ b/doc/source/bootcamp/5)_Derived_Fields_and_Profiles.ipynb
@@ -1,7 +1,7 @@
 {
  "metadata": {
   "name": "",
-  "signature": "sha256:a19d451f3b4dcfeed448caa22c2cac35c46958e0646c19c226b1e467b76d0718"
+  "signature": "sha256:eca573e749829cacda0a8c07c6d5d11d07a5de657563a44b8c4ffff8f735caed"
  },
  "nbformat": 3,
  "nbformat_minor": 0,
@@ -22,7 +22,9 @@
      "collapsed": false,
      "input": [
       "%matplotlib inline\n",
-      "from yt.mods import *\n",
+      "import yt\n",
+      "import numpy as np\n",
+      "from yt import derived_field\n",
       "from matplotlib import pylab"
      ],
      "language": "python",
@@ -61,7 +63,7 @@
      "cell_type": "code",
      "collapsed": false,
      "input": [
-      "ds = load(\"IsolatedGalaxy/galaxy0030/galaxy0030\")\n",
+      "ds = yt.load(\"IsolatedGalaxy/galaxy0030/galaxy0030\")\n",
       "dd = ds.all_data()\n",
       "print dd.quantities.keys()"
      ],
@@ -120,7 +122,9 @@
       "bv = sp.quantities.bulk_velocity()\n",
       "L = sp.quantities.angular_momentum_vector()\n",
       "rho_min, rho_max = sp.quantities.extrema(\"density\")\n",
-      "print bv, L, rho_min, rho_max"
+      "print bv\n",
+      "print L\n",
+      "print rho_min, rho_max"
      ],
      "language": "python",
      "metadata": {},
@@ -143,9 +147,11 @@
      "cell_type": "code",
      "collapsed": false,
      "input": [
-      "prof = Profile1D(sp, \"density\", 32, rho_min, rho_max, True, weight_field=\"cell_mass\")\n",
+      "prof = yt.Profile1D(sp, \"density\", 32, rho_min, rho_max, True, weight_field=\"cell_mass\")\n",
       "prof.add_fields([\"temperature\",\"dinosaurs\"])\n",
-      "pylab.loglog(np.array(prof.x), np.array(prof[\"temperature\"]), \"-x\")"
+      "pylab.loglog(np.array(prof.x), np.array(prof[\"temperature\"]), \"-x\")\n",
+      "pylab.xlabel('Density $(g/cm^3)$')\n",
+      "pylab.ylabel('Temperature $(K)$')"
      ],
      "language": "python",
      "metadata": {},
@@ -162,7 +168,9 @@
      "cell_type": "code",
      "collapsed": false,
      "input": [
-      "pylab.loglog(np.array(prof.x), np.array(prof[\"dinosaurs\"]), '-x')"
+      "pylab.loglog(np.array(prof.x), np.array(prof[\"dinosaurs\"]), '-x')\n",
+      "pylab.xlabel('Density $(g/cm^3)$')\n",
+      "pylab.ylabel('Dinosaurs $(K cm / s)$')"
      ],
      "language": "python",
      "metadata": {},
@@ -179,9 +187,30 @@
      "cell_type": "code",
      "collapsed": false,
      "input": [
-      "prof = Profile1D(sp, \"density\", 32, rho_min, rho_max, True, weight_field=None)\n",
+      "prof = yt.Profile1D(sp, \"density\", 32, rho_min, rho_max, True, weight_field=None)\n",
       "prof.add_fields([\"cell_mass\"])\n",
-      "pylab.loglog(np.array(prof.x), np.array(prof[\"cell_mass\"].in_units(\"Msun\")), '-x')"
+      "pylab.loglog(np.array(prof.x), np.array(prof[\"cell_mass\"].in_units(\"Msun\")), '-x')\n",
+      "pylab.xlabel('Density $(g/cm^3)$')\n",
+      "pylab.ylabel('Cell mass $(M_\\odot)$')"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": []
+    },
+    {
+     "cell_type": "markdown",
+     "metadata": {},
+     "source": [
+      "In addition to the low-level `ProfileND` interface, it's also quite straightforward to quickly create plots of profiles using the `ProfilePlot` class.  Let's redo the last plot using `ProfilePlot`"
+     ]
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "prof = yt.ProfilePlot(sp, 'density', 'cell_mass', weight_field=None)\n",
+      "prof.set_unit('cell_mass', 'Msun')\n",
+      "prof.show()"
      ],
      "language": "python",
      "metadata": {},

diff -r 000b21eeddcffd4406a5938ab09bfca341bdf60c -r b7479711ae9f0dd2e8854713638419dd44f7622a doc/source/bootcamp/6)_Volume_Rendering.ipynb
--- a/doc/source/bootcamp/6)_Volume_Rendering.ipynb
+++ b/doc/source/bootcamp/6)_Volume_Rendering.ipynb
@@ -1,7 +1,7 @@
 {
  "metadata": {
   "name": "",
-  "signature": "sha256:2929940fc3977b495aa124dee851f7602d61e073ed65407dd95e7cf597684b35"
+  "signature": "sha256:2a24bbe82955f9d948b39cbd1b1302968ff57f62f73afb2c7a5c4953393d00ae"
  },
  "nbformat": 3,
  "nbformat_minor": 0,
@@ -21,8 +21,8 @@
      "cell_type": "code",
      "collapsed": false,
      "input": [
-      "from yt.mods import *\n",
-      "ds = load(\"IsolatedGalaxy/galaxy0030/galaxy0030\")"
+      "import yt\n",
+      "ds = yt.load(\"IsolatedGalaxy/galaxy0030/galaxy0030\")"
      ],
      "language": "python",
      "metadata": {},
@@ -43,7 +43,7 @@
      "cell_type": "code",
      "collapsed": false,
      "input": [
-      "tf = ColorTransferFunction((-28, -24))\n",
+      "tf = yt.ColorTransferFunction((-28, -24))\n",
       "tf.add_layers(4, w=0.01)\n",
       "cam = ds.camera([0.5, 0.5, 0.5], [1.0, 1.0, 1.0], (20, 'kpc'), 512, tf, fields=[\"density\"])\n",
       "cam.show()"
@@ -80,7 +80,7 @@
      "cell_type": "code",
      "collapsed": false,
      "input": [
-      "tf = ColorTransferFunction((-28, -25))\n",
+      "tf = yt.ColorTransferFunction((-28, -25))\n",
       "tf.add_layers(4, w=0.03)\n",
       "cam = ds.camera([0.5, 0.5, 0.5], [1.0, 1.0, 1.0], (20.0, 'kpc'), 512, tf, no_ghost=False)\n",
       "cam.show(clip_ratio=4.0)"

diff -r 000b21eeddcffd4406a5938ab09bfca341bdf60c -r b7479711ae9f0dd2e8854713638419dd44f7622a doc/source/cookbook/aligned_cutting_plane.py
--- a/doc/source/cookbook/aligned_cutting_plane.py
+++ /dev/null
@@ -1,18 +0,0 @@
-import yt
-
-# Load the dataset.
-ds = yt.load("IsolatedGalaxy/galaxy0030/galaxy0030")
-
-# Create a 1 kpc radius sphere, centered on the max density.  Note that this
-# sphere is very small compared to the size of our final plot, and it has a
-# non-axially aligned L vector.
-sp = ds.sphere("center", (15.0, "kpc"))
-
-# Get the angular momentum vector for the sphere.
-L = sp.quantities.angular_momentum_vector()
-
-print "Angular momentum vector: {0}".format(L)
-
-# Create an OffAxisSlicePlot on the object with the L vector as its normal
-p = yt.OffAxisSlicePlot(ds, L, "density", sp.center, (25, "kpc"))
-p.save()

diff -r 000b21eeddcffd4406a5938ab09bfca341bdf60c -r b7479711ae9f0dd2e8854713638419dd44f7622a doc/source/cookbook/amrkdtree_downsampling.py
--- a/doc/source/cookbook/amrkdtree_downsampling.py
+++ b/doc/source/cookbook/amrkdtree_downsampling.py
@@ -1,3 +1,6 @@
+### THIS RECIPE IS CURRENTLY BROKEN IN YT-3.0
+### DO NOT TRUST THIS RECIPE UNTIL THIS LINE IS REMOVED 
+
 # Using AMRKDTree Homogenized Volumes to examine large datasets
 # at lower resolution.
 
@@ -10,17 +13,17 @@
 import yt
 from yt.utilities.amr_kdtree.api import AMRKDTree
 
-# Load up a data and print out the maximum refinement level
+# Load up a dataset
 ds = yt.load('IsolatedGalaxy/galaxy0030/galaxy0030')
 
 kd = AMRKDTree(ds)
-# Print out the total volume of all the bricks
-print kd.count_volume()
-# Print out the number of cells
-print kd.count_cells()
+
+# Print out specifics of KD Tree
+print "Total volume of all bricks = %i" % kd.count_volume()
+print "Total number of cells = %i" % kd.count_cells()
 
 tf = yt.ColorTransferFunction((-30, -22))
-cam = ds.h.camera([0.5, 0.5, 0.5], [0.2, 0.3, 0.4], 0.10, 256,
+cam = ds.camera([0.5, 0.5, 0.5], [0.2, 0.3, 0.4], 0.10, 256,
                   tf, volume=kd)
 tf.add_layers(4, 0.01, col_bounds=[-27.5, -25.5], colormap='RdBu_r')
 cam.snapshot("v1.png", clip_ratio=6.0)

diff -r 000b21eeddcffd4406a5938ab09bfca341bdf60c -r b7479711ae9f0dd2e8854713638419dd44f7622a doc/source/cookbook/amrkdtree_to_uniformgrid.py
--- /dev/null
+++ b/doc/source/cookbook/amrkdtree_to_uniformgrid.py
@@ -0,0 +1,33 @@
+import numpy as np
+import yt
+
+#This is an example of how to map an amr data set
+#to a uniform grid. In this case the highest
+#level of refinement is mapped into a 1024x1024x1024 cube
+
+#first the amr data is loaded
+ds = yt.load("~/pfs/galaxy/new_tests/feedback_8bz/DD0021/DD0021")
+
+#next we get the maximum refinement level
+lmax = ds.parameters['MaximumRefinementLevel']
+
+#calculate the center of the domain
+domain_center = (ds.domain_right_edge - ds.domain_left_edge)/2
+
+#determine the cellsize in the highest refinement level
+cell_size = ds.domain_width/(ds.domain_dimensions*2**lmax)
+
+#calculate the left edge of the new grid
+left_edge = domain_center - 512*cell_size
+
+#the number of cells per side of the new grid
+ncells = 1024
+
+#ask yt for the specified covering grid
+cgrid = ds.covering_grid(lmax, left_edge, np.array([ncells,]*3))
+
+#get a map of the density into the new grid
+density_map = cgrid["density"].astype(dtype="float32")
+
+#save the file as a numpy array for convenient future processing
+np.save("/pfs/goldbaum/galaxy/new_tests/feedback_8bz/gas_density_DD0021_log_densities.npy", density_map)

diff -r 000b21eeddcffd4406a5938ab09bfca341bdf60c -r b7479711ae9f0dd2e8854713638419dd44f7622a doc/source/cookbook/average_value.py
--- a/doc/source/cookbook/average_value.py
+++ b/doc/source/cookbook/average_value.py
@@ -5,9 +5,10 @@
 field = "temperature"  # The field to average
 weight = "cell_mass"  # The weight for the average
 
-dd = ds.h.all_data()  # This is a region describing the entire box,
-                      # but note it doesn't read anything in yet!
+ad = ds.all_data()  # This is a region describing the entire box,
+                    # but note it doesn't read anything in yet!
+
 # We now use our 'quantities' call to get the average quantity
-average_value = dd.quantities["WeightedAverageQuantity"](field, weight)
+average_value = ad.quantities.weighted_average_quantity(field, weight)
 
-print "Average %s (weighted by %s) is %0.5e" % (field, weight, average_value)
+print "Average %s (weighted by %s) is %0.3e %s" % (field, weight, average_value, average_value.units)

diff -r 000b21eeddcffd4406a5938ab09bfca341bdf60c -r b7479711ae9f0dd2e8854713638419dd44f7622a doc/source/cookbook/boolean_data_objects.py
--- a/doc/source/cookbook/boolean_data_objects.py
+++ b/doc/source/cookbook/boolean_data_objects.py
@@ -1,23 +1,32 @@
+### THIS RECIPE IS CURRENTLY BROKEN IN YT-3.0
+### DO NOT TRUST THIS RECIPE UNTIL THIS LINE IS REMOVED
+
 import yt
 
 ds = yt.load("Enzo_64/DD0043/data0043")  # load data
-# Make a few data ojbects to start.
+# Make a few data objects to start. Two boxes and two spheres.
 re1 = ds.region([0.5, 0.5, 0.5], [0.4, 0.4, 0.4], [0.6, 0.6, 0.6])
 re2 = ds.region([0.5, 0.5, 0.5], [0.5, 0.5, 0.5], [0.6, 0.6, 0.6])
 sp1 = ds.sphere([0.5, 0.5, 0.5], 0.05)
 sp2 = ds.sphere([0.1, 0.2, 0.3], 0.1)
+
 # The "AND" operator. This will make a region identical to re2.
 bool1 = ds.boolean([re1, "AND", re2])
 xp = bool1["particle_position_x"]
+
 # The "OR" operator. This will make a region identical to re1.
 bool2 = ds.boolean([re1, "OR", re2])
+
 # The "NOT" operator. This will make a region like re1, but with the corner
 # that re2 covers cut out.
 bool3 = ds.boolean([re1, "NOT", re2])
+
 # Disjoint regions can be combined with the "OR" operator.
 bool4 = ds.boolean([sp1, "OR", sp2])
+
 # Find oddly-shaped overlapping regions.
 bool5 = ds.boolean([re2, "AND", sp1])
+
 # Nested logic with parentheses.
 # This is re1 with the oddly-shaped region cut out.
 bool6 = ds.boolean([re1, "NOT", "(", re1, "AND", sp1, ")"])

diff -r 000b21eeddcffd4406a5938ab09bfca341bdf60c -r b7479711ae9f0dd2e8854713638419dd44f7622a doc/source/cookbook/calculating_information.rst
--- a/doc/source/cookbook/calculating_information.rst
+++ b/doc/source/cookbook/calculating_information.rst
@@ -57,3 +57,12 @@
 serial the operation ``for pf in ts:`` would also have worked identically.
 
 .. yt_cookbook:: time_series.py
+
+Complex Derived Fields
+~~~~~~~~~~~~~~~~~~~~~~
+
+This recipe estimates the ratio of gravitational and pressure forces in a galaxy
+cluster simulation.  This shows how to create and work with vector derived 
+fields.
+
+.. yt_cookbook:: hse_field.py

diff -r 000b21eeddcffd4406a5938ab09bfca341bdf60c -r b7479711ae9f0dd2e8854713638419dd44f7622a doc/source/cookbook/camera_movement.py
--- a/doc/source/cookbook/camera_movement.py
+++ b/doc/source/cookbook/camera_movement.py
@@ -1,11 +1,10 @@
+import yt
 import numpy as np
 
-import yt
-
 # Follow the simple_volume_rendering cookbook for the first part of this.
 ds = yt.load("IsolatedGalaxy/galaxy0030/galaxy0030")  # load data
-dd = ds.all_data()
-mi, ma = dd.quantities["Extrema"]("density")
+ad = ds.all_data()
+mi, ma = ad.quantities.extrema("density")
 
 # Set up transfer function
 tf = yt.ColorTransferFunction((np.log10(mi), np.log10(ma)))
@@ -40,4 +39,4 @@
 # Zoom in by a factor of 10 over 5 frames
 for i, snapshot in enumerate(cam.zoomin(10.0, 5, clip_ratio=8.0)):
     snapshot.write_png('camera_movement_%04i.png' % frame)
-    frame += 1
\ No newline at end of file
+    frame += 1

diff -r 000b21eeddcffd4406a5938ab09bfca341bdf60c -r b7479711ae9f0dd2e8854713638419dd44f7622a doc/source/cookbook/complex_plots.rst
--- a/doc/source/cookbook/complex_plots.rst
+++ b/doc/source/cookbook/complex_plots.rst
@@ -36,7 +36,7 @@
 axes.  To focus on what's happening in the x-y plane, we make an additional
 Temperature slice for the bottom-right subpanel.
 
-.. yt-cookbook:: multiplot_2x2_coordaxes_slice.py
+.. yt_cookbook:: multiplot_2x2_coordaxes_slice.py
 
 Multi-Plot Slice and Projections
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

diff -r 000b21eeddcffd4406a5938ab09bfca341bdf60c -r b7479711ae9f0dd2e8854713638419dd44f7622a doc/source/cookbook/contours_on_slice.py
--- a/doc/source/cookbook/contours_on_slice.py
+++ b/doc/source/cookbook/contours_on_slice.py
@@ -1,13 +1,12 @@
 import yt
 
 # first add density contours on a density slice
-pf = yt.load("GasSloshing/sloshing_nomag2_hdf5_plt_cnt_0150")  # load data
-p = yt.SlicePlot(pf, "x", "density")
+ds = yt.load("GasSloshing/sloshing_nomag2_hdf5_plt_cnt_0150")  
+p = yt.SlicePlot(ds, "x", "density")
 p.annotate_contour("density")
 p.save()
 
-# then add temperature contours on the same densty slice
-pf = yt.load("GasSloshing/sloshing_nomag2_hdf5_plt_cnt_0150")  # load data
-p = yt.SlicePlot(pf, "x", "density")
+# then add temperature contours on the same density slice
+p = yt.SlicePlot(ds, "x", "density")
 p.annotate_contour("temperature")
-p.save(str(pf)+'_T_contour')
+p.save(str(ds)+'_T_contour')

diff -r 000b21eeddcffd4406a5938ab09bfca341bdf60c -r b7479711ae9f0dd2e8854713638419dd44f7622a doc/source/cookbook/extract_fixed_resolution_data.py
--- a/doc/source/cookbook/extract_fixed_resolution_data.py
+++ b/doc/source/cookbook/extract_fixed_resolution_data.py
@@ -8,21 +8,26 @@
 level = 2
 dims = ds.domain_dimensions * ds.refine_by**level
 
-# Now, we construct an object that describes the data region and structure we
-# want
-cube = ds.covering_grid(2,  # The level we are willing to extract to; higher
-                            # levels than this will not contribute to the data!
+# We construct an object that describes the data region and structure we want
+# In this case, we want all data up to the maximum "level" of refinement 
+# across the entire simulation volume.  Higher levels than this will not 
+# contribute to our covering grid.
+cube = ds.covering_grid(level,  
                         left_edge=[0.0, 0.0, 0.0],
+                        dims=dims,
                         # And any fields to preload (this is optional!)
-                        dims=dims,
                         fields=["density"])
 
 # Now we open our output file using h5py
-# Note that we open with 'w' which will overwrite existing files!
+# Note that we open with 'w' (write), which will overwrite existing files!
 f = h5py.File("my_data.h5", "w")
 
-# We create a dataset at the root note, calling it density...
+# We create a dataset at the root, calling it "density"
 f.create_dataset("/density", data=cube["density"])
 
 # We close our file
 f.close()
+
+# If we want to then access this datacube in the h5 file, we can now...
+f = h5py.File("my_data.h5", "r")
+print f["density"].value

diff -r 000b21eeddcffd4406a5938ab09bfca341bdf60c -r b7479711ae9f0dd2e8854713638419dd44f7622a doc/source/cookbook/ffmpeg_volume_rendering.py
--- /dev/null
+++ b/doc/source/cookbook/ffmpeg_volume_rendering.py
@@ -0,0 +1,99 @@
+#This is an example of how to make videos of 
+#uniform grid data using Theia and ffmpeg
+
+#The Scene object to hold the ray caster and view camera
+from yt.visualization.volume_rendering.theia.scene import TheiaScene
+
+#GPU based raycasting algorithm to use 
+from yt.visualization.volume_rendering.theia.algorithms.front_to_back import FrontToBackRaycaster
+
+#These will be used to define how to color the data
+from yt.visualization.volume_rendering.transfer_functions import ColorTransferFunction
+from yt.visualization.color_maps import *
+
+#This will be used to launch ffmpeg
+import subprocess as sp
+
+#Of course we need numpy for math magic
+import numpy as np
+
+#Opacity scaling function
+def scale_func(v, mi, ma):
+      return  np.minimum(1.0, (v-mi)/(ma-mi) + 0.0)
+
+#load the uniform grid from a numpy array file
+bolshoi = "/home/bogert/log_densities_1024.npy"
+density_grid = np.load(bolshoi)
+
+#Set the TheiaScene to use the density_grid and 
+#setup the raycaster for a resulting 1080p image
+ts = TheiaScene(volume = density_grid, raycaster = FrontToBackRaycaster(size = (1920,1080) ))
+
+#the min and max values in the data to color
+mi, ma = 0.0, 3.6
+
+#setup colortransferfunction
+bins = 5000
+tf = ColorTransferFunction( (mi, ma), bins)
+tf.map_to_colormap(0.5, ma, colormap="spring", scale_func = scale_func)
+
+#pass the transfer function to the ray caster
+ts.source.raycaster.set_transfer(tf)
+
+#Initial configuration for start of video
+#set initial opacity and brightness values
+#then zoom into the center of the data 30%
+ts.source.raycaster.set_opacity(0.03)
+ts.source.raycaster.set_brightness(2.3)
+ts.camera.zoom(30.0)
+
+#path to ffmpeg executable
+FFMPEG_BIN = "/usr/local/bin/ffmpeg"
+
+pipe = sp.Popen([ FFMPEG_BIN,
+        '-y', # (optional) overwrite the output file if it already exists
+	#This must be set to rawvideo because the image is an array
+        '-f', 'rawvideo', 
+	#This must be set to rawvideo because the image is an array
+        '-vcodec','rawvideo',
+	#The size of the image array and resulting video
+        '-s', '1920x1080', 
+	#This must be rgba to match array format (uint32)
+        '-pix_fmt', 'rgba',
+	#frame rate of video
+        '-r', '29.97', 
+        #Indicate that the input to ffmpeg comes from a pipe
+        '-i', '-', 
+        # Tells FFMPEG not to expect any audio
+        '-an', 
+        #Setup video encoder
+	#Use any encoder you life available from ffmpeg
+        '-vcodec', 'libx264', '-preset', 'ultrafast', '-qp', '0',
+        '-pix_fmt', 'yuv420p',
+        #Name of the output
+        'bolshoiplanck2.mkv' ],
+        stdin=sp.PIPE,stdout=sp.PIPE)
+		
+		
+#Now we loop and produce 500 frames
+for k in range (0,500) :
+    #update the scene resulting in a new image
+    ts.update()
+
+    #get the image array from the ray caster
+    array = ts.source.get_results()
+
+    #send the image array to ffmpeg
+    array.tofile(pipe.stdin)
+
+    #rotate the scene by 0.01 rads in x,y & z
+    ts.camera.rotateX(0.01)
+    ts.camera.rotateZ(0.01)
+    ts.camera.rotateY(0.01)
+
+    #zoom in 0.01% for a total of a 5% zoom
+    ts.camera.zoom(0.01)
+
+
+#Close the pipe to ffmpeg
+pipe.terminate()

diff -r 000b21eeddcffd4406a5938ab09bfca341bdf60c -r b7479711ae9f0dd2e8854713638419dd44f7622a doc/source/cookbook/find_clumps.py
--- a/doc/source/cookbook/find_clumps.py
+++ b/doc/source/cookbook/find_clumps.py
@@ -1,3 +1,6 @@
+### THIS RECIPE IS CURRENTLY BROKEN IN YT-3.0
+### DO NOT TRUST THIS RECIPE UNTIL THIS LINE IS REMOVED
+
 import numpy as np
 
 import yt

diff -r 000b21eeddcffd4406a5938ab09bfca341bdf60c -r b7479711ae9f0dd2e8854713638419dd44f7622a doc/source/cookbook/fit_spectrum.py
--- a/doc/source/cookbook/fit_spectrum.py
+++ b/doc/source/cookbook/fit_spectrum.py
@@ -1,22 +1,21 @@
+### THIS RECIPE IS CURRENTLY BROKEN IN YT-3.0
+### DO NOT TRUST THIS RECIPE UNTIL THIS LINE IS REMOVED
+
 import yt
 from yt.analysis_modules.cosmological_observation.light_ray.api import LightRay
-from yt.analysis_modules.api import AbsorptionSpectrum
+from yt.analysis_modules.absorption_spectrum.api import AbsorptionSpectrum
 from yt.analysis_modules.absorption_spectrum.api import generate_total_fit
 
 # Define and add a field to simulate OVI based on a constant relationship to HI
-def _OVI_NumberDensity(field, data):
-    return data['HI_NumberDensity']
+# Do *NOT* use this for science, because this is not how OVI actually behaves;
+# it is just an example.
 
+ at yt.derived_field(name='OVI_number_density', units='cm**-3')
+def _OVI_number_density(field, data):
+    return data['HI_NumberDensity']*2.0
 
-def _convertOVI(data):
-    return 4.9E-4*.2
 
-yt.add_field('my_OVI_NumberDensity',
-             function=_OVI_NumberDensity,
-             convert_function=_convertOVI)
-
-
-# Define species andi associated parameters to add to continuum
+# Define species and associated parameters to add to continuum
 # Parameters used for both adding the transition to the spectrum
 # and for fitting
 # Note that for single species that produce multiple lines
@@ -37,7 +36,7 @@
                  'init_N': 1E14}
 
 OVI_parameters = {'name': 'OVI',
-                  'field': 'my_OVI_NumberDensity',
+                  'field': 'OVI_number_density',
                   'f': [.1325, .06580],
                   'Gamma': [4.148E8, 4.076E8],
                   'wavelength': [1031.9261, 1037.6167],

diff -r 000b21eeddcffd4406a5938ab09bfca341bdf60c -r b7479711ae9f0dd2e8854713638419dd44f7622a doc/source/cookbook/fits_radio_cubes.ipynb
--- a/doc/source/cookbook/fits_radio_cubes.ipynb
+++ b/doc/source/cookbook/fits_radio_cubes.ipynb
@@ -1,7 +1,7 @@
 {
  "metadata": {
   "name": "",
-  "signature": "sha256:dbc41f6f836cdeb88a549d85e389d6e4e43d163d8c4c267baea8cce0ebdbf441"
+  "signature": "sha256:40add63976fd633e0542cf7674b166507985aa14685be6b4e4e53bd9a23befc2"
  },
  "nbformat": 3,
  "nbformat_minor": 0,
@@ -23,7 +23,7 @@
      "cell_type": "markdown",
      "metadata": {},
      "source": [
-      "This notebook demonstrates some of the capabilties of `yt` on some FITS \"position-position-velocity\" cubes of radio data. "
+      "This notebook demonstrates some of the capabilties of `yt` on some FITS \"position-position-spectrum\" cubes of radio data. "
      ]
     },
     {
@@ -81,8 +81,7 @@
      "collapsed": false,
      "input": [
       "from yt.frontends.fits.misc import PlotWindowWCS\n",
-      "wcs_slc = PlotWindowWCS(slc)\n",
-      "wcs_slc.show()"
+      "PlotWindowWCS(slc)\n"
      ],
      "language": "python",
      "metadata": {},
@@ -99,7 +98,7 @@
      "cell_type": "code",
      "collapsed": false,
      "input": [
-      "wcs_slc.save()"
+      "slc.save()"
      ],
      "language": "python",
      "metadata": {},
@@ -109,14 +108,16 @@
      "cell_type": "markdown",
      "metadata": {},
      "source": [
-      "We can also take slices of this dataset at a few different values along the \"z\" axis (corresponding to the velocity), so let's try a few. First, we'll check what the value along the velocity axis at the domain center is, as well as the range of possible values. This is the third value of each array. "
+      "We can also take slices of this dataset at a few different values along the \"z\" axis (corresponding to the velocity), so let's try a few. To pick specific velocity values for slices, we will need to use the dataset's `spec2pixel` method to determine which pixels to slice on:"
      ]
     },
     {
      "cell_type": "code",
      "collapsed": false,
      "input": [
-      "print ds.domain_left_edge[2], ds.domain_center[2], ds.domain_right_edge[2]"
+      "import yt.units as u\n",
+      "new_center = ds.domain_center\n",
+      "new_center[2] = ds.spec2pixel(-250000.*u.m/u.s)"
      ],
      "language": "python",
      "metadata": {},
@@ -126,15 +127,32 @@
      "cell_type": "markdown",
      "metadata": {},
      "source": [
-      "Now, we'll choose a few values for the velocity within this range:"
+      "Now we can use this new center to create a new slice:"
      ]
     },
     {
      "cell_type": "code",
      "collapsed": false,
      "input": [
-      "new_center = ds.domain_center \n",
-      "new_center[2] = -250000.\n",
+      "slc = yt.SlicePlot(ds, \"z\", [\"intensity\"], center=new_center, origin=\"native\")\n",
+      "slc.show()"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": []
+    },
+    {
+     "cell_type": "markdown",
+     "metadata": {},
+     "source": [
+      "We can do this a few more times for different values of the velocity:"
+     ]
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "new_center[2] = ds.spec2pixel(-100000.*u.m/u.s)\n",
       "slc = yt.SlicePlot(ds, \"z\", [\"intensity\"], center=new_center, origin=\"native\")\n",
       "slc.show()"
      ],
@@ -146,21 +164,7 @@
      "cell_type": "code",
      "collapsed": false,
      "input": [
-      "new_center = ds.domain_center \n",
-      "new_center[2] = -100000.\n",
-      "slc = yt.SlicePlot(ds, \"z\", [\"intensity\"], center=new_center, origin=\"native\")\n",
-      "slc.show()"
-     ],
-     "language": "python",
-     "metadata": {},
-     "outputs": []
-    },
-    {
-     "cell_type": "code",
-     "collapsed": false,
-     "input": [
-      "new_center = ds.domain_center \n",
-      "new_center[2] = -150000.\n",
+      "new_center[2] = ds.spec2pixel(-150000.*u.m/u.s)\n",
       "slc = yt.SlicePlot(ds, \"z\", [\"intensity\"], center=new_center, origin=\"native\")\n",
       "slc.show()"
      ],
@@ -179,14 +183,14 @@
      "cell_type": "markdown",
      "metadata": {},
      "source": [
-      "We can also make a projection of all the emission along the line of sight:"
+      "We can also make a projection of all the emission along the line of sight. Since we're not doing an integration along a path length, we needed to specify `proj_style = \"sum\"`:"
      ]
     },
     {
      "cell_type": "code",
      "collapsed": false,
      "input": [
-      "prj = yt.ProjectionPlot(ds, \"z\", [\"intensity\"], origin=\"native\", proj_style=\"sum\")\n",
+      "prj = yt.ProjectionPlot(ds, \"z\", [\"intensity\"], proj_style=\"sum\", origin=\"native\")\n",
       "prj.show()"
      ],
      "language": "python",
@@ -197,13 +201,6 @@
      "cell_type": "markdown",
      "metadata": {},
      "source": [
-      "Since we're not doing an integration along a path length, we needed to specify `proj_style = \"sum\"`. "
-     ]
-    },
-    {
-     "cell_type": "markdown",
-     "metadata": {},
-     "source": [
       "We can also look at the slices perpendicular to the other axes, which will show us the structure along the velocity axis:"
      ]
     },
@@ -211,8 +208,7 @@
      "cell_type": "code",
      "collapsed": false,
      "input": [
-      "slc = yt.SlicePlot(ds, \"x\", [\"intensity\"], origin=\"native\", \n",
-      "                   aspect=\"auto\", window_size=(8.0,8.0))\n",
+      "slc = yt.SlicePlot(ds, \"x\", [\"intensity\"], origin=\"native\", window_size=(8,8))\n",
       "slc.show()"
      ],
      "language": "python",
@@ -223,8 +219,7 @@
      "cell_type": "code",
      "collapsed": false,
      "input": [
-      "slc = yt.SlicePlot(ds, \"y\", [\"intensity\"], origin=\"native\", \n",
-      "                   aspect=\"auto\", window_size=(8.0,8.0))\n",
+      "slc = yt.SlicePlot(ds, \"y\", [\"intensity\"], origin=\"native\", window_size=(8,8))\n",
       "slc.show()"
      ],
      "language": "python",
@@ -235,7 +230,7 @@
      "cell_type": "markdown",
      "metadata": {},
      "source": [
-      "In these cases, we needed to set `aspect=\"auto\"` and explicitly declare a square `window_size` to get a figure that looks good. "
+      "In these cases, we needed to explicitly declare a square `window_size` to get a figure that looks good. "
      ]
     },
     {
@@ -467,4 +462,4 @@
    "metadata": {}
   }
  ]
-}
\ No newline at end of file
+}

diff -r 000b21eeddcffd4406a5938ab09bfca341bdf60c -r b7479711ae9f0dd2e8854713638419dd44f7622a doc/source/cookbook/fits_xray_images.rst
--- a/doc/source/cookbook/fits_xray_images.rst
+++ b/doc/source/cookbook/fits_xray_images.rst
@@ -1,6 +1,6 @@
 .. _xray_fits:
 
 FITS X-ray Images in yt
-----------------------
+-----------------------
 
-.. notebook:: fits_xray_images.ipynb
\ No newline at end of file
+.. notebook:: fits_xray_images.ipynb

diff -r 000b21eeddcffd4406a5938ab09bfca341bdf60c -r b7479711ae9f0dd2e8854713638419dd44f7622a doc/source/cookbook/free_free_field.py
--- a/doc/source/cookbook/free_free_field.py
+++ b/doc/source/cookbook/free_free_field.py
@@ -1,3 +1,6 @@
+### THIS RECIPE IS CURRENTLY BROKEN IN YT-3.0
+### DO NOT TRUST THIS RECIPE UNTIL THIS LINE IS REMOVED
+
 import numpy as np
 import yt
 # Need to grab the proton mass from the constants database

diff -r 000b21eeddcffd4406a5938ab09bfca341bdf60c -r b7479711ae9f0dd2e8854713638419dd44f7622a doc/source/cookbook/global_phase_plots.py
--- a/doc/source/cookbook/global_phase_plots.py
+++ b/doc/source/cookbook/global_phase_plots.py
@@ -4,10 +4,10 @@
 ds = yt.load("IsolatedGalaxy/galaxy0030/galaxy0030")
 
 # This is an object that describes the entire box
-ad = ds.h.all_data()
+ad = ds.all_data()
 
-# We plot the average VelocityMagnitude (mass-weighted) in our object
-# as a function of Density and temperature
+# We plot the average velocity magnitude (mass-weighted) in our object
+# as a function of density and temperature
 plot = yt.PhasePlot(ad, "density", "temperature", "velocity_magnitude")
 
 # save the plot

diff -r 000b21eeddcffd4406a5938ab09bfca341bdf60c -r b7479711ae9f0dd2e8854713638419dd44f7622a doc/source/cookbook/halo_merger_tree.py
--- a/doc/source/cookbook/halo_merger_tree.py
+++ b/doc/source/cookbook/halo_merger_tree.py
@@ -1,3 +1,6 @@
+### THIS RECIPE IS CURRENTLY BROKEN IN YT-3.0
+### DO NOT TRUST THIS RECIPE UNTIL THIS LINE IS REMOVED
+
 # This script demonstrates some of the halo merger tracking infrastructure,
 # for tracking halos across multiple datadumps in a time series.
 # Ultimately, it outputs an HDF5 file with the important quantities for the

This diff is so big that we needed to truncate the remainder.

https://bitbucket.org/yt_analysis/yt/commits/79d45a527246/
Changeset:   79d45a527246
Branch:      yt-3.0
User:        MatthewTurk
Date:        2014-07-19 15:42:10
Summary:     Merged in jzuhone/yt-3.x/yt-3.0 (pull request #1028)

Default coordinate for FITSSlice
Affected #:  1 file

diff -r 65543888bc8332f02187bfbf40670bef25e5638b -r 79d45a527246e68113025966a6c35a1a2f97b54d yt/utilities/fits_image.py
--- a/yt/utilities/fits_image.py
+++ b/yt/utilities/fits_image.py
@@ -13,6 +13,7 @@
 import numpy as np
 from yt.funcs import mylog, iterable, fix_axis, ensure_list
 from yt.visualization.fixed_resolution import FixedResolutionBuffer
+from yt.visualization.plot_window import get_sanitized_center
 from yt.data_objects.construction_data_containers import YTCoveringGridBase
 from yt.utilities.on_demand_imports import _astropy
 from yt.units.yt_array import YTQuantity
@@ -293,19 +294,20 @@
         The axis of the slice. One of "x","y","z", or 0,1,2.
     fields : string or list of strings
         The fields to slice
-    coord : float, tuple, or YTQuantity
-        The coordinate of the slice along *axis*. Can be a (value,
-        unit) tuple, a YTQuantity, or a float. If a float, it will be
-        interpreted as in units of code_length.
+    center : A sequence floats, a string, or a tuple.
+         The coordinate of the center of the image. If set to 'c', 'center' or
+         left blank, the plot is centered on the middle of the domain. If set to
+         'max' or 'm', the center will be located at the maximum of the
+         ('gas', 'density') field. Units can be specified by passing in center
+         as a tuple containing a coordinate and string unit name or by passing
+         in a YTArray.  If a list or unitless array is supplied, code units are
+         assumed.
     """
-    def __init__(self, ds, axis, fields, coord, **kwargs):
+    def __init__(self, ds, axis, fields, center="c", **kwargs):
         fields = ensure_list(fields)
         axis = fix_axis(axis, ds)
-        if isinstance(coord, tuple):
-            coord = ds.quan(coord[0], coord[1]).in_units("code_length").value
-        elif isinstance(coord, YTQuantity):
-            coord = coord.in_units("code_length").value
-        slc = ds.slice(axis, coord, **kwargs)
+        center = get_sanitized_center(center, ds)
+        slc = ds.slice(axis, center[axis], **kwargs)
         w, frb = construct_image(slc)
         super(FITSSlice, self).__init__(frb, fields=fields, wcs=w)
         for i, field in enumerate(fields):

Repository URL: https://bitbucket.org/yt_analysis/yt/

--

This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.



More information about the yt-svn mailing list