[yt-svn] commit/yt: 5 new changesets

commits-noreply at bitbucket.org commits-noreply at bitbucket.org
Tue Jul 1 04:29:02 PDT 2014


5 new commits in yt:

https://bitbucket.org/yt_analysis/yt/commits/366b6a97ecfd/
Changeset:   366b6a97ecfd
Branch:      yt-3.0
User:        hegan
Date:        2014-06-04 22:58:05
Summary:     Added option to prepend fields to the actions list
Affected #:  1 file

diff -r 6238acba7afaf2ce49353f3305fc6511f90fa9f0 -r 366b6a97ecfd8770a402397fc5abb75a8dcde268 yt/analysis_modules/halo_analysis/halo_catalog.py
--- a/yt/analysis_modules/halo_analysis/halo_catalog.py
+++ b/yt/analysis_modules/halo_analysis/halo_catalog.py
@@ -203,6 +203,10 @@
             field_type = kwargs.pop("field_type")
         else:
             field_type = None
+        if 'prepend' in kwargs:
+            prepend = kwargs.pop("prepend")
+        else:
+            prepend = False
         if field_type is None:
             quantity = quantity_registry.find(key, *args, **kwargs)
         elif (field_type, key) in self.halos_pf.field_info:
@@ -210,7 +214,10 @@
         else:
             raise RuntimeError("HaloCatalog quantity must be a registered function or a field of a known type.")
         self.quantities.append(key)
-        self.actions.append(("quantity", (key, quantity)))
+        if prepend:
+            self.actions.insert(0, ("quantity", (key, quantity)))
+        else:
+            self.actions.append(("quantity", (key, quantity)))
 
     def add_filter(self, halo_filter, *args, **kwargs):
         r"""
@@ -364,7 +371,7 @@
             self.data_source = self.halos_pf.all_data()
 
             # Add all of the default quantities that all halos must have
-            self.add_default_quantities('all')
+            self.add_default_quantities('all', prepend=True)
 
         my_index = np.argsort(self.data_source["particle_identifier"])
         for i in parallel_objects(my_index, njobs=njobs, dynamic=dynamic):
@@ -429,11 +436,11 @@
                 dataset.attrs["units"] = units
         out_file.close()
 
-    def add_default_quantities(self, field_type='halos'):
-        self.add_quantity("particle_identifier", field_type=field_type)
-        self.add_quantity("particle_mass", field_type=field_type)
-        self.add_quantity("particle_position_x", field_type=field_type)
-        self.add_quantity("particle_position_y", field_type=field_type)
-        self.add_quantity("particle_position_z", field_type=field_type)
-        self.add_quantity("virial_radius", field_type=field_type)
+    def add_default_quantities(self, field_type='halos', prepend=False):
+        self.add_quantity("particle_identifier", field_type=field_type,prepend=prepend)
+        self.add_quantity("particle_mass", field_type=field_type,prepend=prepend)
+        self.add_quantity("particle_position_x", field_type=field_type,prepend=prepend)
+        self.add_quantity("particle_position_y", field_type=field_type,prepend=prepend)
+        self.add_quantity("particle_position_z", field_type=field_type,prepend=prepend)
+        self.add_quantity("virial_radius", field_type=field_type,prepend=prepend)
 


https://bitbucket.org/yt_analysis/yt/commits/a5767f974d4d/
Changeset:   a5767f974d4d
Branch:      yt-3.0
User:        hegan
Date:        2014-06-25 21:30:50
Summary:     merged
Affected #:  158 files

diff -r 366b6a97ecfd8770a402397fc5abb75a8dcde268 -r a5767f974d4d47205579a813f7b33fcab443263d .hgignore
--- a/.hgignore
+++ b/.hgignore
@@ -41,6 +41,7 @@
 yt/utilities/lib/PointsInVolume.c
 yt/utilities/lib/QuadTree.c
 yt/utilities/lib/RayIntegrators.c
+yt/utilities/lib/ragged_arrays.c
 yt/utilities/lib/VolumeIntegrator.c
 yt/utilities/lib/grid_traversal.c
 yt/utilities/lib/GridTree.c

diff -r 366b6a97ecfd8770a402397fc5abb75a8dcde268 -r a5767f974d4d47205579a813f7b33fcab443263d doc/README
--- a/doc/README
+++ b/doc/README
@@ -5,6 +5,6 @@
 http://sphinx.pocoo.org/
 
 Because the documentation requires a number of dependencies, we provide
-pre-build versions online, accessible here:
+pre-built versions online, accessible here:
 
-http://yt-project.org/docs/
+http://yt-project.org/docs/dev-3.0/

diff -r 366b6a97ecfd8770a402397fc5abb75a8dcde268 -r a5767f974d4d47205579a813f7b33fcab443263d doc/install_script.sh
--- a/doc/install_script.sh
+++ b/doc/install_script.sh
@@ -568,7 +568,7 @@
 mkdir -p ${DEST_DIR}/data
 cd ${DEST_DIR}/data
 echo 'de6d8c6ea849f0206d219303329a0276b3cce7c051eec34377d42aacbe0a4f47ac5145eb08966a338ecddd2b83c8f787ca9956508ad5c39ee2088ad875166410  xray_emissivity.h5' > xray_emissivity.h5.sha512
-get_ytdata xray_emissivity.h5
+[ ! -e xray_emissivity.h5 ] && get_ytdata xray_emissivity.h5
 
 # Set paths to what they should be when yt is activated.
 export PATH=${DEST_DIR}/bin:$PATH
@@ -608,7 +608,6 @@
 echo '3f53d0b474bfd79fea2536d0a9197eaef6c0927e95f2f9fd52dbd6c1d46409d0e649c21ac418d8f7767a9f10fe6114b516e06f2be4b06aec3ab5bdebc8768220  Forthon-0.8.11.tar.gz' > Forthon-0.8.11.tar.gz.sha512
 echo '4941f5aa21aff3743546495fb073c10d2657ff42b2aff401903498638093d0e31e344cce778980f28a7170c6d29eab72ac074277b9d4088376e8692dc71e55c1  PyX-0.12.1.tar.gz' > PyX-0.12.1.tar.gz.sha512
 echo '3df0ba4b1cfef5f02fb27925de4c2ca414eca9000af6a3d475d39063720afe987287c3d51377e0a36b88015573ef699f700782e1749c7a357b8390971d858a79  Python-2.7.6.tgz' > Python-2.7.6.tgz.sha512
-echo '172f2bc671145ebb0add2669c117863db35851fb3bdb192006cd710d4d038e0037497eb39a6d01091cb923f71a7e8982a77b6e80bf71d6275d5d83a363c8d7e5  rockstar-0.99.6.tar.gz' > rockstar-0.99.6.tar.gz.sha512
 echo '276bd9c061ec9a27d478b33078a86f93164ee2da72210e12e2c9da71dcffeb64767e4460b93f257302b09328eda8655e93c4b9ae85e74472869afbeae35ca71e  blas.tar.gz' > blas.tar.gz.sha512
 echo '00ace5438cfa0c577e5f578d8a808613187eff5217c35164ffe044fbafdfec9e98f4192c02a7d67e01e5a5ccced630583ad1003c37697219b0f147343a3fdd12  bzip2-1.0.6.tar.gz' > bzip2-1.0.6.tar.gz.sha512
 echo 'a296dfcaef7e853e58eed4e24b37c4fa29cfc6ac688def048480f4bb384b9e37ca447faf96eec7b378fd764ba291713f03ac464581d62275e28eb2ec99110ab6  reason-js-20120623.zip' > reason-js-20120623.zip.sha512
@@ -624,7 +623,6 @@
 echo 'd58177f3971b6d07baf6f81a2088ba371c7e43ea64ee7ada261da97c6d725b4bd4927122ac373c55383254e4e31691939276dab08a79a238bfa55172a3eff684  numpy-1.7.1.tar.gz' > numpy-1.7.1.tar.gz.sha512
 echo '9c0a61299779aff613131aaabbc255c8648f0fa7ab1806af53f19fbdcece0c8a68ddca7880d25b926d67ff1b9201954b207919fb09f6a290acb078e8bbed7b68  python-hglib-1.0.tar.gz' > python-hglib-1.0.tar.gz.sha512
 echo 'c65013293dd4049af5db009fdf7b6890a3c6b1e12dd588b58fb5f5a5fef7286935851fb7a530e03ea16f28de48b964e50f48bbf87d34545fd23b80dd4380476b  pyzmq-13.1.0.tar.gz' > pyzmq-13.1.0.tar.gz.sha512
-echo '172f2bc671145ebb0add2669c117863db35851fb3bdb192006cd710d4d038e0037497eb39a6d01091cb923f71a7e8982a77b6e80bf71d6275d5d83a363c8d7e5  rockstar-0.99.6.tar.gz' > rockstar-0.99.6.tar.gz.sha512
 echo '80c8e137c3ccba86575d4263e144ba2c4684b94b5cd620e200f094c92d4e118ea6a631d27bdb259b0869771dfaeeae68c0fdd37fdd740b9027ee185026e921d4  scipy-0.12.0.tar.gz' > scipy-0.12.0.tar.gz.sha512
 echo '96f3e51b46741450bc6b63779c10ebb4a7066860fe544385d64d1eda52592e376a589ef282ace2e1df73df61c10eab1a0d793abbdaf770e60289494d4bf3bcb4  sqlite-autoconf-3071700.tar.gz' > sqlite-autoconf-3071700.tar.gz.sha512
 echo '2992baa3edfb4e1842fb642abf0bf0fc0bf56fc183aab8fed6b3c42fbea928fa110ede7fdddea2d63fc5953e8d304b04da433dc811134fadefb1eecc326121b8  sympy-0.7.3.tar.gz' > sympy-0.7.3.tar.gz.sha512
@@ -657,7 +655,6 @@
 get_ytproject $NOSE.tar.gz
 get_ytproject $PYTHON_HGLIB.tar.gz
 get_ytproject $SYMPY.tar.gz
-get_ytproject $ROCKSTAR.tar.gz
 if [ $INST_BZLIB -eq 1 ]
 then
     if [ ! -e $BZLIB/done ]
@@ -816,6 +813,7 @@
         YT_DIR=`dirname $ORIG_PWD`
     elif [ ! -e yt-hg ]
     then
+        echo "Cloning yt"
         YT_DIR="$PWD/yt-hg/"
         ( ${HG_EXEC} --debug clone https://bitbucket.org/yt_analysis/yt-supplemental/ 2>&1 ) 1>> ${LOG_FILE}
         # Recently the hg server has had some issues with timeouts.  In lieu of
@@ -824,9 +822,9 @@
         ( ${HG_EXEC} --debug clone https://bitbucket.org/yt_analysis/yt/ ./yt-hg 2>&1 ) 1>> ${LOG_FILE}
         # Now we update to the branch we're interested in.
         ( ${HG_EXEC} -R ${YT_DIR} up -C ${BRANCH} 2>&1 ) 1>> ${LOG_FILE}
-    elif [ -e yt-3.0-hg ] 
+    elif [ -e yt-hg ]
     then
-        YT_DIR="$PWD/yt-3.0-hg/"
+        YT_DIR="$PWD/yt-hg/"
     fi
     echo Setting YT_DIR=${YT_DIR}
 fi
@@ -943,14 +941,19 @@
 # Now we build Rockstar and set its environment variable.
 if [ $INST_ROCKSTAR -eq 1 ]
 then
-    if [ ! -e Rockstar/done ]
+    if [ ! -e rockstar/done ]
     then
-        [ ! -e Rockstar ] && tar xfz $ROCKSTAR.tar.gz
         echo "Building Rockstar"
-        cd Rockstar
+        if [ ! -e rockstar ]
+        then
+            ( hg clone http://bitbucket.org/MatthewTurk/rockstar 2>&1 ) 1>> ${LOG_FILE}
+        fi
+        cd rockstar
+        ( hg pull 2>&1 ) 1>> ${LOG_FILE}
+        ( hg up -C tip 2>&1 ) 1>> ${LOG_FILE}
         ( make lib 2>&1 ) 1>> ${LOG_FILE} || do_exit
         cp librockstar.so ${DEST_DIR}/lib
-        ROCKSTAR_DIR=${DEST_DIR}/src/Rockstar
+        ROCKSTAR_DIR=${DEST_DIR}/src/rockstar
         echo $ROCKSTAR_DIR > ${YT_DIR}/rockstar.cfg
         touch done
         cd ..

diff -r 366b6a97ecfd8770a402397fc5abb75a8dcde268 -r a5767f974d4d47205579a813f7b33fcab443263d doc/source/analyzing/analysis_modules/halo_catalogs.rst
--- a/doc/source/analyzing/analysis_modules/halo_catalogs.rst
+++ b/doc/source/analyzing/analysis_modules/halo_catalogs.rst
@@ -226,4 +226,4 @@
 =======
 
 For a full example of how to use these methods together see 
-:ref:`halo_analysis_example`.
+:doc:`halo_analysis_example`.

diff -r 366b6a97ecfd8770a402397fc5abb75a8dcde268 -r a5767f974d4d47205579a813f7b33fcab443263d doc/source/bootcamp/1)_Introduction.ipynb
--- a/doc/source/bootcamp/1)_Introduction.ipynb
+++ b/doc/source/bootcamp/1)_Introduction.ipynb
@@ -1,6 +1,7 @@
 {
  "metadata": {
-  "name": ""
+  "name": "",
+  "signature": "sha256:39620670ce7751b23f30d2123fd3598de1c7843331f65de13e29f4ae9f759e0f"
  },
  "nbformat": 3,
  "nbformat_minor": 0,
@@ -32,9 +33,40 @@
       "5. Derived Fields and Profiles (IsolatedGalaxy dataset)\n",
       "6. Volume Rendering (IsolatedGalaxy dataset)"
      ]
+    },
+    {
+     "cell_type": "markdown",
+     "metadata": {},
+     "source": [
+      "The following code will download the data needed for this tutorial automatically using `curl`. It may take some time so please wait when the kernel is busy. You will need to set `download_datasets` to True before using it."
+     ]
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "download_datasets = False\n",
+      "if download_datasets:\n",
+      "    !curl -sSO http://yt-project.org/data/enzo_tiny_cosmology.tar\n",
+      "    print \"Got enzo_tiny_cosmology\"\n",
+      "    !tar xf enzo_tiny_cosmology.tar\n",
+      "    \n",
+      "    !curl -sSO http://yt-project.org/data/Enzo_64.tar\n",
+      "    print \"Got Enzo_64\"\n",
+      "    !tar xf Enzo_64.tar\n",
+      "    \n",
+      "    !curl -sSO http://yt-project.org/data/IsolatedGalaxy.tar\n",
+      "    print \"Got IsolatedGalaxy\"\n",
+      "    !tar xf IsolatedGalaxy.tar\n",
+      "    \n",
+      "    print \"All done!\""
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": []
     }
    ],
    "metadata": {}
   }
  ]
-}
+}
\ No newline at end of file

diff -r 366b6a97ecfd8770a402397fc5abb75a8dcde268 -r a5767f974d4d47205579a813f7b33fcab443263d doc/source/bootcamp/2)_Data_Inspection.ipynb
--- a/doc/source/bootcamp/2)_Data_Inspection.ipynb
+++ b/doc/source/bootcamp/2)_Data_Inspection.ipynb
@@ -1,7 +1,7 @@
 {
  "metadata": {
   "name": "",
-  "signature": "sha256:15cdc35ddb8b1b938967237e17534149f734f4e7a61ebd37d74b675f8059da20"
+  "signature": "sha256:9d67e9e4ca5ce92dcd0658025dbfbd28be47b47ca8d4531fdac16cc2c2fa038b"
  },
  "nbformat": 3,
  "nbformat_minor": 0,
@@ -21,7 +21,7 @@
      "cell_type": "code",
      "collapsed": false,
      "input": [
-      "from yt.mods import *"
+      "import yt"
      ],
      "language": "python",
      "metadata": {},
@@ -38,7 +38,7 @@
      "cell_type": "code",
      "collapsed": false,
      "input": [
-      "ds = load(\"IsolatedGalaxy/galaxy0030/galaxy0030\")"
+      "ds = yt.load(\"IsolatedGalaxy/galaxy0030/galaxy0030\")"
      ],
      "language": "python",
      "metadata": {},

diff -r 366b6a97ecfd8770a402397fc5abb75a8dcde268 -r a5767f974d4d47205579a813f7b33fcab443263d doc/source/bootcamp/3)_Simple_Visualization.ipynb
--- a/doc/source/bootcamp/3)_Simple_Visualization.ipynb
+++ b/doc/source/bootcamp/3)_Simple_Visualization.ipynb
@@ -1,7 +1,7 @@
 {
  "metadata": {
   "name": "",
-  "signature": "sha256:eb5fbf5eb55a9c8997c687f072c8c6030e74bef0048a72b4f74a06893c11b80a"
+  "signature": "sha256:c00ba7fdbbd9ea957d06060ad70f06f629b1fd4ebf5379c1fdad2697ab0a4cd6"
  },
  "nbformat": 3,
  "nbformat_minor": 0,
@@ -21,7 +21,7 @@
      "cell_type": "code",
      "collapsed": false,
      "input": [
-      "from yt.mods import *"
+      "import yt"
      ],
      "language": "python",
      "metadata": {},
@@ -38,7 +38,7 @@
      "cell_type": "code",
      "collapsed": false,
      "input": [
-      "ds = load(\"enzo_tiny_cosmology/DD0046/DD0046\")\n",
+      "ds = yt.load(\"enzo_tiny_cosmology/DD0046/DD0046\")\n",
       "print \"Redshift =\", ds.current_redshift"
      ],
      "language": "python",
@@ -58,7 +58,7 @@
      "cell_type": "code",
      "collapsed": false,
      "input": [
-      "p = ProjectionPlot(ds, \"y\", \"density\")\n",
+      "p = yt.ProjectionPlot(ds, \"y\", \"density\")\n",
       "p.show()"
      ],
      "language": "python",
@@ -135,7 +135,7 @@
      "cell_type": "code",
      "collapsed": false,
      "input": [
-      "p = ProjectionPlot(ds, \"z\", [\"density\", \"temperature\"], weight_field=\"density\")\n",
+      "p = yt.ProjectionPlot(ds, \"z\", [\"density\", \"temperature\"], weight_field=\"density\")\n",
       "p.show()"
      ],
      "language": "python",
@@ -189,8 +189,8 @@
      "cell_type": "code",
      "collapsed": false,
      "input": [
-      "ds = load(\"Enzo_64/DD0043/data0043\")\n",
-      "s = SlicePlot(ds, \"z\", [\"density\", \"velocity_magnitude\"], center=\"max\")\n",
+      "ds = yt.load(\"Enzo_64/DD0043/data0043\")\n",
+      "s = yt.SlicePlot(ds, \"z\", [\"density\", \"velocity_magnitude\"], center=\"max\")\n",
       "s.set_cmap(\"velocity_magnitude\", \"kamae\")\n",
       "s.zoom(10.0)"
      ],
@@ -243,7 +243,7 @@
      "cell_type": "code",
      "collapsed": false,
      "input": [
-      "s = SlicePlot(ds, \"x\", [\"density\"], center=\"max\")\n",
+      "s = yt.SlicePlot(ds, \"x\", [\"density\"], center=\"max\")\n",
       "s.annotate_contour(\"temperature\")\n",
       "s.zoom(2.5)"
      ],
@@ -272,4 +272,4 @@
    "metadata": {}
   }
  ]
-}
+}
\ No newline at end of file

diff -r 366b6a97ecfd8770a402397fc5abb75a8dcde268 -r a5767f974d4d47205579a813f7b33fcab443263d doc/source/bootcamp/4)_Data_Objects_and_Time_Series.ipynb
--- a/doc/source/bootcamp/4)_Data_Objects_and_Time_Series.ipynb
+++ b/doc/source/bootcamp/4)_Data_Objects_and_Time_Series.ipynb
@@ -1,7 +1,7 @@
 {
  "metadata": {
   "name": "",
-  "signature": "sha256:41293a66cd6fd5eae6da2d0343549144dc53d72e83286999faab3cf21d801f51"
+  "signature": "sha256:a46e1baa90d32045c2b524100f28bad41b3665249612c9a275ee0375a6f4be20"
  },
  "nbformat": 3,
  "nbformat_minor": 0,
@@ -22,8 +22,10 @@
      "collapsed": false,
      "input": [
       "%matplotlib inline\n",
-      "from yt.mods import *\n",
-      "from matplotlib import pylab"
+      "import yt\n",
+      "import numpy as np\n",
+      "from matplotlib import pylab\n",
+      "from yt.analysis_modules.halo_finding.api import HaloFinder"
      ],
      "language": "python",
      "metadata": {},
@@ -44,7 +46,7 @@
      "cell_type": "code",
      "collapsed": false,
      "input": [
-      "ts = DatasetSeries(\"enzo_tiny_cosmology/*/*.hierarchy\")"
+      "ts = yt.DatasetSeries(\"enzo_tiny_cosmology/*/*.hierarchy\")"
      ],
      "language": "python",
      "metadata": {},
@@ -86,8 +88,13 @@
      "cell_type": "code",
      "collapsed": false,
      "input": [
-      "pylab.semilogy(times, rho_ex[:,0], '-xk')\n",
-      "pylab.semilogy(times, rho_ex[:,1], '-xr')"
+      "pylab.semilogy(times, rho_ex[:,0], '-xk', label='Minimum')\n",
+      "pylab.semilogy(times, rho_ex[:,1], '-xr', label='Maximum')\n",
+      "pylab.ylabel(\"Density ($g/cm^3$)\")\n",
+      "pylab.xlabel(\"Time (Gyr)\")\n",
+      "pylab.legend()\n",
+      "pylab.ylim(1e-32, 1e-21)\n",
+      "pylab.show()"
      ],
      "language": "python",
      "metadata": {},
@@ -108,13 +115,15 @@
      "cell_type": "code",
      "collapsed": false,
      "input": [
+      "from yt.units import Msun\n",
+      "\n",
       "mass = []\n",
       "zs = []\n",
       "for ds in ts:\n",
       "    halos = HaloFinder(ds)\n",
       "    dd = ds.all_data()\n",
       "    total_mass = dd.quantities.total_quantity(\"cell_mass\").in_units(\"Msun\")\n",
-      "    total_in_baryons = 0.0\n",
+      "    total_in_baryons = 0.0*Msun\n",
       "    for halo in halos:\n",
       "        sp = halo.get_sphere()\n",
       "        total_in_baryons += sp.quantities.total_quantity(\"cell_mass\").in_units(\"Msun\")\n",
@@ -136,7 +145,11 @@
      "cell_type": "code",
      "collapsed": false,
      "input": [
-      "pylab.loglog(zs, mass, '-xb')"
+      "pylab.semilogx(zs, mass, '-xb')\n",
+      "pylab.xlabel(\"Redshift\")\n",
+      "pylab.ylabel(\"Mass in halos / Total mass\")\n",
+      "pylab.xlim(max(zs), min(zs))\n",
+      "pylab.ylim(-0.01, .18)"
      ],
      "language": "python",
      "metadata": {},
@@ -154,7 +167,9 @@
       "\n",
       "yt provides the ability to examine rays, or lines, through the domain.  Note that these are not periodic, unlike most other data objects.  We create a ray object and can then examine quantities of it.  Rays have the special fields `t` and `dts`, which correspond to the time the ray enters a given cell and the distance it travels through that cell.\n",
       "\n",
-      "To create a ray, we specify the start and end points."
+      "To create a ray, we specify the start and end points.\n",
+      "\n",
+      "Note that we need to convert these arrays to numpy arrays due to a bug in matplotlib 1.3.1."
      ]
     },
     {
@@ -162,7 +177,7 @@
      "collapsed": false,
      "input": [
       "ray = ds.ray([0.1, 0.2, 0.3], [0.9, 0.8, 0.7])\n",
-      "pylab.semilogy(ray[\"t\"], ray[\"density\"])"
+      "pylab.semilogy(np.array(ray[\"t\"]), np.array(ray[\"density\"]))"
      ],
      "language": "python",
      "metadata": {},
@@ -211,10 +226,12 @@
      "cell_type": "code",
      "collapsed": false,
      "input": [
-      "ds = load(\"IsolatedGalaxy/galaxy0030/galaxy0030\")\n",
+      "ds = yt.load(\"IsolatedGalaxy/galaxy0030/galaxy0030\")\n",
       "v, c = ds.find_max(\"density\")\n",
       "sl = ds.slice(0, c[0])\n",
-      "print sl[\"index\", \"x\"], sl[\"index\", \"z\"], sl[\"pdx\"]\n",
+      "print sl[\"index\", \"x\"]\n",
+      "print sl[\"index\", \"z\"]\n",
+      "print sl[\"pdx\"]\n",
       "print sl[\"gas\", \"density\"].shape"
      ],
      "language": "python",
@@ -250,8 +267,8 @@
      "cell_type": "code",
      "collapsed": false,
      "input": [
-      "write_image(np.log10(frb[\"gas\", \"density\"]), \"temp.png\")\n",
-      "from IPython.core.display import Image\n",
+      "yt.write_image(np.log10(frb[\"gas\", \"density\"]), \"temp.png\")\n",
+      "from IPython.display import Image\n",
       "Image(filename = \"temp.png\")"
      ],
      "language": "python",
@@ -274,7 +291,7 @@
      "collapsed": false,
      "input": [
       "cp = ds.cutting([0.2, 0.3, 0.5], \"max\")\n",
-      "pw = cp.to_pw(fields = [\"density\"])"
+      "pw = cp.to_pw(fields = [(\"gas\", \"density\")])"
      ],
      "language": "python",
      "metadata": {},
@@ -309,7 +326,8 @@
      "collapsed": false,
      "input": [
       "pws = sl.to_pw(fields=[\"density\"])\n",
-      "pws.show()"
+      "#pws.show()\n",
+      "print pws.plots.keys()"
      ],
      "language": "python",
      "metadata": {},
@@ -361,4 +379,4 @@
    "metadata": {}
   }
  ]
-}
+}
\ No newline at end of file
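
The "0.0*Msun" change in this notebook reflects that yt-3.0 data arrays
carry units: a running total has to be unitful before unitful cell masses
can be accumulated into it. A minimal sketch of the pattern (the numbers
are made up):

    from yt.units import Msun

    total = 0.0*Msun                  # a unitful zero, safe to accumulate into
    total += 3.2e10*Msun              # mixing in a bare float would not be unit-safe
    total_in_g = total.in_units("g")  # convert on demand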

diff -r 366b6a97ecfd8770a402397fc5abb75a8dcde268 -r a5767f974d4d47205579a813f7b33fcab443263d doc/source/bootcamp/5)_Derived_Fields_and_Profiles.ipynb
--- a/doc/source/bootcamp/5)_Derived_Fields_and_Profiles.ipynb
+++ b/doc/source/bootcamp/5)_Derived_Fields_and_Profiles.ipynb
@@ -1,7 +1,7 @@
 {
  "metadata": {
   "name": "",
-  "signature": "sha256:a19d451f3b4dcfeed448caa22c2cac35c46958e0646c19c226b1e467b76d0718"
+  "signature": "sha256:eca573e749829cacda0a8c07c6d5d11d07a5de657563a44b8c4ffff8f735caed"
  },
  "nbformat": 3,
  "nbformat_minor": 0,
@@ -22,7 +22,9 @@
      "collapsed": false,
      "input": [
       "%matplotlib inline\n",
-      "from yt.mods import *\n",
+      "import yt\n",
+      "import numpy as np\n",
+      "from yt import derived_field\n",
       "from matplotlib import pylab"
      ],
      "language": "python",
@@ -61,7 +63,7 @@
      "cell_type": "code",
      "collapsed": false,
      "input": [
-      "ds = load(\"IsolatedGalaxy/galaxy0030/galaxy0030\")\n",
+      "ds = yt.load(\"IsolatedGalaxy/galaxy0030/galaxy0030\")\n",
       "dd = ds.all_data()\n",
       "print dd.quantities.keys()"
      ],
@@ -120,7 +122,9 @@
       "bv = sp.quantities.bulk_velocity()\n",
       "L = sp.quantities.angular_momentum_vector()\n",
       "rho_min, rho_max = sp.quantities.extrema(\"density\")\n",
-      "print bv, L, rho_min, rho_max"
+      "print bv\n",
+      "print L\n",
+      "print rho_min, rho_max"
      ],
      "language": "python",
      "metadata": {},
@@ -143,9 +147,11 @@
      "cell_type": "code",
      "collapsed": false,
      "input": [
-      "prof = Profile1D(sp, \"density\", 32, rho_min, rho_max, True, weight_field=\"cell_mass\")\n",
+      "prof = yt.Profile1D(sp, \"density\", 32, rho_min, rho_max, True, weight_field=\"cell_mass\")\n",
       "prof.add_fields([\"temperature\",\"dinosaurs\"])\n",
-      "pylab.loglog(np.array(prof.x), np.array(prof[\"temperature\"]), \"-x\")"
+      "pylab.loglog(np.array(prof.x), np.array(prof[\"temperature\"]), \"-x\")\n",
+      "pylab.xlabel('Density $(g/cm^3)$')\n",
+      "pylab.ylabel('Temperature $(K)$')"
      ],
      "language": "python",
      "metadata": {},
@@ -162,7 +168,9 @@
      "cell_type": "code",
      "collapsed": false,
      "input": [
-      "pylab.loglog(np.array(prof.x), np.array(prof[\"dinosaurs\"]), '-x')"
+      "pylab.loglog(np.array(prof.x), np.array(prof[\"dinosaurs\"]), '-x')\n",
+      "pylab.xlabel('Density $(g/cm^3)$')\n",
+      "pylab.ylabel('Dinosaurs $(K cm / s)$')"
      ],
      "language": "python",
      "metadata": {},
@@ -179,9 +187,30 @@
      "cell_type": "code",
      "collapsed": false,
      "input": [
-      "prof = Profile1D(sp, \"density\", 32, rho_min, rho_max, True, weight_field=None)\n",
+      "prof = yt.Profile1D(sp, \"density\", 32, rho_min, rho_max, True, weight_field=None)\n",
       "prof.add_fields([\"cell_mass\"])\n",
-      "pylab.loglog(np.array(prof.x), np.array(prof[\"cell_mass\"].in_units(\"Msun\")), '-x')"
+      "pylab.loglog(np.array(prof.x), np.array(prof[\"cell_mass\"].in_units(\"Msun\")), '-x')\n",
+      "pylab.xlabel('Density $(g/cm^3)$')\n",
+      "pylab.ylabel('Cell mass $(M_\\odot)$')"
+     ],
+     "language": "python",
+     "metadata": {},
+     "outputs": []
+    },
+    {
+     "cell_type": "markdown",
+     "metadata": {},
+     "source": [
+      "In addition to the low-level `ProfileND` interface, it's also quite straightforward to quickly create plots of profiles using the `ProfilePlot` class.  Let's redo the last plot using `ProfilePlot`"
+     ]
+    },
+    {
+     "cell_type": "code",
+     "collapsed": false,
+     "input": [
+      "prof = yt.ProfilePlot(sp, 'density', 'cell_mass', weight_field=None)\n",
+      "prof.set_unit('cell_mass', 'Msun')\n",
+      "prof.show()"
      ],
      "language": "python",
      "metadata": {},

diff -r 366b6a97ecfd8770a402397fc5abb75a8dcde268 -r a5767f974d4d47205579a813f7b33fcab443263d doc/source/bootcamp/6)_Volume_Rendering.ipynb
--- a/doc/source/bootcamp/6)_Volume_Rendering.ipynb
+++ b/doc/source/bootcamp/6)_Volume_Rendering.ipynb
@@ -1,7 +1,7 @@
 {
  "metadata": {
   "name": "",
-  "signature": "sha256:2929940fc3977b495aa124dee851f7602d61e073ed65407dd95e7cf597684b35"
+  "signature": "sha256:2a24bbe82955f9d948b39cbd1b1302968ff57f62f73afb2c7a5c4953393d00ae"
  },
  "nbformat": 3,
  "nbformat_minor": 0,
@@ -21,8 +21,8 @@
      "cell_type": "code",
      "collapsed": false,
      "input": [
-      "from yt.mods import *\n",
-      "ds = load(\"IsolatedGalaxy/galaxy0030/galaxy0030\")"
+      "import yt\n",
+      "ds = yt.load(\"IsolatedGalaxy/galaxy0030/galaxy0030\")"
      ],
      "language": "python",
      "metadata": {},
@@ -43,7 +43,7 @@
      "cell_type": "code",
      "collapsed": false,
      "input": [
-      "tf = ColorTransferFunction((-28, -24))\n",
+      "tf = yt.ColorTransferFunction((-28, -24))\n",
       "tf.add_layers(4, w=0.01)\n",
       "cam = ds.camera([0.5, 0.5, 0.5], [1.0, 1.0, 1.0], (20, 'kpc'), 512, tf, fields=[\"density\"])\n",
       "cam.show()"
@@ -80,7 +80,7 @@
      "cell_type": "code",
      "collapsed": false,
      "input": [
-      "tf = ColorTransferFunction((-28, -25))\n",
+      "tf = yt.ColorTransferFunction((-28, -25))\n",
       "tf.add_layers(4, w=0.03)\n",
       "cam = ds.camera([0.5, 0.5, 0.5], [1.0, 1.0, 1.0], (20.0, 'kpc'), 512, tf, no_ghost=False)\n",
       "cam.show(clip_ratio=4.0)"

diff -r 366b6a97ecfd8770a402397fc5abb75a8dcde268 -r a5767f974d4d47205579a813f7b33fcab443263d doc/source/cookbook/aligned_cutting_plane.py
--- a/doc/source/cookbook/aligned_cutting_plane.py
+++ b/doc/source/cookbook/aligned_cutting_plane.py
@@ -1,18 +1,20 @@
+### THIS RECIPE IS CURRENTLY BROKEN IN YT-3.0
+### DO NOT TRUST THIS RECIPE UNTIL THIS LINE IS REMOVED
+
 import yt
 
 # Load the dataset.
 ds = yt.load("IsolatedGalaxy/galaxy0030/galaxy0030")
 
-# Create a 1 kpc radius sphere, centered on the maximum gas density.  Note
-# that this sphere is very small compared to the size of our final plot,
-# and it has a non-axially aligned L vector.
-sp = ds.sphere("m", (1.0, "kpc"))
+# Create a 15 kpc radius sphere, centered on the center of the sim volume
+sp = ds.sphere("center", (15.0, "kpc"))
 
 # Get the angular momentum vector for the sphere.
 L = sp.quantities.angular_momentum_vector()
 
 print "Angular momentum vector: {0}".format(L)
 
-# Create an OffAxisSlicePlot on the object with the L vector as its normal
-p = yt.OffAxisSlicePlot(ds, L, "density", sp.center, (15, "kpc"))
+# Create an OffAxisSlicePlot of density centered on the object with the L 
+# vector as its normal and a width of 25 kpc on a side
+p = yt.OffAxisSlicePlot(ds, L, "density", sp.center, (25, "kpc"))
 p.save()

diff -r 366b6a97ecfd8770a402397fc5abb75a8dcde268 -r a5767f974d4d47205579a813f7b33fcab443263d doc/source/cookbook/amrkdtree_downsampling.py
--- a/doc/source/cookbook/amrkdtree_downsampling.py
+++ b/doc/source/cookbook/amrkdtree_downsampling.py
@@ -1,3 +1,6 @@
+### THIS RECIPE IS CURRENTLY BROKEN IN YT-3.0
+### DO NOT TRUST THIS RECIPE UNTIL THIS LINE IS REMOVED 
+
 # Using AMRKDTree Homogenized Volumes to examine large datasets
 # at lower resolution.
 
@@ -10,17 +13,17 @@
 import yt
 from yt.utilities.amr_kdtree.api import AMRKDTree
 
-# Load up a data and print out the maximum refinement level
+# Load up a dataset
 ds = yt.load('IsolatedGalaxy/galaxy0030/galaxy0030')
 
 kd = AMRKDTree(ds)
-# Print out the total volume of all the bricks
-print kd.count_volume()
-# Print out the number of cells
-print kd.count_cells()
+
+# Print out specifics of KD Tree
+print "Total volume of all bricks = %i" % kd.count_volume()
+print "Total number of cells = %i" % kd.count_cells()
 
 tf = yt.ColorTransferFunction((-30, -22))
-cam = ds.h.camera([0.5, 0.5, 0.5], [0.2, 0.3, 0.4], 0.10, 256,
+cam = ds.camera([0.5, 0.5, 0.5], [0.2, 0.3, 0.4], 0.10, 256,
                   tf, volume=kd)
 tf.add_layers(4, 0.01, col_bounds=[-27.5, -25.5], colormap='RdBu_r')
 cam.snapshot("v1.png", clip_ratio=6.0)

diff -r 366b6a97ecfd8770a402397fc5abb75a8dcde268 -r a5767f974d4d47205579a813f7b33fcab443263d doc/source/cookbook/amrkdtree_to_uniformgrid.py
--- /dev/null
+++ b/doc/source/cookbook/amrkdtree_to_uniformgrid.py
@@ -0,0 +1,33 @@
+import numpy as np
+import yt
+
+#This is an example of how to map an amr data set
+#to a uniform grid. In this case the highest
+#level of refinement is mapped into a 1024x1024x1024 cube
+
+#first the amr data is loaded
+ds = yt.load("~/pfs/galaxy/new_tests/feedback_8bz/DD0021/DD0021")
+
+#next we get the maximum refinement level
+lmax = ds.parameters['MaximumRefinementLevel']
+
+#calculate the center of the domain
+domain_center = (ds.domain_right_edge + ds.domain_left_edge)/2
+
+#determine the cellsize in the highest refinement level
+cell_size = ds.domain_width/(ds.domain_dimensions*2**lmax)
+
+#calculate the left edge of the new grid
+left_edge = domain_center - 512*cell_size
+
+#the number of cells per side of the new grid
+ncells = 1024
+
+#ask yt for the specified covering grid
+cgrid = ds.covering_grid(lmax, left_edge, np.array([ncells,]*3))
+
+#get a map of the density into the new grid
+density_map = cgrid["density"].astype(dtype="float32")
+
+#save the file as a numpy array for convenient future processing
+np.save("/pfs/goldbaum/galaxy/new_tests/feedback_8bz/gas_density_DD0021_log_densities.npy", density_map)

diff -r 366b6a97ecfd8770a402397fc5abb75a8dcde268 -r a5767f974d4d47205579a813f7b33fcab443263d doc/source/cookbook/average_value.py
--- a/doc/source/cookbook/average_value.py
+++ b/doc/source/cookbook/average_value.py
@@ -5,9 +5,10 @@
 field = "temperature"  # The field to average
 weight = "cell_mass"  # The weight for the average
 
-dd = ds.h.all_data()  # This is a region describing the entire box,
-                      # but note it doesn't read anything in yet!
+ad = ds.all_data()  # This is a region describing the entire box,
+                    # but note it doesn't read anything in yet!
+
 # We now use our 'quantities' call to get the average quantity
-average_value = dd.quantities["WeightedAverageQuantity"](field, weight)
+average_value = ad.quantities.weighted_average_quantity(field, weight)
 
-print "Average %s (weighted by %s) is %0.5e" % (field, weight, average_value)
+print "Average %s (weighted by %s) is %0.3e %s" % (field, weight, average_value, average_value.units)

diff -r 366b6a97ecfd8770a402397fc5abb75a8dcde268 -r a5767f974d4d47205579a813f7b33fcab443263d doc/source/cookbook/boolean_data_objects.py
--- a/doc/source/cookbook/boolean_data_objects.py
+++ b/doc/source/cookbook/boolean_data_objects.py
@@ -1,23 +1,32 @@
+### THIS RECIPE IS CURRENTLY BROKEN IN YT-3.0
+### DO NOT TRUST THIS RECIPE UNTIL THIS LINE IS REMOVED
+
 import yt
 
 ds = yt.load("Enzo_64/DD0043/data0043")  # load data
-# Make a few data ojbects to start.
+# Make a few data objects to start. Two boxes and two spheres.
 re1 = ds.region([0.5, 0.5, 0.5], [0.4, 0.4, 0.4], [0.6, 0.6, 0.6])
 re2 = ds.region([0.5, 0.5, 0.5], [0.5, 0.5, 0.5], [0.6, 0.6, 0.6])
 sp1 = ds.sphere([0.5, 0.5, 0.5], 0.05)
 sp2 = ds.sphere([0.1, 0.2, 0.3], 0.1)
+
 # The "AND" operator. This will make a region identical to re2.
 bool1 = ds.boolean([re1, "AND", re2])
 xp = bool1["particle_position_x"]
+
 # The "OR" operator. This will make a region identical to re1.
 bool2 = ds.boolean([re1, "OR", re2])
+
 # The "NOT" operator. This will make a region like re1, but with the corner
 # that re2 covers cut out.
 bool3 = ds.boolean([re1, "NOT", re2])
+
 # Disjoint regions can be combined with the "OR" operator.
 bool4 = ds.boolean([sp1, "OR", sp2])
+
 # Find oddly-shaped overlapping regions.
 bool5 = ds.boolean([re2, "AND", sp1])
+
 # Nested logic with parentheses.
 # This is re1 with the oddly-shaped region cut out.
 bool6 = ds.boolean([re1, "NOT", "(", re1, "AND", sp1, ")"])

diff -r 366b6a97ecfd8770a402397fc5abb75a8dcde268 -r a5767f974d4d47205579a813f7b33fcab443263d doc/source/cookbook/camera_movement.py
--- a/doc/source/cookbook/camera_movement.py
+++ b/doc/source/cookbook/camera_movement.py
@@ -1,11 +1,13 @@
-import numpy as np
+### THIS RECIPE IS CURRENTLY BROKEN IN YT-3.0
+### DO NOT TRUST THIS RECIPE UNTIL THIS LINE IS REMOVED
 
 import yt
+import numpy as np
 
 # Follow the simple_volume_rendering cookbook for the first part of this.
 ds = yt.load("IsolatedGalaxy/galaxy0030/galaxy0030")  # load data
-dd = ds.all_data()
-mi, ma = dd.quantities["Extrema"]("density")
+ad = ds.all_data()
+mi, ma = ad.quantities.extrema("density")
 
 # Set up transfer function
 tf = yt.ColorTransferFunction((np.log10(mi), np.log10(ma)))
@@ -40,4 +42,4 @@
 # Zoom in by a factor of 10 over 5 frames
 for i, snapshot in enumerate(cam.zoomin(10.0, 5, clip_ratio=8.0)):
     snapshot.write_png('camera_movement_%04i.png' % frame)
-    frame += 1
\ No newline at end of file
+    frame += 1

diff -r 366b6a97ecfd8770a402397fc5abb75a8dcde268 -r a5767f974d4d47205579a813f7b33fcab443263d doc/source/cookbook/contours_on_slice.py
--- a/doc/source/cookbook/contours_on_slice.py
+++ b/doc/source/cookbook/contours_on_slice.py
@@ -1,13 +1,12 @@
 import yt
 
 # first add density contours on a density slice
-pf = yt.load("GasSloshing/sloshing_nomag2_hdf5_plt_cnt_0150")  # load data
-p = yt.SlicePlot(pf, "x", "density")
+ds = yt.load("GasSloshing/sloshing_nomag2_hdf5_plt_cnt_0150")  
+p = yt.SlicePlot(ds, "x", "density")
 p.annotate_contour("density")
 p.save()
 
-# then add temperature contours on the same densty slice
-pf = yt.load("GasSloshing/sloshing_nomag2_hdf5_plt_cnt_0150")  # load data
-p = yt.SlicePlot(pf, "x", "density")
+# then add temperature contours on the same density slice
+p = yt.SlicePlot(ds, "x", "density")
 p.annotate_contour("temperature")
-p.save(str(pf)+'_T_contour')
+p.save(str(ds)+'_T_contour')

diff -r 366b6a97ecfd8770a402397fc5abb75a8dcde268 -r a5767f974d4d47205579a813f7b33fcab443263d doc/source/cookbook/extract_fixed_resolution_data.py
--- a/doc/source/cookbook/extract_fixed_resolution_data.py
+++ b/doc/source/cookbook/extract_fixed_resolution_data.py
@@ -8,21 +8,26 @@
 level = 2
 dims = ds.domain_dimensions * ds.refine_by**level
 
-# Now, we construct an object that describes the data region and structure we
-# want
-cube = ds.covering_grid(2,  # The level we are willing to extract to; higher
-                            # levels than this will not contribute to the data!
+# We construct an object that describes the data region and structure we want
+# In this case, we want all data up to the maximum "level" of refinement 
+# across the entire simulation volume.  Higher levels than this will not 
+# contribute to our covering grid.
+cube = ds.covering_grid(level,  
                         left_edge=[0.0, 0.0, 0.0],
+                        dims=dims,
                         # And any fields to preload (this is optional!)
-                        dims=dims,
                         fields=["density"])
 
 # Now we open our output file using h5py
-# Note that we open with 'w' which will overwrite existing files!
+# Note that we open with 'w' (write), which will overwrite existing files!
 f = h5py.File("my_data.h5", "w")
 
-# We create a dataset at the root note, calling it density...
+# We create a dataset at the root, calling it "density"
 f.create_dataset("/density", data=cube["density"])
 
 # We close our file
 f.close()
+
+# If we want to then access this datacube in the h5 file, we can now...
+f = h5py.File("my_data.h5", "r")
+print f["density"].value

diff -r 366b6a97ecfd8770a402397fc5abb75a8dcde268 -r a5767f974d4d47205579a813f7b33fcab443263d doc/source/cookbook/ffmpeg_volume_rendering.py
--- /dev/null
+++ b/doc/source/cookbook/ffmpeg_volume_rendering.py
@@ -0,0 +1,99 @@
+#This is an example of how to make videos of 
+#uniform grid data using Theia and ffmpeg
+
+#The Scene object to hold the ray caster and view camera
+from yt.visualization.volume_rendering.theia.scene import TheiaScene
+
+#GPU based raycasting algorithm to use 
+from yt.visualization.volume_rendering.theia.algorithms.front_to_back import FrontToBackRaycaster
+
+#These will be used to define how to color the data
+from yt.visualization.volume_rendering.transfer_functions import ColorTransferFunction
+from yt.visualization.color_maps import *
+
+#This will be used to launch ffmpeg
+import subprocess as sp
+
+#Of course we need numpy for math magic
+import numpy as np
+
+#Opacity scaling function
+def scale_func(v, mi, ma):
+      return  np.minimum(1.0, (v-mi)/(ma-mi) + 0.0)
+
+#load the uniform grid from a numpy array file
+bolshoi = "/home/bogert/log_densities_1024.npy"
+density_grid = np.load(bolshoi)
+
+#Set the TheiaScene to use the density_grid and 
+#setup the raycaster for a resulting 1080p image
+ts = TheiaScene(volume = density_grid, raycaster = FrontToBackRaycaster(size = (1920,1080) ))
+
+#the min and max values in the data to color
+mi, ma = 0.0, 3.6
+
+#setup colortransferfunction
+bins = 5000
+tf = ColorTransferFunction( (mi, ma), bins)
+tf.map_to_colormap(0.5, ma, colormap="spring", scale_func = scale_func)
+
+#pass the transfer function to the ray caster
+ts.source.raycaster.set_transfer(tf)
+
+#Initial configuration for start of video
+#set initial opacity and brightness values
+#then zoom into the center of the data 30%
+ts.source.raycaster.set_opacity(0.03)
+ts.source.raycaster.set_brightness(2.3)
+ts.camera.zoom(30.0)
+
+#path to ffmpeg executable
+FFMPEG_BIN = "/usr/local/bin/ffmpeg"
+
+pipe = sp.Popen([ FFMPEG_BIN,
+        '-y', # (optional) overwrite the output file if it already exists
+	#This must be set to rawvideo because the image is an array
+        '-f', 'rawvideo', 
+	#This must be set to rawvideo because the image is an array
+        '-vcodec','rawvideo',
+	#The size of the image array and resulting video
+        '-s', '1920x1080', 
+	#This must be rgba to match array format (uint32)
+        '-pix_fmt', 'rgba',
+	#frame rate of video
+        '-r', '29.97', 
+        #Indicate that the input to ffmpeg comes from a pipe
+        '-i', '-', 
+        # Tells FFMPEG not to expect any audio
+        '-an', 
+        #Setup video encoder
+	#Use any encoder you like that is available in your ffmpeg build
+        '-vcodec', 'libx264', '-preset', 'ultrafast', '-qp', '0',
+        '-pix_fmt', 'yuv420p',
+        #Name of the output
+        'bolshoiplanck2.mkv' ],
+        stdin=sp.PIPE,stdout=sp.PIPE)
+		
+		
+#Now we loop and produce 500 frames
+for k in range(0, 500):
+    #update the scene resulting in a new image
+    ts.update()
+
+    #get the image array from the ray caster
+    array = ts.source.get_results()
+
+    #send the image array to ffmpeg
+    array.tofile(pipe.stdin)
+
+    #rotate the scene by 0.01 rads in x,y & z
+    ts.camera.rotateX(0.01)
+    ts.camera.rotateZ(0.01)
+    ts.camera.rotateY(0.01)
+
+    #zoom in 0.01% for a total of a 5% zoom
+    ts.camera.zoom(0.01)
+
+
+#Close the pipe to ffmpeg
+pipe.terminate()

diff -r 366b6a97ecfd8770a402397fc5abb75a8dcde268 -r a5767f974d4d47205579a813f7b33fcab443263d doc/source/cookbook/find_clumps.py
--- a/doc/source/cookbook/find_clumps.py
+++ b/doc/source/cookbook/find_clumps.py
@@ -1,3 +1,6 @@
+### THIS RECIPE IS CURRENTLY BROKEN IN YT-3.0
+### DO NOT TRUST THIS RECIPE UNTIL THIS LINE IS REMOVED
+
 import numpy as np
 
 import yt

diff -r 366b6a97ecfd8770a402397fc5abb75a8dcde268 -r a5767f974d4d47205579a813f7b33fcab443263d doc/source/cookbook/fit_spectrum.py
--- a/doc/source/cookbook/fit_spectrum.py
+++ b/doc/source/cookbook/fit_spectrum.py
@@ -1,22 +1,21 @@
+### THIS RECIPE IS CURRENTLY BROKEN IN YT-3.0
+### DO NOT TRUST THIS RECIPE UNTIL THIS LINE IS REMOVED
+
 import yt
 from yt.analysis_modules.cosmological_observation.light_ray.api import LightRay
-from yt.analysis_modules.api import AbsorptionSpectrum
+from yt.analysis_modules.absorption_spectrum.api import AbsorptionSpectrum
 from yt.analysis_modules.absorption_spectrum.api import generate_total_fit
 
 # Define and add a field to simulate OVI based on a constant relationship to HI
-def _OVI_NumberDensity(field, data):
-    return data['HI_NumberDensity']
+# Do *NOT* use this for science, because this is not how OVI actually behaves;
+# it is just an example.
 
+@yt.derived_field(name='OVI_number_density', units='cm**-3')
+def _OVI_number_density(field, data):
+    return data['HI_NumberDensity']*2.0
 
-def _convertOVI(data):
-    return 4.9E-4*.2
 
-yt.add_field('my_OVI_NumberDensity',
-             function=_OVI_NumberDensity,
-             convert_function=_convertOVI)
-
-
-# Define species andi associated parameters to add to continuum
+# Define species and associated parameters to add to continuum
 # Parameters used for both adding the transition to the spectrum
 # and for fitting
 # Note that for single species that produce multiple lines
@@ -37,7 +36,7 @@
                  'init_N': 1E14}
 
 OVI_parameters = {'name': 'OVI',
-                  'field': 'my_OVI_NumberDensity',
+                  'field': 'OVI_number_density',
                   'f': [.1325, .06580],
                   'Gamma': [4.148E8, 4.076E8],
                   'wavelength': [1031.9261, 1037.6167],
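
The yt.derived_field decorator used above replaces the old add_field /
convert_function pair. A minimal, non-physical sketch of the pattern (the
field name and scaling are illustrative only):

    import yt

    @yt.derived_field(name="doubled_density", units="g/cm**3")
    def _doubled_density(field, data):
        # data[...] returns a unit-aware array; the result must be
        # convertible to the declared units
        return 2.0*data["density"]

    ds = yt.load("IsolatedGalaxy/galaxy0030/galaxy0030")
    ad = ds.all_data()
    print ad["doubled_density"]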

diff -r 366b6a97ecfd8770a402397fc5abb75a8dcde268 -r a5767f974d4d47205579a813f7b33fcab443263d doc/source/cookbook/free_free_field.py
--- a/doc/source/cookbook/free_free_field.py
+++ b/doc/source/cookbook/free_free_field.py
@@ -1,3 +1,6 @@
+### THIS RECIPE IS CURRENTLY BROKEN IN YT-3.0
+### DO NOT TRUST THIS RECIPE UNTIL THIS LINE IS REMOVED
+
 import numpy as np
 import yt
 # Need to grab the proton mass from the constants database

diff -r 366b6a97ecfd8770a402397fc5abb75a8dcde268 -r a5767f974d4d47205579a813f7b33fcab443263d doc/source/cookbook/global_phase_plots.py
--- a/doc/source/cookbook/global_phase_plots.py
+++ b/doc/source/cookbook/global_phase_plots.py
@@ -4,10 +4,10 @@
 ds = yt.load("IsolatedGalaxy/galaxy0030/galaxy0030")
 
 # This is an object that describes the entire box
-ad = ds.h.all_data()
+ad = ds.all_data()
 
-# We plot the average VelocityMagnitude (mass-weighted) in our object
-# as a function of Density and temperature
+# We plot the average velocity magnitude (mass-weighted) in our object
+# as a function of density and temperature
 plot = yt.PhasePlot(ad, "density", "temperature", "velocity_magnitude")
 
 # save the plot

diff -r 366b6a97ecfd8770a402397fc5abb75a8dcde268 -r a5767f974d4d47205579a813f7b33fcab443263d doc/source/cookbook/halo_merger_tree.py
--- a/doc/source/cookbook/halo_merger_tree.py
+++ b/doc/source/cookbook/halo_merger_tree.py
@@ -1,3 +1,6 @@
+### THIS RECIPE IS CURRENTLY BROKEN IN YT-3.0
+### DO NOT TRUST THIS RECIPE UNTIL THIS LINE IS REMOVED
+
 # This script demonstrates some of the halo merger tracking infrastructure,
 # for tracking halos across multiple datadumps in a time series.
 # Ultimately, it outputs an HDF5 file with the important quantities for the

diff -r 366b6a97ecfd8770a402397fc5abb75a8dcde268 -r a5767f974d4d47205579a813f7b33fcab443263d doc/source/cookbook/halo_plotting.py
--- a/doc/source/cookbook/halo_plotting.py
+++ b/doc/source/cookbook/halo_plotting.py
@@ -1,16 +1,20 @@
-"""
-This is a mechanism for plotting circles representing identified particle halos
-on an image.  For more information, see :ref:`halo_finding`.
-"""
-from yt.mods import * # set up our namespace
+### THIS RECIPE IS CURRENTLY BROKEN IN YT-3.0
+### DO NOT TRUST THIS RECIPE UNTIL THIS LINE IS REMOVED
 
-data_pf = load("Enzo_64/RD0006/RedshiftOutput0006")
+import yt
+from yt.analysis_modules.halo_analysis.halo_catalog import HaloCatalog
 
-halo_pf = load('rockstar_halos/halos_0.0.bin')
+# Load the dataset
+ds = yt.load("Enzo_64/RD0006/RedshiftOutput0006")
 
-hc - HaloCatalog(halos_pf = halo_pf)
+# Load the halo list from a rockstar output for this dataset
+halos = yt.load('rockstar_halos/halos_0.0.bin')
+
+# Create the halo catalog from this halo list
+hc = HaloCatalog(halos_pf = halos)
 hc.load()
 
-p = ProjectionPlot(pf, "x", "density")
+# Create a projection with the halos overplot on top
+p = yt.ProjectionPlot(ds, "x", "density")
 p.annotate_halos(hc)
 p.save()

diff -r 366b6a97ecfd8770a402397fc5abb75a8dcde268 -r a5767f974d4d47205579a813f7b33fcab443263d doc/source/cookbook/halo_profiler.py
--- a/doc/source/cookbook/halo_profiler.py
+++ b/doc/source/cookbook/halo_profiler.py
@@ -1,3 +1,6 @@
+### THIS RECIPE IS CURRENTLY BROKEN IN YT-3.0
+### DO NOT TRUST THIS RECIPE UNTIL THIS LINE IS REMOVED
+
 from yt.mods import *
 
 from yt.analysis_modules.halo_profiler.api import *

diff -r 366b6a97ecfd8770a402397fc5abb75a8dcde268 -r a5767f974d4d47205579a813f7b33fcab443263d doc/source/cookbook/hse_field.py
--- a/doc/source/cookbook/hse_field.py
+++ b/doc/source/cookbook/hse_field.py
@@ -1,11 +1,14 @@
+### THIS RECIPE IS CURRENTLY BROKEN IN YT-3.0
+### DO NOT TRUST THIS RECIPE UNTIL THIS LINE IS REMOVED
+
 import numpy as np
 import yt
 
 # Define the components of the gravitational acceleration vector field by
 # taking the gradient of the gravitational potential
 
-
-def _Grav_Accel_x(field, data):
+@yt.derived_field(name='grav_accel_x', units='cm/s**2', take_log=False)
+def grav_accel_x(field, data):
 
     # We need to set up stencils
 
@@ -19,13 +22,14 @@
     gx -= data["gravitational_potential"][sl_left, 1:-1, 1:-1]/dx
 
     new_field = np.zeros(data["gravitational_potential"].shape,
-                         dtype='float64')
+                         dtype='float64')*gx.unit_array
     new_field[1:-1, 1:-1, 1:-1] = -gx
 
     return new_field
 
 
-def _Grav_Accel_y(field, data):
+@yt.derived_field(name='grav_accel_y', units='cm/s**2', take_log=False)
+def grav_accel_y(field, data):
 
     # We need to set up stencils
 
@@ -39,13 +43,14 @@
     gy -= data["gravitational_potential"][1:-1, sl_left, 1:-1]/dy
 
     new_field = np.zeros(data["gravitational_potential"].shape,
-                         dtype='float64')
+                         dtype='float64')*gy.unit_array
     new_field[1:-1, 1:-1, 1:-1] = -gy
 
     return new_field
 
 
-def _Grav_Accel_z(field, data):
+@yt.derived_field(name='grav_accel_z', units='cm/s**2', take_log=False)
+def grav_accel_z(field, data):
 
     # We need to set up stencils
 
@@ -59,7 +64,7 @@
     gz -= data["gravitational_potential"][1:-1, 1:-1, sl_left]/dz
 
     new_field = np.zeros(data["gravitational_potential"].shape,
-                         dtype='float64')
+                         dtype='float64')*gz.unit_array
     new_field[1:-1, 1:-1, 1:-1] = -gz
 
     return new_field
@@ -68,7 +73,8 @@
 # Define the components of the pressure gradient field
 
 
-def _Grad_Pressure_x(field, data):
+@yt.derived_field(name='grad_pressure_x', units='g/(cm*s)**2', take_log=False)
+def grad_pressure_x(field, data):
 
     # We need to set up stencils
 
@@ -81,13 +87,14 @@
     px = data["pressure"][sl_right, 1:-1, 1:-1]/dx
     px -= data["pressure"][sl_left, 1:-1, 1:-1]/dx
 
-    new_field = np.zeros(data["pressure"].shape, dtype='float64')
+    new_field = np.zeros(data["pressure"].shape, dtype='float64')*px.unit_array
     new_field[1:-1, 1:-1, 1:-1] = px
 
     return new_field
 
 
-def _Grad_Pressure_y(field, data):
+@yt.derived_field(name='grad_pressure_y', units='g/(cm*s)**2', take_log=False)
+def grad_pressure_y(field, data):
 
     # We need to set up stencils
 
@@ -100,13 +107,14 @@
     py = data["pressure"][1:-1, sl_right, 1:-1]/dy
     py -= data["pressure"][1:-1, sl_left, 1:-1]/dy
 
-    new_field = np.zeros(data["pressure"].shape, dtype='float64')
+    new_field = np.zeros(data["pressure"].shape, dtype='float64')*px.unit_array
     new_field[1:-1, 1:-1, 1:-1] = py
 
     return new_field
 
 
-def _Grad_Pressure_z(field, data):
+@yt.derived_field(name='grad_pressure_z', units='g/(cm*s)**2', take_log=False)
+def grad_pressure_z(field, data):
 
     # We need to set up stencils
 
@@ -119,7 +127,7 @@
     pz = data["pressure"][1:-1, 1:-1, sl_right]/dz
     pz -= data["pressure"][1:-1, 1:-1, sl_left]/dz
 
-    new_field = np.zeros(data["pressure"].shape, dtype='float64')
+    new_field = np.zeros(data["pressure"].shape, dtype='float64')*px.unit_array
     new_field[1:-1, 1:-1, 1:-1] = pz
 
     return new_field
@@ -127,8 +135,8 @@
 
 # Define the "degree of hydrostatic equilibrium" field
 
-
-def _HSE(field, data):
+@yt.derived_field(name='HSE', units=None, take_log=False)
+def HSE(field, data):
 
     gx = data["density"]*data["Grav_Accel_x"]
     gy = data["density"]*data["Grav_Accel_y"]
@@ -138,31 +146,10 @@
     hy = data["Grad_Pressure_y"] - gy
     hz = data["Grad_Pressure_z"] - gz
 
-    h = np.sqrt((hx*hx+hy*hy+hz*hz)/(gx*gx+gy*gy+gz*gz))
+    h = np.sqrt((hx*hx+hy*hy+hz*hz)/(gx*gx+gy*gy+gz*gz))*gx.unit_array
 
     return h
 
-# Now add the fields to the database
-
-yt.add_field("Grav_Accel_x", function=_Grav_Accel_x, take_log=False,
-             validators=[yt.ValidateSpatial(1, ["gravitational_potential"])])
-
-yt.add_field("Grav_Accel_y", function=_Grav_Accel_y, take_log=False,
-             validators=[yt.ValidateSpatial(1, ["gravitational_potential"])])
-
-yt.add_field("Grav_Accel_z", function=_Grav_Accel_z, take_log=False,
-             validators=[yt.ValidateSpatial(1, ["gravitational_potential"])])
-
-yt.add_field("Grad_Pressure_x", function=_Grad_Pressure_x, take_log=False,
-             validators=[yt.ValidateSpatial(1, ["pressure"])])
-
-yt.add_field("Grad_Pressure_y", function=_Grad_Pressure_y, take_log=False,
-             validators=[yt.ValidateSpatial(1, ["pressure"])])
-
-yt.add_field("Grad_Pressure_z", function=_Grad_Pressure_z, take_log=False,
-             validators=[yt.ValidateSpatial(1, ["pressure"])])
-
-yt.add_field("HSE", function=_HSE, take_log=False)
 
 # Open two files, one at the beginning and the other at a later time when
 # there's a lot of sloshing going on.
@@ -173,8 +160,8 @@
 # Sphere objects centered at the cluster potential minimum with a radius
 # of 200 kpc
 
-sphere_i = dsi.h.sphere(dsi.domain_center, (200, "kpc"))
-sphere_f = dsf.h.sphere(dsf.domain_center, (200, "kpc"))
+sphere_i = dsi.sphere(dsi.domain_center, (200, "kpc"))
+sphere_f = dsf.sphere(dsf.domain_center, (200, "kpc"))
 
 # Average "degree of hydrostatic equilibrium" in these spheres
 
@@ -188,9 +175,9 @@
 # of the two files
 
 slc_i = yt.SlicePlot(dsi, 2, ["density", "HSE"], center=dsi.domain_center,
-                     width=(1.0, "mpc"))
+                     width=(1.0, "Mpc"))
 slc_f = yt.SlicePlot(dsf, 2, ["density", "HSE"], center=dsf.domain_center,
-                     width=(1.0, "mpc"))
+                     width=(1.0, "Mpc"))
 
 slc_i.save("initial")
 slc_f.save("final")

diff -r 366b6a97ecfd8770a402397fc5abb75a8dcde268 -r a5767f974d4d47205579a813f7b33fcab443263d doc/source/cookbook/image_background_colors.py
--- a/doc/source/cookbook/image_background_colors.py
+++ b/doc/source/cookbook/image_background_colors.py
@@ -1,21 +1,24 @@
-from yt.mods import *
-
 # This shows how to save ImageArray objects, such as those returned from 
 # volume renderings, to pngs with varying backgrounds.
 
+import yt
+import numpy as np
+
 # Lets make a fake "rendering" that has 4 channels and looks like a linear
 # gradient from the bottom to top.
+
 im = np.zeros([64,128,4])
 for i in xrange(im.shape[0]):
     for k in xrange(im.shape[2]):
         im[i,:,k] = np.linspace(0.,10.*k, im.shape[1])
-im_arr = ImageArray(im)
+im_arr = yt.ImageArray(im)
 
 # in this case you would have gotten im_arr from something like:
 # im_arr = cam.snapshot() 
 
 # To save it with the default settings, we can just use write_png, where it 
 # rescales the image and uses a black background.
+
 im_arr.write_png('standard.png')
  
 # write_png accepts a background keyword argument that defaults to 'black'.
@@ -24,12 +27,8 @@
 # white (1.,1.,1.,1.)
 # None  (0.,0.,0.,0.) <-- Transparent!
 # any rgba list/array: [r,g,b,a], bounded by 0..1
+
 im_arr.write_png('black_bg.png', background='black')
 im_arr.write_png('white_bg.png', background='white')
 im_arr.write_png('green_bg.png', background=[0.,1.,0.,1.])
 im_arr.write_png('transparent_bg.png', background=None)
-
-
-
-
-

diff -r 366b6a97ecfd8770a402397fc5abb75a8dcde268 -r a5767f974d4d47205579a813f7b33fcab443263d doc/source/cookbook/index.rst
--- a/doc/source/cookbook/index.rst
+++ b/doc/source/cookbook/index.rst
@@ -18,9 +18,6 @@
 `here <http://yt-project.org/data/>`_, where you will find links to download 
 individual datasets.
 
-If you want to take a look at more complex recipes, or submit your own,
-check out the `yt Hub <http://hub.yt-project.org>`_.
-
 .. note:: To contribute your own recipes, please follow the instructions 
     on how to contribute documentation code: :ref:`writing_documentation`.
 

diff -r 366b6a97ecfd8770a402397fc5abb75a8dcde268 -r a5767f974d4d47205579a813f7b33fcab443263d doc/source/cookbook/light_cone_projection.py
--- a/doc/source/cookbook/light_cone_projection.py
+++ b/doc/source/cookbook/light_cone_projection.py
@@ -1,9 +1,13 @@
-from yt.mods import *
-from yt.analysis_modules.api import LightCone
+### THIS RECIPE IS CURRENTLY BROKEN IN YT-3.0
+### DO NOT TRUST THIS RECIPE UNTIL THIS LINE IS REMOVED
+
+import yt
+from yt.analysis_modules.cosmological_observation.light_cone.light_cone import LightCone
 
 # Create a LightCone object extending from z = 0 to z = 0.1
 # with a 600 arcminute field of view and a resolution of
 # 60 arcseconds.
+
 # We have already set up the redshift dumps to be
 # used for this, so we will not use any of the time
 # data dumps.

diff -r 366b6a97ecfd8770a402397fc5abb75a8dcde268 -r a5767f974d4d47205579a813f7b33fcab443263d doc/source/cookbook/light_cone_with_halo_mask.py
--- a/doc/source/cookbook/light_cone_with_halo_mask.py
+++ b/doc/source/cookbook/light_cone_with_halo_mask.py
@@ -1,7 +1,10 @@
-from yt.mods import *
+### THIS RECIPE IS CURRENTLY BROKEN IN YT-3.0
+### DO NOT TRUST THIS RECIPE UNTIL THIS LINE IS REMOVED
 
-from yt.analysis_modules.api import LightCone
-from yt.analysis_modules.halo_profiler.api import *
+import yt
+
+from yt.analysis_modules.cosmological_observation.light_cone.light_cone import LightCone
+from yt.analysis_modules.halo_profiler.api import HaloProfiler
 
 # Instantiate a light cone object as usual.
 lc = LightCone('enzo_tiny_cosmology/32Mpc_32.enzo',

diff -r 366b6a97ecfd8770a402397fc5abb75a8dcde268 -r a5767f974d4d47205579a813f7b33fcab443263d doc/source/cookbook/make_light_ray.py
--- a/doc/source/cookbook/make_light_ray.py
+++ b/doc/source/cookbook/make_light_ray.py
@@ -1,13 +1,16 @@
+### THIS RECIPE IS CURRENTLY BROKEN IN YT-3.0
+### DO NOT TRUST THIS RECIPE UNTIL THIS LINE IS REMOVED
+
 import os
 import sys
-
-from yt.mods import *
-
-from yt.analysis_modules.halo_profiler.api import *
-from yt.analysis_modules.cosmological_observation.light_ray.api import \
+import yt
+from yt.analysis_modules.halo_profiler.api import HaloProfiler
+from yt.analysis_modules.cosmological_observation.light_ray.light_ray import \
      LightRay
 
-if not os.path.isdir("LR"): os.mkdir('LR')
+# Create a directory for the light rays
+if not os.path.isdir("LR"): 
+    os.mkdir('LR')
      
 # Create a LightRay object extending from z = 0 to z = 0.1
 # and use only the redshift dumps.

diff -r 366b6a97ecfd8770a402397fc5abb75a8dcde268 -r a5767f974d4d47205579a813f7b33fcab443263d doc/source/cookbook/multi_plot_3x2_FRB.py
--- a/doc/source/cookbook/multi_plot_3x2_FRB.py
+++ b/doc/source/cookbook/multi_plot_3x2_FRB.py
@@ -1,12 +1,14 @@
-from yt.mods import * # set up our namespace
+import yt
+import numpy as np
+from yt.visualization.api import get_multi_plot
 import matplotlib.colorbar as cb
 from matplotlib.colors import LogNorm
 
 fn = "Enzo_64/RD0006/RedshiftOutput0006" # parameter file to load
 
-
-pf = load(fn) # load data
-v, c = pf.h.find_max("density")
+# load data and find the maximum density value and its location
+ds = yt.load(fn) 
+v, c = ds.find_max("density")
 
 # set up our Fixed Resolution Buffer parameters: a width, resolution, and center
 width = (1.0, 'unitary')
@@ -28,7 +30,7 @@
 # over the columns, which will become axes of slicing.
 plots = []
 for ax in range(3):
-    sli = pf.slice(ax, c[ax])
+    sli = ds.slice(ax, c[ax])
     frb = sli.to_frb(width, res)
     den_axis = axes[ax][0]
     temp_axis = axes[ax][1]
@@ -39,11 +41,16 @@
         ax.xaxis.set_visible(False)
         ax.yaxis.set_visible(False)
 
-    plots.append(den_axis.imshow(frb['density'], norm=LogNorm()))
+    # converting our fixed resolution buffers to numpy arrays so matplotlib can
+    # render them
+    dens = np.array(frb['density'])
+    temp = np.array(frb['temperature'])
+
+    plots.append(den_axis.imshow(dens, norm=LogNorm()))
     plots[-1].set_clim((5e-32, 1e-29))
     plots[-1].set_cmap("bds_highcontrast")
 
-    plots.append(temp_axis.imshow(frb['temperature'], norm=LogNorm()))
+    plots.append(temp_axis.imshow(temp, norm=LogNorm()))
     plots[-1].set_clim((1e3, 1e8))
     plots[-1].set_cmap("hot")
     
@@ -60,4 +67,4 @@
     cbar.set_label(t)
 
 # And now we're done!  
-fig.savefig("%s_3x2.png" % pf)
+fig.savefig("%s_3x2.png" % ds)

diff -r 366b6a97ecfd8770a402397fc5abb75a8dcde268 -r a5767f974d4d47205579a813f7b33fcab443263d doc/source/cookbook/multi_plot_slice_and_proj.py
--- a/doc/source/cookbook/multi_plot_slice_and_proj.py
+++ b/doc/source/cookbook/multi_plot_slice_and_proj.py
@@ -1,4 +1,5 @@
-from yt.mods import * # set up our namespace
+import yt
+import numpy as np
 from yt.visualization.base_plot_types import get_multi_plot
 import matplotlib.colorbar as cb
 from matplotlib.colors import LogNorm
@@ -6,7 +7,7 @@
 fn = "GasSloshing/sloshing_nomag2_hdf5_plt_cnt_0150" # parameter file to load
 orient = 'horizontal'
 
-pf = load(fn) # load data
+ds = yt.load(fn) # load data
 
 # There's a lot in here:
 #   From this we get a containing figure, a list-of-lists of axes into which we
@@ -17,12 +18,11 @@
 #   bw is the base-width in inches, but 4 is about right for most cases.
 fig, axes, colorbars = get_multi_plot(3, 2, colorbar=orient, bw = 4)
 
-slc = pf.slice(2, 0.0, fields=["density","temperature","velocity_magnitude"], 
-                 center=pf.domain_center)
-proj = pf.proj("density", 2, weight_field="density", center=pf.domain_center)
+slc = yt.SlicePlot(ds, 'z', fields=["density","temperature","velocity_magnitude"])
+proj = yt.ProjectionPlot(ds, 'z', "density", weight_field="density")
 
-slc_frb = slc.to_frb((1.0, "mpc"), 512)
-proj_frb = proj.to_frb((1.0, "mpc"), 512)
+slc_frb = slc.data_source.to_frb((1.0, "Mpc"), 512)
+proj_frb = proj.data_source.to_frb((1.0, "Mpc"), 512)
 
 dens_axes = [axes[0][0], axes[1][0]]
 temp_axes = [axes[0][1], axes[1][1]]
@@ -37,12 +37,22 @@
     vax.xaxis.set_visible(False)
     vax.yaxis.set_visible(False)
 
-plots = [dens_axes[0].imshow(slc_frb["density"], origin='lower', norm=LogNorm()),
-         dens_axes[1].imshow(proj_frb["density"], origin='lower', norm=LogNorm()),
-         temp_axes[0].imshow(slc_frb["temperature"], origin='lower'),    
-         temp_axes[1].imshow(proj_frb["temperature"], origin='lower'),
-         vels_axes[0].imshow(slc_frb["velocity_magnitude"], origin='lower', norm=LogNorm()),
-         vels_axes[1].imshow(proj_frb["velocity_magnitude"], origin='lower', norm=LogNorm())]
+# Converting our Fixed Resolution Buffers to numpy arrays so that matplotlib
+# can render them
+
+slc_dens = np.array(slc_frb['density'])
+proj_dens = np.array(proj_frb['density'])
+slc_temp = np.array(slc_frb['temperature'])
+proj_temp = np.array(proj_frb['temperature'])
+slc_vel = np.array(slc_frb['velocity_magnitude'])
+proj_vel = np.array(proj_frb['velocity_magnitude'])
+
+plots = [dens_axes[0].imshow(slc_dens, origin='lower', norm=LogNorm()),
+         dens_axes[1].imshow(proj_dens, origin='lower', norm=LogNorm()),
+         temp_axes[0].imshow(slc_temp, origin='lower'),    
+         temp_axes[1].imshow(proj_temp, origin='lower'),
+         vels_axes[0].imshow(slc_vel, origin='lower', norm=LogNorm()),
+         vels_axes[1].imshow(proj_vel, origin='lower', norm=LogNorm())]
          
 plots[0].set_clim((1.0e-27,1.0e-25))
 plots[0].set_cmap("bds_highcontrast")
@@ -58,12 +68,12 @@
 plots[5].set_cmap("gist_rainbow")
 
 titles=[r'$\mathrm{Density}\ (\mathrm{g\ cm^{-3}})$', 
-        r'$\mathrm{temperature}\ (\mathrm{K})$',
-        r'$\mathrm{VelocityMagnitude}\ (\mathrm{cm\ s^{-1}})$']
+        r'$\mathrm{Temperature}\ (\mathrm{K})$',
+        r'$\mathrm{Velocity\ Magnitude}\ (\mathrm{cm\ s^{-1}})$']
 
 for p, cax, t in zip(plots[0:6:2], colorbars, titles):
     cbar = fig.colorbar(p, cax=cax, orientation=orient)
     cbar.set_label(t)
 
 # And now we're done! 
-fig.savefig("%s_3x2" % pf)
+fig.savefig("%s_3x2" % ds)

diff -r 366b6a97ecfd8770a402397fc5abb75a8dcde268 -r a5767f974d4d47205579a813f7b33fcab443263d doc/source/cookbook/multi_width_image.py
--- a/doc/source/cookbook/multi_width_image.py
+++ b/doc/source/cookbook/multi_width_image.py
@@ -1,15 +1,16 @@
-from yt.mods import *
+import yt
 
 # Load the dataset.
-pf = load("IsolatedGalaxy/galaxy0030/galaxy0030")
+ds = yt.load("IsolatedGalaxy/galaxy0030/galaxy0030")
 
 # Create a slice plot for the dataset.  With no additional arguments,
 # the width will be the size of the domain and the center will be the
 # center of the simulation box
-slc = SlicePlot(pf,2,'density')
+slc = yt.SlicePlot(ds, 'z', 'density')
 
-# Create a list of a couple of widths and units.
-widths = [(1, 'mpc'),
+# Create a list of a couple of widths and units. 
+# (N.B. Mpc (megaparsec) != mpc (milliparsec))
+widths = [(1, 'Mpc'),
           (15, 'kpc')]
 
 # Loop through the list of widths and units.
@@ -19,12 +20,12 @@
     slc.set_width(width, unit)
 
     # Write out the image with a unique name.
-    slc.save("%s_%010d_%s" % (pf, width, unit))
+    slc.save("%s_%010d_%s" % (ds, width, unit))
 
 zoomFactors = [2,4,5]
 
 # recreate the original slice
-slc = SlicePlot(pf,2,'density')
+slc = yt.SlicePlot(ds, 'z', 'density')
 
 for zoomFactor in zoomFactors:
 
@@ -32,4 +33,4 @@
     slc.zoom(zoomFactor)
 
     # Write out the image with a unique name.
-    slc.save("%s_%i" % (pf, zoomFactor))
+    slc.save("%s_%i" % (ds, zoomFactor))

diff -r 366b6a97ecfd8770a402397fc5abb75a8dcde268 -r a5767f974d4d47205579a813f7b33fcab443263d doc/source/cookbook/multiplot_2x2.py
--- a/doc/source/cookbook/multiplot_2x2.py
+++ b/doc/source/cookbook/multiplot_2x2.py
@@ -1,9 +1,9 @@
-from yt.mods import *
+import yt
 import matplotlib.pyplot as plt
 from mpl_toolkits.axes_grid1 import AxesGrid
 
 fn = "IsolatedGalaxy/galaxy0030/galaxy0030"
-pf = load(fn) # load data
+ds = yt.load(fn) # load data
 
 fig = plt.figure()
 
@@ -22,11 +22,16 @@
                 cbar_size="3%",
                 cbar_pad="0%")
 
-fields = ['density', 'velocity_x', 'velocity_y', 'VelocityMagnitude']
+fields = ['density', 'velocity_x', 'velocity_y', 'velocity_magnitude']
 
 # Create the plot.  Since SlicePlot accepts a list of fields, we need only
 # do this once.
-p = SlicePlot(pf, 'z', fields)
+p = yt.SlicePlot(ds, 'z', fields)
+
+# Velocity is going to be both positive and negative, so let's make these
+# slices linear
+p.set_log('velocity_x', False)
+p.set_log('velocity_y', False)
 p.zoom(2)
 
 # For each plotted field, force the SlicePlot to redraw itself onto the AxesGrid
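
The set_log calls above matter because velocity components take both signs, and log10 is undefined for negative values; one way to confirm a field needs a linear scale:

    ad = ds.all_data()
    print(ad.quantities.extrema("velocity_x"))  # negative minimum -> linear color scale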

diff -r 366b6a97ecfd8770a402397fc5abb75a8dcde268 -r a5767f974d4d47205579a813f7b33fcab443263d doc/source/cookbook/multiplot_2x2_coordaxes_slice.py
--- a/doc/source/cookbook/multiplot_2x2_coordaxes_slice.py
+++ b/doc/source/cookbook/multiplot_2x2_coordaxes_slice.py
@@ -1,9 +1,9 @@
-from yt.mods import *
+import yt
 import matplotlib.pyplot as plt
 from mpl_toolkits.axes_grid1 import AxesGrid
 
 fn = "IsolatedGalaxy/galaxy0030/galaxy0030"
-pf = load(fn) # load data
+ds = yt.load(fn) # load data
 
 fig = plt.figure()
 
@@ -27,7 +27,7 @@
 
 for i, (direction, field) in enumerate(zip(cuts, fields)):
     # Load the data and create a single plot
-    p = SlicePlot(pf, direction, field)
+    p = yt.SlicePlot(ds, direction, field)
     p.zoom(40)
 
     # This forces the ProjectionPlot to redraw itself on the AxesGrid axes.

diff -r 366b6a97ecfd8770a402397fc5abb75a8dcde268 -r a5767f974d4d47205579a813f7b33fcab443263d doc/source/cookbook/multiplot_2x2_time_series.py
--- a/doc/source/cookbook/multiplot_2x2_time_series.py
+++ b/doc/source/cookbook/multiplot_2x2_time_series.py
@@ -1,4 +1,4 @@
-from yt.mods import *
+import yt
 import matplotlib.pyplot as plt
 from mpl_toolkits.axes_grid1 import AxesGrid
 
@@ -23,8 +23,8 @@
 
 for i, fn in enumerate(fns):
     # Load the data and create a single plot
-    pf = load(fn) # load data
-    p = ProjectionPlot(pf, 'z', 'density', width=(55, 'Mpccm'))
+    ds = yt.load(fn) # load data
+    p = yt.ProjectionPlot(ds, 'z', 'density', width=(55, 'Mpccm'))
 
     # Ensure the colorbar limits match for all plots
     p.set_zlim('density', 1e-4, 1e-2)

diff -r 366b6a97ecfd8770a402397fc5abb75a8dcde268 -r a5767f974d4d47205579a813f7b33fcab443263d doc/source/cookbook/offaxis_projection.py
--- a/doc/source/cookbook/offaxis_projection.py
+++ b/doc/source/cookbook/offaxis_projection.py
@@ -1,7 +1,8 @@
-from yt.mods import *
+import yt
+import numpy as np
 
 # Load the dataset.
-pf = load("IsolatedGalaxy/galaxy0030/galaxy0030")
+ds = yt.load("IsolatedGalaxy/galaxy0030/galaxy0030")
 
 # Choose a center for the render.
 c = [0.5, 0.5, 0.5]
@@ -25,10 +26,10 @@
 # Create the off axis projection.
 # Setting no_ghost to False forces interpolation from ghost zones, which is
 # slower but gives a slightly higher quality image.
-image = off_axis_projection(pf, c, L, W, Npixels, "density", no_ghost=False)
+image = yt.off_axis_projection(ds, c, L, W, Npixels, "density", no_ghost=False)
 
 # Write out the final image and give it a name
 # relating to what our dataset is called.
 # We save the log of the values so that the colors do not span
 # many orders of magnitude.  Try it without and see what happens.
-write_image(np.log10(image), "%s_offaxis_projection.png" % pf)
+yt.write_image(np.log10(image), "%s_offaxis_projection.png" % ds)

diff -r 366b6a97ecfd8770a402397fc5abb75a8dcde268 -r a5767f974d4d47205579a813f7b33fcab443263d doc/source/cookbook/offaxis_projection_colorbar.py
--- a/doc/source/cookbook/offaxis_projection_colorbar.py
+++ b/doc/source/cookbook/offaxis_projection_colorbar.py
@@ -1,8 +1,9 @@
-from yt.mods import * # set up our namespace
+import yt
+import numpy as np
 
 fn = "IsolatedGalaxy/galaxy0030/galaxy0030" # parameter file to load
 
-pf = load(fn) # load data
+ds = yt.load(fn) # load data
 
 # Now we need a center of our volume to render.  Here we'll just use
 # 0.5,0.5,0.5, because volume renderings are not periodic.
@@ -31,9 +32,9 @@
 # Also note that we set the field which we want to project as "density", but
 # really we could use any arbitrary field like "temperature", "metallicity"
 # or whatever.
-image = off_axis_projection(pf, c, L, W, Npixels, "density", no_ghost=False)
+image = yt.off_axis_projection(ds, c, L, W, Npixels, "density", no_ghost=False)
 
 # Image is now an NxN array representing the intensities of the various pixels.
 # And now, we call our direct image saver.  We save the log of the result.
-write_projection(image, "offaxis_projection_colorbar.png", 
-                 colorbar_label="Column Density (cm$^{-2}$)")
+yt.write_projection(image, "offaxis_projection_colorbar.png", 
+                    colorbar_label="Column Density (cm$^{-2}$)")

diff -r 366b6a97ecfd8770a402397fc5abb75a8dcde268 -r a5767f974d4d47205579a813f7b33fcab443263d doc/source/cookbook/opaque_rendering.py
--- a/doc/source/cookbook/opaque_rendering.py
+++ b/doc/source/cookbook/opaque_rendering.py
@@ -1,20 +1,15 @@
-## Opaque Volume Rendering
+### THIS RECIPE IS CURRENTLY BROKEN IN YT-3.0
+### DO NOT TRUST THIS RECIPE UNTIL THIS LINE IS REMOVED
 
-# The new version of yt also features opaque rendering, using grey opacity.
-# For example, this makes blues opaque to red and green.  In this example we
-# will explore how the opacity model you choose changes the appearance of the
-# rendering.
+import yt
+import numpy as np
 
-# Here we start by loading up a dataset, in this case galaxy0030.
-
-from yt.mods import *
-
-pf = load("IsolatedGalaxy/galaxy0030/galaxy0030")
+ds = yt.load("IsolatedGalaxy/galaxy0030/galaxy0030")
 
 # We start by building a transfer function, and initializing a camera.
 
-tf = ColorTransferFunction((-30, -22))
-cam = pf.h.camera([0.5, 0.5, 0.5], [0.2, 0.3, 0.4], 0.10, 256, tf)
+tf = yt.ColorTransferFunction((-30, -22))
+cam = ds.camera([0.5, 0.5, 0.5], [0.2, 0.3, 0.4], 0.10, 256, tf)
 
 # Now let's add some isocontours, and take a snapshot.
 
@@ -66,5 +61,3 @@
 
 # That looks pretty different, but the main thing is that you can see that the
 # inner contours are somewhat visible again.  
-
-

This diff is so big that we needed to truncate the remainder.

https://bitbucket.org/yt_analysis/yt/commits/e4dd926d3104/
Changeset:   e4dd926d3104
Branch:      yt-3.0
User:        hegan
Date:        2014-06-25 21:47:36
Summary:     slightly cleaner prepend behaviour
Affected #:  1 file

diff -r a5767f974d4d47205579a813f7b33fcab443263d -r e4dd926d31041f07201587d0a37d0eb7091f0c3e yt/analysis_modules/halo_analysis/halo_catalog.py
--- a/yt/analysis_modules/halo_analysis/halo_catalog.py
+++ b/yt/analysis_modules/halo_analysis/halo_catalog.py
@@ -371,7 +371,7 @@
             self.data_source = self.halos_pf.all_data()
 
             # Add all of the default quantities that all halos must have
-            self.add_default_quantities('all', prepend=True)
+            self.add_default_quantities('all')
 
         my_index = np.argsort(self.data_source["particle_identifier"])
         for i in parallel_objects(my_index, njobs=njobs, dynamic=dynamic):
@@ -436,11 +436,11 @@
                 dataset.attrs["units"] = units
         out_file.close()
 
-    def add_default_quantities(self, field_type='halos', prepend=False):
-        self.add_quantity("particle_identifier", field_type=field_type,prepend=prepend)
-        self.add_quantity("particle_mass", field_type=field_type,prepend=prepend)
-        self.add_quantity("particle_position_x", field_type=field_type,prepend=prepend)
-        self.add_quantity("particle_position_y", field_type=field_type,prepend=prepend)
-        self.add_quantity("particle_position_z", field_type=field_type,prepend=prepend)
-        self.add_quantity("virial_radius", field_type=field_type,prepend=prepend)
+    def add_default_quantities(self, field_type='halos'):
+        self.add_quantity("particle_identifier", field_type=field_type,prepend=True)
+        self.add_quantity("particle_mass", field_type=field_type,prepend=True)
+        self.add_quantity("particle_position_x", field_type=field_type,prepend=True)
+        self.add_quantity("particle_position_y", field_type=field_type,prepend=True)
+        self.add_quantity("particle_position_z", field_type=field_type,prepend=True)
+        self.add_quantity("virial_radius", field_type=field_type,prepend=True)
 


https://bitbucket.org/yt_analysis/yt/commits/f65291c32a4c/
Changeset:   f65291c32a4c
Branch:      yt-3.0
User:        hegan
Date:        2014-06-25 21:56:43
Summary:     condensed notation
Affected #:  1 file

diff -r e4dd926d31041f07201587d0a37d0eb7091f0c3e -r f65291c32a4cbf9ec128d8044254840845f11076 yt/analysis_modules/halo_analysis/halo_catalog.py
--- a/yt/analysis_modules/halo_analysis/halo_catalog.py
+++ b/yt/analysis_modules/halo_analysis/halo_catalog.py
@@ -203,10 +203,7 @@
             field_type = kwargs.pop("field_type")
         else:
             field_type = None
-        if 'prepend' in kwargs:
-            prepend = kwargs.pop("prepend")
-        else:
-            prepend = False
+        prepend = kwargs.pop("prepend", False)
         if field_type is None:
             quantity = quantity_registry.find(key, *args, **kwargs)
         elif (field_type, key) in self.halos_pf.field_info:


https://bitbucket.org/yt_analysis/yt/commits/21ae8b1d7544/
Changeset:   21ae8b1d7544
Branch:      yt-3.0
User:        brittonsmith
Date:        2014-07-01 13:28:55
Summary:     Merged in hegan/yt/yt-3.0 (pull request #981)

Added option to prepend fields to the actions list
Affected #:  1 file

diff -r 6e33df0e38cbe4b7887b272bd7559597322177ad -r 21ae8b1d754407e453111684de7f6cb7f621d69d yt/analysis_modules/halo_analysis/halo_catalog.py
--- a/yt/analysis_modules/halo_analysis/halo_catalog.py
+++ b/yt/analysis_modules/halo_analysis/halo_catalog.py
@@ -209,6 +209,7 @@
             field_type = kwargs.pop("field_type")
         else:
             field_type = None
+        prepend = kwargs.pop("prepend", False)
         if field_type is None:
             quantity = quantity_registry.find(key, *args, **kwargs)
         elif (field_type, key) in self.halos_pf.field_info:
@@ -216,7 +217,10 @@
         else:
             raise RuntimeError("HaloCatalog quantity must be a registered function or a field of a known type.")
         self.quantities.append(key)
-        self.actions.append(("quantity", (key, quantity)))
+        if prepend:
+            self.actions.insert(0, ("quantity", (key, quantity)))
+        else:
+            self.actions.append(("quantity", (key, quantity)))
 
     def add_filter(self, halo_filter, *args, **kwargs):
         r"""
@@ -436,10 +440,10 @@
         out_file.close()
 
     def add_default_quantities(self, field_type='halos'):
-        self.add_quantity("particle_identifier", field_type=field_type)
-        self.add_quantity("particle_mass", field_type=field_type)
-        self.add_quantity("particle_position_x", field_type=field_type)
-        self.add_quantity("particle_position_y", field_type=field_type)
-        self.add_quantity("particle_position_z", field_type=field_type)
-        self.add_quantity("virial_radius", field_type=field_type)
+        self.add_quantity("particle_identifier", field_type=field_type,prepend=True)
+        self.add_quantity("particle_mass", field_type=field_type,prepend=True)
+        self.add_quantity("particle_position_x", field_type=field_type,prepend=True)
+        self.add_quantity("particle_position_y", field_type=field_type,prepend=True)
+        self.add_quantity("particle_position_z", field_type=field_type,prepend=True)
+        self.add_quantity("virial_radius", field_type=field_type,prepend=True)

Repository URL: https://bitbucket.org/yt_analysis/yt/

--

This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.


