[yt-svn] commit/yt: 4 new changesets

commits-noreply at bitbucket.org
Mon May 6 20:06:24 PDT 2013


4 new commits in yt:

https://bitbucket.org/yt_analysis/yt/commits/9586a3112cd4/
Changeset:   9586a3112cd4
Branch:      yt
User:        atmyers
Date:        2013-05-07 02:04:01
Summary:     Make the Pluto IO handler distinct from the Chombo one
Affected #:  2 files

diff -r 34b95297062b9f6dedf50d8a127e94ba1ec8e278 -r 9586a3112cd440027b1f353c59e95dd638ea5676 yt/frontends/pluto/api.py
--- a/yt/frontends/pluto/api.py
+++ b/yt/frontends/pluto/api.py
@@ -38,4 +38,4 @@
       add_pluto_field
 
 from .io import \
-      IOHandlerChomboHDF5
+      IOHandlerPlutoHDF5

diff -r 34b95297062b9f6dedf50d8a127e94ba1ec8e278 -r 9586a3112cd440027b1f353c59e95dd638ea5676 yt/frontends/pluto/io.py
--- a/yt/frontends/pluto/io.py
+++ b/yt/frontends/pluto/io.py
@@ -31,8 +31,8 @@
 from yt.utilities.io_handler import \
            BaseIOHandler
 
-class IOHandlerChomboHDF5(BaseIOHandler):
-    _data_style = "chombo_hdf5"
+class IOHandlerPlutoHDF5(BaseIOHandler):
+    _data_style = "pluto_hdf5"
     _offset_string = 'data:offsets=0'
     _data_string = 'data:datatype=0'
 

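For context on why changing _data_style is the operative part of this rename: yt picks an IO handler by that key, with each BaseIOHandler subclass registering itself under its own _data_style. A minimal Python sketch of that registration pattern (the metaclass and registry names here are illustrative assumptions, not necessarily yt's exact machinery):

io_registry = {}

class RegisteringIOMeta(type):
    # hypothetical stand-in for yt's handler registration
    def __init__(cls, name, bases, d):
        type.__init__(cls, name, bases, d)
        if d.get("_data_style") is not None:
            io_registry[d["_data_style"]] = cls

class BaseIOHandler(object):
    __metaclass__ = RegisteringIOMeta  # Python 2, as in yt at the time
    _data_style = None

class IOHandlerPlutoHDF5(BaseIOHandler):
    _data_style = "pluto_hdf5"

# Pluto data can now resolve to its own handler instead of Chombo's:
assert io_registry["pluto_hdf5"] is IOHandlerPlutoHDF5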

https://bitbucket.org/yt_analysis/yt/commits/a5a04b9207b1/
Changeset:   a5a04b9207b1
Branch:      yt
User:        atmyers
Date:        2013-05-07 02:07:17
Summary:     Merged yt_analysis/yt into yt
Affected #:  36 files

diff -r 9586a3112cd440027b1f353c59e95dd638ea5676 -r a5a04b9207b187eb09478969630d01275da603f1 .hgignore
--- a/.hgignore
+++ b/.hgignore
@@ -4,7 +4,9 @@
 freetype.cfg
 hdf5.cfg
 png.cfg
+rockstar.cfg
 yt_updater.log
+yt/analysis_modules/halo_finding/rockstar/rockstar_interface.c
 yt/frontends/ramses/_ramses_reader.cpp
 yt/utilities/amr_utils.c
 yt/utilities/kdtree/forthonf2c.h

diff -r 9586a3112cd440027b1f353c59e95dd638ea5676 -r a5a04b9207b187eb09478969630d01275da603f1 doc/install_script.sh
--- a/doc/install_script.sh
+++ b/doc/install_script.sh
@@ -34,7 +34,7 @@
 
 INST_HG=1       # Install Mercurial or not?  If hg is not already
                 # installed, yt cannot be installed.
-INST_ZLIB=1     # On some systems (Kraken) matplotlib has issues with 
+INST_ZLIB=1     # On some systems (Kraken) matplotlib has issues with
                 # the system zlib, which is compiled statically.
                 # If need be, you can turn this off.
 INST_BZLIB=1    # On some systems, libbzip2 is missing.  This can
@@ -76,7 +76,7 @@
    echo "the script to re-enable root-level installation.  Sorry!"
    exit 1
 fi
-if [[ ${DEST_DIR%/} == /usr/local ]] 
+if [[ ${DEST_DIR%/} == /usr/local ]]
 then
    echo "******************************************************"
    echo "*                                                    *"
@@ -170,6 +170,19 @@
         echo "   $ module load gcc"
         echo
     fi
+    if [ "${MYHOST##midway}" != "${MYHOST}" ]
+    then
+        echo "Looks like you're on Midway."
+        echo
+        echo " ******************************************"
+        echo " * It may be better to use the yt module! *"
+        echo " *                                        *"
+        echo " *   $ module load yt                     *"
+        echo " *                                        *"
+        echo " ******************************************"
+        echo
+        return
+    fi
     if [ "${MYOS##Darwin}" != "${MYOS}" ]
     then
         echo "Looks like you're running on Mac OSX."
@@ -181,7 +194,7 @@
 	echo "must register for an account on the apple developer tools"
 	echo "website: https://developer.apple.com/downloads to obtain the"
 	echo "download link."
-	echo 
+	echo
 	echo "We have gathered some additional instructions for each"
 	echo "version of OS X below. If you have trouble installing yt"
 	echo "after following these instructions, don't hesitate to contact"
@@ -192,15 +205,15 @@
 	echo "menu bar.  We're assuming that you've installed all operating"
 	echo "system updates; if you have an older version, we suggest"
 	echo "running software update and installing all available updates."
-	echo 
-        echo "OS X 10.5.8: search for and download Xcode 3.1.4 from the" 
+	echo
+        echo "OS X 10.5.8: search for and download Xcode 3.1.4 from the"
 	echo "Apple developer tools website."
         echo
         echo "OS X 10.6.8: search for and download Xcode 3.2 from the Apple"
 	echo "developer tools website.  You can either download the"
 	echo "Xcode 3.2.2 Developer Tools package (744 MB) and then use"
-	echo "Software Update to update to XCode 3.2.6 or" 
-	echo "alternatively, you can download the Xcode 3.2.6/iOS SDK" 
+	echo "Software Update to update to XCode 3.2.6 or"
+	echo "alternatively, you can download the Xcode 3.2.6/iOS SDK"
 	echo "bundle (4.1 GB)."
         echo
         echo "OS X 10.7.5: download Xcode 4.2 from the mac app store"
@@ -208,20 +221,20 @@
         echo "Alternatively, download the Xcode command line tools from"
         echo "the Apple developer tools website."
         echo
-	echo "OS X 10.8.2: download Xcode 4.6 from the mac app store."
+	echo "OS X 10.8.2: download Xcode 4.6.1 from the mac app store."
 	echo "(search for Xcode)."
 	echo "Additionally, you will have to manually install the Xcode"
-	echo "command line tools, see:" 
+	echo "command line tools, see:"
 	echo "http://stackoverflow.com/questions/9353444"
 	echo "Alternatively, download the Xcode command line tools from"
 	echo "the Apple developer tools website."
 	echo
-        echo "NOTE: It's possible that the installation will fail, if so," 
-	echo "please set the following environment variables, remove any" 
+        echo "NOTE: It's possible that the installation will fail, if so,"
+	echo "please set the following environment variables, remove any"
 	echo "broken installation tree, and re-run this script verbatim."
         echo
-        echo "$ export CC=gcc-4.2"
-        echo "$ export CXX=g++-4.2"
+        echo "$ export CC=gcc"
+        echo "$ export CXX=g++"
 	echo
         OSX_VERSION=`sw_vers -productVersion`
         if [ "${OSX_VERSION##10.8}" != "${OSX_VERSION}" ]
@@ -278,7 +291,7 @@
         echo
         echo " INST_ZLIB=0"
         echo " INST_FTYPE=0"
-        echo 
+        echo
         echo " to avoid conflicts with other command-line programs "
         echo " (like eog and evince, for example)."
     fi
@@ -424,7 +437,7 @@
     cd ..
 }
 
-if type -P wget &>/dev/null 
+if type -P wget &>/dev/null
 then
     echo "Using wget"
     export GETFILE="wget -nv"
@@ -486,28 +499,27 @@
 cd ${DEST_DIR}/src
 
 # Now we dump all our SHA512 files out.
-
-echo 'eda1b8090e5e21e7e039ef4dd03de186a7b416df9d5a4e4422abeeb4d51383b9a6858e1ac4902d8e5010f661b295bbb2452c43c8738be668379b4eb4835d0f61  Cython-0.17.1.tar.gz' > Cython-0.17.1.tar.gz.sha512
-echo '44eea803870a66ff0bab08d13a8b3388b5578ebc1c807d1d9dca0a93e6371e91b15d02917a00b3b20dc67abb5a21dabaf9b6e9257a561f85eeff2147ac73b478  PyX-0.11.1.tar.gz' > PyX-0.11.1.tar.gz.sha512
-echo 'b981f8464575bb24c297631c87a3b9172312804a0fc14ce1fa7cb41ce2b0d2fd383cd1c816d6e10c36467d18bf9492d6faf557c81c04ff3b22debfa93f30ad0b  Python-2.7.3.tgz' > Python-2.7.3.tgz.sha512
-echo 'c017d3d59dd324ac91af0edc178c76b60a5f90fbb775cf843e39062f95bd846238f2c53705f8890ed3f34bc0e6e75671a73d13875eb0287d6201cb45f0a2d338  bzip2-1.0.5.tar.gz' > bzip2-1.0.5.tar.gz.sha512
+echo 'fb85d71bb4f80b35f0d0f1735c650dd75c5f84b05635ddf91d6241ff103b5a49158c5b851a20c15e05425f6dde32a4971b35fcbd7445f61865b4d61ffd1fbfa1  Cython-0.18.tar.gz' > Cython-0.18.tar.gz.sha512
+echo '4941f5aa21aff3743546495fb073c10d2657ff42b2aff401903498638093d0e31e344cce778980f28a7170c6d29eab72ac074277b9d4088376e8692dc71e55c1  PyX-0.12.1.tar.gz' > PyX-0.12.1.tar.gz.sha512
+echo '3349152c47ed2b63c5c9aabcfa92b8497ea9d71ca551fd721e827fcb8f91ff9fbbee6bba8f8cb2dea185701b8798878b4b2435c1496b63d4b4a37c624a625299  Python-2.7.4.tgz' > Python-2.7.4.tgz.sha512
+echo '00ace5438cfa0c577e5f578d8a808613187eff5217c35164ffe044fbafdfec9e98f4192c02a7d67e01e5a5ccced630583ad1003c37697219b0f147343a3fdd12  bzip2-1.0.6.tar.gz' > bzip2-1.0.6.tar.gz.sha512
 echo 'a296dfcaef7e853e58eed4e24b37c4fa29cfc6ac688def048480f4bb384b9e37ca447faf96eec7b378fd764ba291713f03ac464581d62275e28eb2ec99110ab6  reason-js-20120623.zip' > reason-js-20120623.zip.sha512
-echo 'b519218f93946400326e9b656669269ecb3e5232b944e18fbc3eadc4fe2b56244d68aae56d6f69042b4c87c58c881ee2aaa279561ea0f0f48d5842155f4de9de  freetype-2.4.4.tar.gz' > freetype-2.4.4.tar.gz.sha512
-echo 'b3290c498191684781ca5286ab454eb1bd045e8d894f5b86fb86beb88f174e22ac3ab008fb02d6562051d9fa6a9593920cab433223f6d5473999913223b8e183  h5py-2.1.0.tar.gz' > h5py-2.1.0.tar.gz.sha512
+echo 'b46c93d76f8ce09c94765b20b2eeadf71207671f1131777de178b3727c235b4dd77f6e60d62442b96648c3c6749e9e4c1194c1b02af7e946576be09e1ff7ada3  freetype-2.4.11.tar.gz' > freetype-2.4.11.tar.gz.sha512
+echo '15ca0209e8d8f172cb0708a2de946fbbde8551d9bebc4a95fa7ae31558457a7f43249d5289d7675490c577deb4e0153698fd2407644078bf30bd5ab10135fce3  h5py-2.1.2.tar.gz' > h5py-2.1.2.tar.gz.sha512
 echo 'c68a425bacaa7441037910b9166f25b89e1387776a7749a5350793f89b1690350df5f018060c31d03686e7c3ed2aa848bd2b945c96350dc3b6322e087934783a  hdf5-1.8.9.tar.gz' > hdf5-1.8.9.tar.gz.sha512
-echo 'dbefad00fa34f4f21dca0f1e92e95bd55f1f4478fa0095dcf015b4d06f0c823ff11755cd777e507efaf1c9098b74af18f613ec9000e5c3a5cc1c7554fb5aefb8  libpng-1.5.12.tar.gz' > libpng-1.5.12.tar.gz.sha512
-echo '5b1a0fb52dcb21ca5f0ab71c8a49550e1e8cf633552ec6598dc43f0b32c03422bf5af65b30118c163231ecdddfd40846909336f16da318959106076e80a3fad0  matplotlib-1.2.0.tar.gz' > matplotlib-1.2.0.tar.gz.sha512
-echo '91693ca5f34934956a7c2c98bb69a5648b2a5660afd2ecf4a05035c5420450d42c194eeef0606d7683e267e4eaaaab414df23f30b34c88219bdd5c1a0f1f66ed  mercurial-2.5.1.tar.gz' > mercurial-2.5.1.tar.gz.sha512
-echo 'de3dd37f753614055dcfed910e9886e03688b8078492df3da94b1ec37be796030be93291cba09e8212fffd3e0a63b086902c3c25a996cf1439e15c5b16e014d9  numpy-1.6.1.tar.gz' > numpy-1.6.1.tar.gz.sha512
-echo '5ad681f99e75849a5ca6f439c7a19bb51abc73d121b50f4f8e4c0da42891950f30407f761a53f0fe51b370b1dbd4c4f5a480557cb2444c8c7c7d5412b328a474  sqlite-autoconf-3070500.tar.gz' > sqlite-autoconf-3070500.tar.gz.sha512
-echo 'edae735960279d92acf58e1f4095c6392a7c2059b8f1d2c46648fc608a0fb06b392db2d073f4973f5762c034ea66596e769b95b3d26ad963a086b9b2d09825f2  zlib-1.2.3.tar.bz2' > zlib-1.2.3.tar.bz2.sha512
+echo 'b2b53ed358bacab9e8d63a51f17bd5f121ece60a1d7c53e8a8eb08ad8b1e4393a8d7a86eec06e2efc62348114f0d84c0a3dfc805e68e6edd93b20401962b3554  libpng-1.6.1.tar.gz' > libpng-1.6.1.tar.gz.sha512
+echo '497f91725eaf361bdb9bdf38db2bff5068a77038f1536df193db64c9b887e3b0d967486daee722eda6e2c4e60f034eee030673e53d07bf0db0f3f7c0ef3bd208  matplotlib-1.2.1.tar.gz' > matplotlib-1.2.1.tar.gz.sha512
+echo '928fdeaaf0eaec80adbd8765521de9666ab56aaa2101fb9ab2cb392d8b29475d3b052d89652ff9b67522cfcc6cd958717ac715f51b0573ee088e9a595f29afe2  mercurial-2.5.4.tar.gz' > mercurial-2.5.4.tar.gz.sha512
+echo 'a485daa556f6c76003de1dbb3e42b3daeee0a320c69c81b31a7d2ebbc2cf8ab8e96c214a4758e5e7bf814295dc1d6aa563092b714db7e719678d8462135861a8  numpy-1.7.0.tar.gz' > numpy-1.7.0.tar.gz.sha512
+echo '293d78d14a9347cb83e1a644e5f3e4447ed6fc21642c51683e5495dda08d2312194a73d1fc3c1d78287e33ed065aa251ecbaa7c0ea9189456c1702e96d78becd  sqlite-autoconf-3071601.tar.gz' > sqlite-autoconf-3071601.tar.gz.sha512
+echo 'b1c073ad26684e354f7c522c14655840592e03872bc0a94690f89cae2ff88f146fce1dad252ff27a889dac4a32ff9f8ab63ba940671f9da89e9ba3e19f1bf58d  zlib-1.2.7.tar.gz' > zlib-1.2.7.tar.gz.sha512
 echo '05ac335727a2c3036f31a2506fdd2615aa436bfbe2f81799fe6c51bffe2591ad6a8427f3b25c34e7e709fb4e7607a0589dc7a22185c1f9b894e90de6711a88aa  ipython-0.13.1.tar.gz' > ipython-0.13.1.tar.gz.sha512
-echo 'fb3cf421b2dc48c31956b3e3ee4ab6ebc743deec3bf626c2238a1996c8c51be87260bd6aa662793a1f0c34dcda9b3146763777bb162dfad6fec4ca7acc403b2e  zeromq-2.2.0.tar.gz' > zeromq-2.2.0.tar.gz.sha512
-echo 'd761b492352841cdc125d9f0c99ee6d6c435812472ea234728b7f0fb4ad1048e1eec9b399df2081fbc926566f333f7780fedd0ce23255a6633fe5c60ed15a6af  pyzmq-2.1.11.tar.gz' > pyzmq-2.1.11.tar.gz.sha512
-echo '57fa5e57dfb98154a42d2d477f29401c2260ae7ad3a8128a4098b42ee3b35c54367b1a3254bc76b9b3b14b4aab7c3e1135858f68abc5636daedf2f01f9b8a3cf  tornado-2.2.tar.gz' > tornado-2.2.tar.gz.sha512
-echo '1332e3d5465ca249c357314cf15d2a4e5e83a941841021b8f6a17a107dce268a7a082838ade5e8db944ecde6bfb111211ab218aa414ee90aafbb81f1491b3b93  Forthon-0.8.10.tar.gz' > Forthon-0.8.10.tar.gz.sha512
+echo 'b9d061ca49e54ea917e0aed2b2a48faef33061dbf6d17eae7f8c3fff0b35ca883e7324f6cb24bda542443f669dcd5748037a5f2309f4c359d68adef520894865  zeromq-3.2.2.tar.gz' > zeromq-3.2.2.tar.gz.sha512
+echo '852fce8a8308c4e1e4b19c77add2b2055ca2ba570b28e8364888df490af92b860c72e860adfb075b3405a9ceb62f343889f20a8711c9353a7d9059adee910f83  pyzmq-13.0.2.tar.gz' > pyzmq-13.0.2.tar.gz.sha512
+echo '303bd3fbea22be57fddf7df78ddf5a783d355a0c8071b1363250daafc20232ddd28eedc44aa1194f4a7afd82f9396628c5bb06819e02b065b6a1b1ae8a7c19e1  tornado-3.0.tar.gz' > tornado-3.0.tar.gz.sha512
+echo '3f53d0b474bfd79fea2536d0a9197eaef6c0927e95f2f9fd52dbd6c1d46409d0e649c21ac418d8f7767a9f10fe6114b516e06f2be4b06aec3ab5bdebc8768220  Forthon-0.8.11.tar.gz' > Forthon-0.8.11.tar.gz.sha512
 echo 'c13116c1f0547000cc565e15774687b9e884f8b74fb62a84e578408a868a84961704839065ae4f21b662e87f2aaedf6ea424ea58dfa9d3d73c06281f806d15dd  nose-1.2.1.tar.gz' > nose-1.2.1.tar.gz.sha512
-echo '73de2c99406a38f85273931597525cec4ebef55b93712adca3b0bfea8ca3fc99446e5d6495817e9ad55cf4d48feb7fb49734675c4cc8938db8d4a5225d30eca7  python-hglib-0.2.tar.gz' > python-hglib-0.2.tar.gz.sha512
+echo 'd67de9567256e6f1649e4f3f7dfee63371d5f00fd3fd4f92426198f862e97c57f70e827d19f4e5e1929ad85ef2ce7aa5a0596b101cafdac71672e97dc115b397  python-hglib-0.3.tar.gz' > python-hglib-0.3.tar.gz.sha512
 echo 'ffc602eb346717286b3d0a6770c60b03b578b3cf70ebd12f9e8b1c8c39cdb12ef219ddaa041d7929351a6b02dbb8caf1821b5452d95aae95034cbf4bc9904a7a  sympy-0.7.2.tar.gz' > sympy-0.7.2.tar.gz.sha512
 echo '172f2bc671145ebb0add2669c117863db35851fb3bdb192006cd710d4d038e0037497eb39a6d01091cb923f71a7e8982a77b6e80bf71d6275d5d83a363c8d7e5  rockstar-0.99.6.tar.gz' > rockstar-0.99.6.tar.gz.sha512
 echo 'd4fdd62f2db5285cd133649bd1bfa5175cb9da8304323abd74e0ef1207d55e6152f0f944da1da75f73e9dafb0f3bb14efba3c0526c732c348a653e0bd223ccfa  scipy-0.11.0.tar.gz' > scipy-0.11.0.tar.gz.sha512
@@ -515,50 +527,50 @@
 echo '8770214491e31f0a7a3efaade90eee7b0eb20a8a6ab635c5f854d78263f59a1849133c14ef5123d01023f0110cbb9fc6f818da053c01277914ae81473430a952  lapack-3.4.2.tar.gz' > lapack-3.4.2.tar.gz.sha512
 # Individual processes
 [ -z "$HDF5_DIR" ] && get_ytproject hdf5-1.8.9.tar.gz
-[ $INST_ZLIB -eq 1 ] && get_ytproject zlib-1.2.3.tar.bz2 
-[ $INST_BZLIB -eq 1 ] && get_ytproject bzip2-1.0.5.tar.gz
-[ $INST_PNG -eq 1 ] && get_ytproject libpng-1.5.12.tar.gz
-[ $INST_FTYPE -eq 1 ] && get_ytproject freetype-2.4.4.tar.gz
-[ $INST_SQLITE3 -eq 1 ] && get_ytproject sqlite-autoconf-3070500.tar.gz
-[ $INST_PYX -eq 1 ] && get_ytproject PyX-0.11.1.tar.gz
-[ $INST_0MQ -eq 1 ] && get_ytproject zeromq-2.2.0.tar.gz
-[ $INST_0MQ -eq 1 ] && get_ytproject pyzmq-2.1.11.tar.gz
-[ $INST_0MQ -eq 1 ] && get_ytproject tornado-2.2.tar.gz
+[ $INST_ZLIB -eq 1 ] && get_ytproject zlib-1.2.7.tar.gz
+[ $INST_BZLIB -eq 1 ] && get_ytproject bzip2-1.0.6.tar.gz
+[ $INST_PNG -eq 1 ] && get_ytproject libpng-1.6.1.tar.gz
+[ $INST_FTYPE -eq 1 ] && get_ytproject freetype-2.4.11.tar.gz
+[ $INST_SQLITE3 -eq 1 ] && get_ytproject sqlite-autoconf-3071601.tar.gz
+[ $INST_PYX -eq 1 ] && get_ytproject PyX-0.12.1.tar.gz
+[ $INST_0MQ -eq 1 ] && get_ytproject zeromq-3.2.2.tar.gz
+[ $INST_0MQ -eq 1 ] && get_ytproject pyzmq-13.0.2.tar.gz
+[ $INST_0MQ -eq 1 ] && get_ytproject tornado-3.0.tar.gz
 [ $INST_SCIPY -eq 1 ] && get_ytproject scipy-0.11.0.tar.gz
 [ $INST_SCIPY -eq 1 ] && get_ytproject blas.tar.gz
 [ $INST_SCIPY -eq 1 ] && get_ytproject lapack-3.4.2.tar.gz
-get_ytproject Python-2.7.3.tgz
-get_ytproject numpy-1.6.1.tar.gz
-get_ytproject matplotlib-1.2.0.tar.gz
-get_ytproject mercurial-2.5.1.tar.gz
+get_ytproject Python-2.7.4.tgz
+get_ytproject numpy-1.7.0.tar.gz
+get_ytproject matplotlib-1.2.1.tar.gz
+get_ytproject mercurial-2.5.4.tar.gz
 get_ytproject ipython-0.13.1.tar.gz
-get_ytproject h5py-2.1.0.tar.gz
-get_ytproject Cython-0.17.1.tar.gz
+get_ytproject h5py-2.1.2.tar.gz
+get_ytproject Cython-0.18.tar.gz
 get_ytproject reason-js-20120623.zip
-get_ytproject Forthon-0.8.10.tar.gz
-get_ytproject nose-1.2.1.tar.gz 
-get_ytproject python-hglib-0.2.tar.gz
+get_ytproject Forthon-0.8.11.tar.gz
+get_ytproject nose-1.2.1.tar.gz
+get_ytproject python-hglib-0.3.tar.gz
 get_ytproject sympy-0.7.2.tar.gz
 get_ytproject rockstar-0.99.6.tar.gz
 if [ $INST_BZLIB -eq 1 ]
 then
-    if [ ! -e bzip2-1.0.5/done ]
+    if [ ! -e bzip2-1.0.6/done ]
     then
-        [ ! -e bzip2-1.0.5 ] && tar xfz bzip2-1.0.5.tar.gz
+        [ ! -e bzip2-1.0.6 ] && tar xfz bzip2-1.0.6.tar.gz
         echo "Installing BZLIB"
-        cd bzip2-1.0.5
-        if [ `uname` = "Darwin" ] 
+        cd bzip2-1.0.6
+        if [ `uname` = "Darwin" ]
         then
-            if [ -z "${CC}" ] 
+            if [ -z "${CC}" ]
             then
                 sed -i.bak 's/soname/install_name/' Makefile-libbz2_so
             else
-                sed -i.bak -e 's/soname/install_name/' -e "s/CC=gcc/CC=${CC}/" Makefile-libbz2_so 
+                sed -i.bak -e 's/soname/install_name/' -e "s/CC=gcc/CC=${CC}/" Makefile-libbz2_so
             fi
         fi
         ( make install CFLAGS=-fPIC LDFLAGS=-fPIC PREFIX=${DEST_DIR} 2>&1 ) 1>> ${LOG_FILE} || do_exit
         ( make -f Makefile-libbz2_so CFLAGS=-fPIC LDFLAGS=-fPIC PREFIX=${DEST_DIR} 2>&1 ) 1>> ${LOG_FILE} || do_exit
-        ( cp -v libbz2.so.1.0.4 ${DEST_DIR}/lib 2>&1 ) 1>> ${LOG_FILE} || do_exit
+        ( cp -v libbz2.so.1.0.6 ${DEST_DIR}/lib 2>&1 ) 1>> ${LOG_FILE} || do_exit
         touch done
         cd ..
     fi
@@ -569,11 +581,11 @@
 
 if [ $INST_ZLIB -eq 1 ]
 then
-    if [ ! -e zlib-1.2.3/done ]
+    if [ ! -e zlib-1.2.7/done ]
     then
-        [ ! -e zlib-1.2.3 ] && tar xfj zlib-1.2.3.tar.bz2
+        [ ! -e zlib-1.2.7 ] && tar xfz zlib-1.2.7.tar.gz
         echo "Installing ZLIB"
-        cd zlib-1.2.3
+        cd zlib-1.2.7
         ( ./configure --shared --prefix=${DEST_DIR}/ 2>&1 ) 1>> ${LOG_FILE} || do_exit
         ( make install 2>&1 ) 1>> ${LOG_FILE} || do_exit
         ( make clean 2>&1) 1>> ${LOG_FILE} || do_exit
@@ -587,11 +599,11 @@
 
 if [ $INST_PNG -eq 1 ]
 then
-    if [ ! -e libpng-1.5.12/done ]
+    if [ ! -e libpng-1.6.1/done ]
     then
-        [ ! -e libpng-1.5.12 ] && tar xfz libpng-1.5.12.tar.gz
+        [ ! -e libpng-1.6.1 ] && tar xfz libpng-1.6.1.tar.gz
         echo "Installing PNG"
-        cd libpng-1.5.12
+        cd libpng-1.6.1
         ( ./configure CPPFLAGS=-I${DEST_DIR}/include CFLAGS=-I${DEST_DIR}/include --prefix=${DEST_DIR}/ 2>&1 ) 1>> ${LOG_FILE} || do_exit
         ( make install 2>&1 ) 1>> ${LOG_FILE} || do_exit
         ( make clean 2>&1) 1>> ${LOG_FILE} || do_exit
@@ -605,11 +617,11 @@
 
 if [ $INST_FTYPE -eq 1 ]
 then
-    if [ ! -e freetype-2.4.4/done ]
+    if [ ! -e freetype-2.4.11/done ]
     then
-        [ ! -e freetype-2.4.4 ] && tar xfz freetype-2.4.4.tar.gz
+        [ ! -e freetype-2.4.11 ] && tar xfz freetype-2.4.11.tar.gz
         echo "Installing FreeType2"
-        cd freetype-2.4.4
+        cd freetype-2.4.11
         ( ./configure CFLAGS=-I${DEST_DIR}/include --prefix=${DEST_DIR}/ 2>&1 ) 1>> ${LOG_FILE} || do_exit
         ( make install 2>&1 ) 1>> ${LOG_FILE} || do_exit
         ( make clean 2>&1) 1>> ${LOG_FILE} || do_exit
@@ -642,11 +654,11 @@
 
 if [ $INST_SQLITE3 -eq 1 ]
 then
-    if [ ! -e sqlite-autoconf-3070500/done ]
+    if [ ! -e sqlite-autoconf-3071601/done ]
     then
-        [ ! -e sqlite-autoconf-3070500 ] && tar xfz sqlite-autoconf-3070500.tar.gz
+        [ ! -e sqlite-autoconf-3071601 ] && tar xfz sqlite-autoconf-3071601.tar.gz
         echo "Installing SQLite3"
-        cd sqlite-autoconf-3070500
+        cd sqlite-autoconf-3071601
         ( ./configure --prefix=${DEST_DIR}/ 2>&1 ) 1>> ${LOG_FILE} || do_exit
         ( make ${MAKE_PROCS} install 2>&1 ) 1>> ${LOG_FILE} || do_exit
         ( make clean 2>&1) 1>> ${LOG_FILE} || do_exit
@@ -655,11 +667,11 @@
     fi
 fi
 
-if [ ! -e Python-2.7.3/done ]
+if [ ! -e Python-2.7.4/done ]
 then
     echo "Installing Python.  This may take a while, but don't worry.  yt loves you."
-    [ ! -e Python-2.7.3 ] && tar xfz Python-2.7.3.tgz
-    cd Python-2.7.3
+    [ ! -e Python-2.7.4 ] && tar xfz Python-2.7.4.tgz
+    cd Python-2.7.4
     ( ./configure --prefix=${DEST_DIR}/ ${PYCONF_ARGS} 2>&1 ) 1>> ${LOG_FILE} || do_exit
 
     ( make ${MAKE_PROCS} 2>&1 ) 1>> ${LOG_FILE} || do_exit
@@ -674,12 +686,11 @@
 
 if [ $INST_HG -eq 1 ]
 then
-    echo "Installing Mercurial."
-    do_setup_py mercurial-2.5.1
+    do_setup_py mercurial-2.5.4
     export HG_EXEC=${DEST_DIR}/bin/hg
 else
     # We assume that hg can be found in the path.
-    if type -P hg &>/dev/null 
+    if type -P hg &>/dev/null
     then
         export HG_EXEC=hg
     else
@@ -696,7 +707,7 @@
     elif [ -e $ORIG_PWD/../yt/mods.py ]
     then
         YT_DIR=`dirname $ORIG_PWD`
-    elif [ ! -e yt-hg ] 
+    elif [ ! -e yt-hg ]
     then
         YT_DIR="$PWD/yt-hg/"
         ( ${HG_EXEC} --debug clone https://bitbucket.org/yt_analysis/yt-supplemental/ 2>&1 ) 1>> ${LOG_FILE}
@@ -706,7 +717,7 @@
         ( ${HG_EXEC} --debug clone https://bitbucket.org/yt_analysis/yt/ ./yt-hg 2>&1 ) 1>> ${LOG_FILE}
         # Now we update to the branch we're interested in.
         ( ${HG_EXEC} -R ${YT_DIR} up -C ${BRANCH} 2>&1 ) 1>> ${LOG_FILE}
-    elif [ -e yt-hg ] 
+    elif [ -e yt-hg ]
     then
         YT_DIR="$PWD/yt-hg/"
     fi
@@ -714,7 +725,7 @@
 fi
 
 # This fixes problems with gfortran linking.
-unset LDFLAGS 
+unset LDFLAGS
 
 echo "Installing distribute"
 ( ${DEST_DIR}/bin/python2.7 ${YT_DIR}/distribute_setup.py 2>&1 ) 1>> ${LOG_FILE} || do_exit
@@ -724,7 +735,7 @@
 
 if [ $INST_SCIPY -eq 0 ]
 then
-    do_setup_py numpy-1.6.1 ${NUMPY_ARGS}
+    do_setup_py numpy-1.7.0 ${NUMPY_ARGS}
 else
     if [ ! -e scipy-0.11.0/done ]
     then
@@ -752,8 +763,8 @@
 	fi
     fi
     export BLAS=$PWD/BLAS/libfblas.a
-    export LAPACK=$PWD/lapack-3.4.2/liblapack.a    
-    do_setup_py numpy-1.6.1 ${NUMPY_ARGS}
+    export LAPACK=$PWD/lapack-3.4.2/liblapack.a
+    do_setup_py numpy-1.7.0 ${NUMPY_ARGS}
     do_setup_py scipy-0.11.0 ${NUMPY_ARGS}
 fi
 
@@ -776,10 +787,10 @@
     echo "Setting CFLAGS ${CFLAGS}"
 fi
 # Now we set up the basedir for matplotlib:
-mkdir -p ${DEST_DIR}/src/matplotlib-1.2.0
-echo "[directories]" >> ${DEST_DIR}/src/matplotlib-1.2.0/setup.cfg
-echo "basedirlist = ${DEST_DIR}" >> ${DEST_DIR}/src/matplotlib-1.2.0/setup.cfg
-do_setup_py matplotlib-1.2.0
+mkdir -p ${DEST_DIR}/src/matplotlib-1.2.1
+echo "[directories]" >> ${DEST_DIR}/src/matplotlib-1.2.1/setup.cfg
+echo "basedirlist = ${DEST_DIR}" >> ${DEST_DIR}/src/matplotlib-1.2.1/setup.cfg
+do_setup_py matplotlib-1.2.1
 if [ -n "${OLD_LDFLAGS}" ]
 then
     export LDFLAG=${OLD_LDFLAGS}
@@ -791,29 +802,29 @@
 # Now we do our IPython installation, which has two optional dependencies.
 if [ $INST_0MQ -eq 1 ]
 then
-    if [ ! -e zeromq-2.2.0/done ]
+    if [ ! -e zeromq-3.2.2/done ]
     then
-        [ ! -e zeromq-2.2.0 ] && tar xfz zeromq-2.2.0.tar.gz
+        [ ! -e zeromq-3.2.2 ] && tar xfz zeromq-3.2.2.tar.gz
         echo "Installing ZeroMQ"
-        cd zeromq-2.2.0
+        cd zeromq-3.2.2
         ( ./configure --prefix=${DEST_DIR}/ 2>&1 ) 1>> ${LOG_FILE} || do_exit
         ( make install 2>&1 ) 1>> ${LOG_FILE} || do_exit
         ( make clean 2>&1) 1>> ${LOG_FILE} || do_exit
         touch done
         cd ..
     fi
-    do_setup_py pyzmq-2.1.11 --zmq=${DEST_DIR}
-    do_setup_py tornado-2.2
+    do_setup_py pyzmq-13.0.2 --zmq=${DEST_DIR}
+    do_setup_py tornado-3.0
 fi
 
 do_setup_py ipython-0.13.1
-do_setup_py h5py-2.1.0
-do_setup_py Cython-0.17.1
-do_setup_py Forthon-0.8.10
+do_setup_py h5py-2.1.2
+do_setup_py Cython-0.18
+do_setup_py Forthon-0.8.11
 do_setup_py nose-1.2.1
-do_setup_py python-hglib-0.2
+do_setup_py python-hglib-0.3
 do_setup_py sympy-0.7.2
-[ $INST_PYX -eq 1 ] && do_setup_py PyX-0.11.1
+[ $INST_PYX -eq 1 ] && do_setup_py PyX-0.12.1
 
 # Now we build Rockstar and set its environment variable.
 if [ $INST_ROCKSTAR -eq 1 ]

diff -r 9586a3112cd440027b1f353c59e95dd638ea5676 -r a5a04b9207b187eb09478969630d01275da603f1 setup.py
--- a/setup.py
+++ b/setup.py
@@ -18,6 +18,9 @@
 from distutils.core import Command
 from distutils.spawn import find_executable
 
+def find_fortran_deps():
+    return (find_executable("Forthon"),
+            find_executable("gfortran"))
 
 class BuildForthon(Command):
 
@@ -41,9 +44,7 @@
     def run(self):
 
         """runner"""
-        Forthon_exe = find_executable("Forthon")
-        gfortran_exe = find_executable("gfortran")
-
+        (Forthon_exe, gfortran_exe) = find_fortran_deps()
         if None in (Forthon_exe, gfortran_exe):
             sys.stderr.write(
                 "fKDpy.so won't be built due to missing Forthon/gfortran\n"
@@ -154,7 +155,7 @@
 
 import setuptools
 
-VERSION = "2.5dev"
+VERSION = "2.6dev"
 
 if os.path.exists('MANIFEST'):
     os.remove('MANIFEST')
@@ -193,9 +194,13 @@
 
 class my_install_data(np_install_data.install_data):
     def run(self):
-        self.distribution.data_files.append(
-            ('yt/utilities/kdtree', ['yt/utilities/kdtree/fKDpy.so'])
-        )
+        (Forthon_exe, gfortran_exe) = find_fortran_deps()
+        if None in (Forthon_exe, gfortran_exe):
+            pass
+        else:
+            self.distribution.data_files.append(
+                ('yt/utilities/kdtree', ['yt/utilities/kdtree/fKDpy.so'])
+                )
         np_install_data.install_data.run(self)
 
 class my_build_py(build_py):

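The setup.py hunk above factors the executable probing into find_fortran_deps() and reuses it in my_install_data, so fKDpy.so is only registered as a data file when the toolchain that builds it exists. A standalone sketch of that guard (same distutils call as in the diff; the prints are illustrative):

from distutils.spawn import find_executable

def find_fortran_deps():
    return (find_executable("Forthon"),
            find_executable("gfortran"))

Forthon_exe, gfortran_exe = find_fortran_deps()
if None in (Forthon_exe, gfortran_exe):
    # mirrors the install-time branch: skip the optional artifact
    print "skipping fKDpy.so (Forthon/gfortran not found)"
else:
    print "fKDpy.so can be built and shipped"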
diff -r 9586a3112cd440027b1f353c59e95dd638ea5676 -r a5a04b9207b187eb09478969630d01275da603f1 yt/analysis_modules/halo_finding/halo_objects.py
--- a/yt/analysis_modules/halo_finding/halo_objects.py
+++ b/yt/analysis_modules/halo_finding/halo_objects.py
@@ -1367,6 +1367,7 @@
         self._groups = []
         self._max_dens = -1
         self.pf = pf
+        self.redshift = pf.current_redshift
         self.out_list = out_list
         self._data_source = pf.h.all_data()
         mylog.info("Parsing Rockstar halo list")

diff -r 9586a3112cd440027b1f353c59e95dd638ea5676 -r a5a04b9207b187eb09478969630d01275da603f1 yt/analysis_modules/halo_finding/rockstar/rockstar.py
--- a/yt/analysis_modules/halo_finding/rockstar/rockstar.py
+++ b/yt/analysis_modules/halo_finding/rockstar/rockstar.py
@@ -238,6 +238,7 @@
         tpf = ts[0]
 
         def _particle_count(field, data):
+            if data.NumberOfParticles == 0: return 0
             try:
                 data["particle_type"]
                 has_particle_type=True

diff -r 9586a3112cd440027b1f353c59e95dd638ea5676 -r a5a04b9207b187eb09478969630d01275da603f1 yt/config.py
--- a/yt/config.py
+++ b/yt/config.py
@@ -62,7 +62,7 @@
     notebook_password = '',
     answer_testing_tolerance = '3',
     answer_testing_bitwise = 'False',
-    gold_standard_filename = 'gold006',
+    gold_standard_filename = 'gold007',
     local_standard_filename = 'local001',
     sketchfab_api_key = 'None'
     )

diff -r 9586a3112cd440027b1f353c59e95dd638ea5676 -r a5a04b9207b187eb09478969630d01275da603f1 yt/data_objects/data_containers.py
--- a/yt/data_objects/data_containers.py
+++ b/yt/data_objects/data_containers.py
@@ -36,6 +36,8 @@
 import itertools
 import shelve
 import cStringIO
+import fileinput
+from re import finditer
 
 from yt.funcs import *
 from yt.config import ytcfg
@@ -3585,12 +3587,12 @@
         given the tilt about the x axis when e0 was aligned
         to x after t1, t2 rotations about z, y
         """
-        RX = get_rotation_matrix(-tilt, (1,0,0)).transpose()
-        RY = get_rotation_matrix(-t2,   (0,1,0)).transpose()
-        RZ = get_rotation_matrix(-t1,   (0,0,1)).transpose()
-        e1 = ((0, 1, 0) * RX).sum(axis = 1)
-        e1 = (e1 * RY).sum(axis = 1)
-        e1 = (e1 * RZ).sum(axis = 1)
+        RX = get_rotation_matrix(-tilt, (1, 0, 0)).transpose()
+        RY = get_rotation_matrix(-t2,   (0, 1, 0)).transpose()
+        RZ = get_rotation_matrix(-t1,   (0, 0, 1)).transpose()
+        e1 = ((0, 1, 0) * RX).sum(axis=1)
+        e1 = (e1 * RY).sum(axis=1)
+        e1 = (e1 * RZ).sum(axis=1)
         e2 = np.cross(e0, e1)
 
         self._e1 = e1
@@ -3610,87 +3612,64 @@
         can just use the sphere one and forget about checking orientation
         but feed in the A parameter for radius
         """
-    def _get_list_of_grids(self, field = None):
+    def _get_list_of_grids(self, field=None):
         """
         This returns the grids that are possibly within the ellipse
         """
-        grids,ind = self.hierarchy.find_sphere_grids(self.center, self._A)
+        grids, ind = self.hierarchy.find_sphere_grids(self.center, self._A)
         # Now we sort by level
         grids = grids.tolist()
-        grids.sort(key=lambda x: (x.Level, \
-                                  x.LeftEdge[0], \
-                                  x.LeftEdge[1], \
+        grids.sort(key=lambda x: (x.Level,
+                                  x.LeftEdge[0],
+                                  x.LeftEdge[1],
                                   x.LeftEdge[2]))
-        self._grids = np.array(grids, dtype = 'object')
+        self._grids = np.array(grids, dtype='object')
 
     def _is_fully_enclosed(self, grid):
         """
         check if all grid corners are inside the ellipsoid
         """
-        # vector from corner to center
-        vr = (grid._corners - self.center)
-        # 3 possible cases of locations taking periodic BC into account
-        # just listing the components, find smallest later
-        dotarr=np.array([vr, vr + self.DW, vr - self.DW])
-        # these vrdote# finds the product of vr components with e#
-        # square the results
-        # find the smallest
-        # sums it
-        vrdote0_2 = (np.multiply(dotarr, self._e0)**2).min(axis \
-                                                           = 0).sum(axis = 1)
-        vrdote1_2 = (np.multiply(dotarr, self._e1)**2).min(axis \
-                                                           = 0).sum(axis = 1)
-        vrdote2_2 = (np.multiply(dotarr, self._e2)**2).min(axis \
-                                                           = 0).sum(axis = 1)
-        return np.all(vrdote0_2 / self._A**2 + \
-                      vrdote1_2 / self._B**2 + \
-                      vrdote2_2 / self._C**2 <=1.0)
-
-    @restore_grid_state # Pains me not to decorate with cache_mask here
-    def _get_cut_mask(self, grid, field = None):
+        return False
+
+    @restore_grid_state  # Pains me not to decorate with cache_mask here
+    def _get_cut_mask(self, grid, field=None):
         """
         This checks if each cell is inside the ellipsoid
         """
         # We have the *property* center, which is not necessarily
         # the same as the field_parameter
         if self._is_fully_enclosed(grid):
-            return True # We do not want child masking here
+            return True  # We do not want child masking here
         if not isinstance(grid, (FakeGridForParticles, GridChildMaskWrapper)) \
            and grid.id in self._cut_masks:
             return self._cut_masks[grid.id]
-        Inside = np.zeros(grid["x"].shape, dtype = 'float64')
-        dim = grid["x"].shape
-        # need this to take into account non-cube root grid tiles
-        if (len(dim) == 1):
-            dot_evec = np.zeros([3, dim[0]])
-        elif (len(dim) == 2):
-            dot_evec = np.zeros([3, dim[0], dim[1]])
-        elif (len(dim) == 3):
-            dot_evec = np.zeros([3, dim[0], dim[1], dim[2]])
+
+        dot_evecx = np.zeros(grid.ActiveDimensions)
+        dot_evecy = np.zeros(grid.ActiveDimensions)
+        dot_evecz = np.zeros(grid.ActiveDimensions)
 
         for i, ax in enumerate('xyz'):
             # distance to center
-            ar  = grid[ax]-self.center[i]
-            # cases to take into account periodic BC
-            case = np.array([ar, ar + self.DW[i], ar - self.DW[i]])
-            # find which of the 3 cases is smallest in magnitude
-            index = np.abs(case).argmin(axis = 0)
-            # restrict distance to only the smallest cases
-            vec = np.choose(index, case)
+            ar = grid[ax]-self.center[i]
+            # correct for periodicity
+            vec = np.array([ar, ar + self.DW[i], ar - self.DW[i]])
+            ind = np.argmin(np.abs(vec), axis=0)
+            vec = np.choose(ind, vec)
             # sum up to get the dot product with e_vectors
-            dot_evec += np.array([vec * self._e0[i], \
-                                  vec * self._e1[i], \
-                                  vec * self._e2[i]])
+            dot_evecx += vec * self._e0[i] / self._A
+            dot_evecy += vec * self._e1[i] / self._B
+            dot_evecz += vec * self._e2[i] / self._C
+
         # Calculate the eqn of ellipsoid, if it is inside
         # then result should be <= 1.0
-        Inside = dot_evec[0]**2 / self._A**2 + \
-                 dot_evec[1]**2 / self._B**2 + \
-                 dot_evec[2]**2 / self._C**2
-        cm = ((Inside <= 1.0) & grid.child_mask)
+        cm = ((dot_evecx**2 +
+               dot_evecy**2 +
+               dot_evecz**2 <= 1.0) & grid.child_mask)
         if not isinstance(grid, (FakeGridForParticles, GridChildMaskWrapper)):
             self._cut_masks[grid.id] = cm
         return cm
 
+
 class AMRCoveringGridBase(AMR3DData):
     """A 3D region with all data extracted to a single, specified
     resolution.
@@ -4374,6 +4353,230 @@
                 vv[:,i,j] = self.vertices[j,i::3]
         return vv
 
+    def export_obj(self, filename, transparency = 1.0, dist_fac = None,
+                   color_field = None, emit_field = None, color_map = "algae", 
+                   color_log = True, emit_log = True, plot_index = None, 
+                   color_field_max = None, color_field_min = None, 
+                   emit_field_max = None, emit_field_min = None):
+        r"""This exports the surface to the OBJ format, suitable for visualization
+        in many different programs (e.g., Blender).  NOTE: this exports an .obj file 
+        and an .mtl file, both with the general 'filename' as a prefix.  
+        The .obj file points to the .mtl file in its header, so if you move the 2 
+        files, make sure you change the .obj header to account for this. ALSO NOTE: 
+        the emit_field needs to be a combination of the other 2 fields used to 
+        have the emissivity track with the color.
+
+        Parameters
+        ----------
+        filename : string
+            The file this will be exported to.  This cannot be a file-like object.
+            Note - there are no file extensions included - both obj & mtl files
+            are created.
+        transparency : float
+            This gives the transparency of the output surface plot.  Values
+            from 0.0 (invisible) to 1.0 (opaque).
+        dist_fac : float
+            Divide the axes distances by this amount.
+        color_field : string
+            Should a field be sampled and colormapped?
+        emit_field : string
+            Should we track the emissivity of a field?
+              NOTE: this should be a combination of the other 2 fields being used.
+        color_map : string
+            Which color map should be applied?
+        color_log : bool
+            Should the color field be logged before being mapped?
+        emit_log : bool
+            Should the emitting field be logged before being mapped?
+        plot_index : integer
+            Index of plot for multiple plots.  If None, then only 1 plot.
+        color_field_max : float
+            Maximum value of the color field across all surfaces.
+        color_field_min : float
+            Minimum value of the color field across all surfaces.
+        emit_field_max : float
+            Maximum value of the emitting field across all surfaces.
+        emit_field_min : float
+            Minimum value of the emitting field across all surfaces.
+
+        Examples
+        --------
+
+        >>> sp = pf.h.sphere("max", (10, "kpc"))
+        >>> trans = 1.0
+        >>> distf = 3.1e18*1e3 # distances into kpc
+        >>> surf = pf.h.surface(sp, "Density", 5e-27)
+        >>> surf.export_obj("my_galaxy", transparency=trans, dist_fac = distf)
+
+        >>> sp = pf.h.sphere("max", (10, "kpc"))
+        >>> mi, ma = sp.quantities['Extrema']('Temperature')[0]
+        >>> rhos = [1e-24, 1e-25]
+        >>> trans = [0.5, 1.0]
+        >>> distf = 3.1e18*1e3 # distances into kpc
+        >>> for i, r in enumerate(rhos):
+        ...     surf = pf.h.surface(sp,'Density',r)
+        ...     surf.export_obj("my_galaxy", transparency=trans[i], 
+        ...                      color_field='Temperature', dist_fac = distf, 
+        ...                      plot_index = i, color_field_max = ma, 
+        ...                      color_field_min = mi)
+
+        >>> sp = pf.h.sphere("max", (10, "kpc"))
+        >>> rhos = [1e-24, 1e-25]
+        >>> trans = [0.5, 1.0]
+        >>> distf = 3.1e18*1e3 # distances into kpc
+        >>> def _Emissivity(field, data):
+        ...     return (data['Density']*data['Density']*np.sqrt(data['Temperature']))
+        >>> add_field("Emissivity", function=_Emissivity, units=r"\rm{g K}/\rm{cm}^{6}")
+        >>> for i, r in enumerate(rhos):
+        ...     surf = pf.h.surface(sp,'Density',r)
+        ...     surf.export_obj("my_galaxy", transparency=trans[i], 
+        ...                      color_field='Temperature', emit_field = 'Emissivity', 
+        ...                      dist_fac = distf, plot_index = i)
+
+        """
+        if self.vertices is None:
+            self.get_data(color_field,"face")
+        elif color_field is not None:
+            if color_field not in self.field_data:
+                self[color_field]
+        if emit_field is not None:
+            if emit_field not in self.field_data:
+                self[emit_field]
+        only_on_root(self._export_obj, filename, transparency, dist_fac, color_field, emit_field, 
+                             color_map, color_log, emit_log, plot_index, color_field_max, 
+                             color_field_min, emit_field_max, emit_field_min)
+
+    def _color_samples_obj(self, cs, em, color_log, emit_log, color_map, arr, 
+                           color_field_max, color_field_min, 
+                           emit_field_max, emit_field_min): # this now holds for obj files
+        if color_log: cs = np.log10(cs)
+        if emit_log: em = np.log10(em)
+        if color_field_min is None:
+            mi = cs.min()
+        else:
+            mi = color_field_min
+            if color_log: mi = np.log10(mi)
+        if color_field_max is None:
+            ma = cs.max()
+        else:
+            ma = color_field_max
+            if color_log: ma = np.log10(ma)
+        cs = (cs - mi) / (ma - mi)
+        # to get color indices for OBJ formatting
+        from yt.visualization._colormap_data import color_map_luts
+        lut = color_map_luts[color_map]
+        x = np.mgrid[0.0:1.0:lut[0].shape[0]*1j]
+        arr["cind"][:] = (np.interp(cs,x,x)*(lut[0].shape[0]-1)).astype("uint8")
+        # now, get emission
+        if emit_field_min is None:
+            emi = em.min()
+        else:
+            emi = emit_field_min
+            if emit_log: emi = np.log10(emi)
+        if emit_field_max is None:
+            ema = em.max()
+        else:
+            ema = emit_field_max
+            if emit_log: ema = np.log10(ema)
+        em = (em - emi)/(ema - emi)
+        x = np.mgrid[0.0:255.0:2j] # assume 1 emissivity per color
+        arr["emit"][:] = (np.interp(em,x,x))*2.0 # for some reason, max emiss = 2
+
+    @parallel_root_only
+    def _export_obj(self, filename, transparency, dist_fac = None, 
+                    color_field = None, emit_field = None, color_map = "algae", 
+                    color_log = True, emit_log = True, plot_index = None, 
+                    color_field_max = None, color_field_min = None, 
+                    emit_field_max = None, emit_field_min = None):
+        if plot_index is None:
+            plot_index = 0
+        if isinstance(filename, file):
+            fobj = filename + '.obj'
+            fmtl = filename + '.mtl'
+        else:
+            if plot_index == 0:
+                fobj = open(filename + '.obj', "w")
+                fmtl = open(filename + '.mtl', 'w')
+                cc = 1
+            else:
+                # read in last vertex
+                linesave = ''
+                for line in fileinput.input(filename + '.obj'):
+                    if line[0] == 'f':
+                        linesave = line
+                p = [m.start() for m in finditer(' ', linesave)]
+                cc = int(linesave[p[len(p)-1]:])+1
+                fobj = open(filename + '.obj', "a")
+                fmtl = open(filename + '.mtl', 'a')
+        ftype = [("cind", "uint8"), ("emit", "float")]
+        vtype = [("x","float"),("y","float"), ("z","float")]
+        if plot_index == 0:
+            fobj.write("# yt OBJ file\n")
+            fobj.write("# www.yt-project.com\n")
+            fobj.write("mtllib " + filename + '.mtl\n\n')  # use this material file for the faces
+            fmtl.write("# yt MTL file\n")
+            fmtl.write("# www.yt-project.com\n\n")
+        #(0) formulate vertices
+        nv = self.vertices.shape[1] # number of groups of vertices
+        f = np.empty(nv/self.vertices.shape[0], dtype=ftype) # store sets of face colors
+        v = np.empty(nv, dtype=vtype) # stores vertices
+        if color_field is not None:
+            cs = self[color_field]
+        else:
+            cs = np.empty(self.vertices.shape[1]/self.vertices.shape[0])
+        if emit_field is not None:
+            em = self[emit_field]
+        else:
+            em = np.empty(self.vertices.shape[1]/self.vertices.shape[0])            
+        self._color_samples_obj(cs, em, color_log, emit_log, color_map, f, 
+                                color_field_max, color_field_min, 
+                                emit_field_max, emit_field_min) # map color values to color scheme
+        from yt.visualization._colormap_data import color_map_luts # import colors for mtl file
+        lut = color_map_luts[color_map] # enumerate colors
+        # interpolate emissivity to enumerated colors
+        emiss = np.interp(np.mgrid[0:lut[0].shape[0]],np.mgrid[0:len(cs)],f["emit"][:])
+        if dist_fac is None: # then normalize by bounds
+            DLE = self.pf.domain_left_edge
+            DRE = self.pf.domain_right_edge
+            bounds = [(DLE[i], DRE[i]) for i in range(3)]
+            for i, ax in enumerate("xyz"):
+                # Do the bounds first since we cast to f32
+                tmp = self.vertices[i,:]
+                np.subtract(tmp, bounds[i][0], tmp)
+                w = bounds[i][1] - bounds[i][0]
+                np.divide(tmp, w, tmp)
+                np.subtract(tmp, 0.5, tmp) # Center at origin.
+                v[ax][:] = tmp   
+        else:
+            for i, ax in enumerate("xyz"):
+                tmp = self.vertices[i,:]
+                np.divide(tmp, dist_fac, tmp)
+                v[ax][:] = tmp
+        #(1) write all colors per surface to mtl file
+        for i in range(0,lut[0].shape[0]): 
+            omname = "material_" + str(i) + '_' + str(plot_index)  # name of the material
+            fmtl.write("newmtl " + omname +'\n') # the specific material (color) for this face
+            fmtl.write("Ka %.6f %.6f %.6f\n" %(0.0, 0.0, 0.0)) # ambient color, keep off
+            fmtl.write("Kd %.6f %.6f %.6f\n" %(lut[0][i], lut[1][i], lut[2][i])) # color of face
+            fmtl.write("Ks %.6f %.6f %.6f\n" %(0.0, 0.0, 0.0)) # specular color, keep off
+            fmtl.write("d %.6f\n" %(transparency))  # transparency
+            fmtl.write("em %.6f\n" %(emiss[i])) # emissivity per color
+            fmtl.write("illum 2\n") # not relevant, 2 means highlights on?
+            fmtl.write("Ns %.6f\n\n" %(0.0)) #keep off, some other specular thing
+        #(2) write vertices
+        for i in range(0,self.vertices.shape[1]):
+            fobj.write("v %.6f %.6f %.6f\n" %(v["x"][i], v["y"][i], v["z"][i]))    
+        fobj.write("#done defining vertices\n\n")
+        #(3) define faces and materials for each face
+        for i in range(0,self.triangles.shape[0]):
+            omname = 'material_' + str(f["cind"][i]) + '_' + str(plot_index) # which color to use
+            fobj.write("usemtl " + omname + '\n') # which material to use for this face (color)
+            fobj.write("f " + str(cc) + ' ' + str(cc+1) + ' ' + str(cc+2) + '\n\n') # vertices to color
+            cc = cc+3
+        fmtl.close()
+        fobj.close()
+
+
     def export_ply(self, filename, bounds = None, color_field = None,
                    color_map = "algae", color_log = True, sample_type = "face"):
         r"""This exports the surface to the PLY format, suitable for visualization
@@ -4606,22 +4809,46 @@
             mylog.error("Problem uploading.")
         return upload_id
 
+# Many of these items are set up specifically to ensure that
+# we are not breaking old pickle files.  This means we must only call the
+# _reconstruct_object and that we cannot mandate any additional arguments to
+# the reconstruction function.
+#
+# In the future, this would be better off being set up to more directly
+# reference objects or retain state, perhaps with a context manager.
+#
+# One final detail: time series or multiple parameter files in a single pickle
+# seems problematic.
+
+class ReconstructedObject(tuple):
+    pass
+
+def _check_nested_args(arg, ref_pf):
+    if not isinstance(arg, (tuple, list, ReconstructedObject)):
+        return arg
+    elif isinstance(arg, ReconstructedObject) and ref_pf == arg[0]:
+        return arg[1]
+    narg = [_check_nested_args(a, ref_pf) for a in arg]
+    return narg
+
+def _get_pf_by_hash(hash):
+    from yt.data_objects.static_output import _cached_pfs
+    for pf in _cached_pfs.values():
+        if pf._hash() == hash: return pf
+    return None
 
 def _reconstruct_object(*args, **kwargs):
     pfid = args[0]
     dtype = args[1]
+    pf = _get_pf_by_hash(pfid)
+    if not pf:
+        pfs = ParameterFileStore()
+        pf = pfs.get_pf_hash(pfid)
     field_parameters = args[-1]
     # will be much nicer when we can do pfid, *a, fp = args
-    args, new_args = args[2:-1], []
-    for arg in args:
-        if iterable(arg) and len(arg) == 2 \
-           and not isinstance(arg, types.DictType) \
-           and isinstance(arg[1], AMRData):
-            new_args.append(arg[1])
-        else: new_args.append(arg)
-    pfs = ParameterFileStore()
-    pf = pfs.get_pf_hash(pfid)
+    args = args[2:-1]
+    new_args = [_check_nested_args(a, pf) for a in args]
     cls = getattr(pf.h, dtype)
     obj = cls(*new_args)
     obj.field_parameters.update(field_parameters)
-    return pf, obj
+    return ReconstructedObject((pf, obj))

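The ReconstructedObject tuple subclass above exists so _check_nested_args can tell a previously reconstructed (pf, obj) pair apart from an ordinary two-element tuple argument inside an old pickle. A self-contained sketch of that unwrapping (ref_pf is a hypothetical stand-in for a parameter-file hash):

class ReconstructedObject(tuple):
    pass

def unwrap(arg, ref_pf):
    # only unwrap pairs we marked ourselves, and only for this pf
    if isinstance(arg, ReconstructedObject) and arg[0] == ref_pf:
        return arg[1]
    return arg

ref_pf = "pf-hash-1234"
plain = ("pf-hash-1234", "sphere")               # ordinary argument
marked = ReconstructedObject((ref_pf, "sphere"))

print unwrap(plain, ref_pf)   # ('pf-hash-1234', 'sphere') -- untouched
print unwrap(marked, ref_pf)  # 'sphere' -- unwrapped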
diff -r 9586a3112cd440027b1f353c59e95dd638ea5676 -r a5a04b9207b187eb09478969630d01275da603f1 yt/data_objects/hierarchy.py
--- a/yt/data_objects/hierarchy.py
+++ b/yt/data_objects/hierarchy.py
@@ -236,6 +236,8 @@
                 fn = os.path.join(self.directory,
                         "%s.yt" % self.parameter_file.basename)
         dir_to_check = os.path.dirname(fn)
+        if dir_to_check == '':
+            dir_to_check = '.'
         # We have four options:
         #    Writeable, does not exist      : create, open as append
         #    Writeable, does exist          : open as append
@@ -317,7 +319,7 @@
         under the name *name* on the node /Objects.
         """
         s = cPickle.dumps(obj, protocol=-1)
-        self.save_data(s, "/Objects", name, force = True)
+        self.save_data(np.array(s, dtype='c'), "/Objects", name, force = True)
 
     def load_object(self, name):
         """

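The save_object change above stores the pickle string as a NumPy character array rather than a bare Python str, presumably because the HDF5-backed save_data path handles a fixed-size character array more robustly than a raw binary string. A standalone round trip showing the encoding is lossless (a sketch only, not the yt call path):

import cPickle
import numpy as np

s = cPickle.dumps({"a": 1}, protocol=-1)
arr = np.array(s, dtype='c')         # one byte per character
restored = cPickle.loads(arr.tostring())
print restored                        # {'a': 1}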
diff -r 9586a3112cd440027b1f353c59e95dd638ea5676 -r a5a04b9207b187eb09478969630d01275da603f1 yt/data_objects/particle_io.py
--- a/yt/data_objects/particle_io.py
+++ b/yt/data_objects/particle_io.py
@@ -58,7 +58,8 @@
 
     def get_data(self, fields):
         fields = ensure_list(fields)
-        rvs = self.source.get_data(fields, force_particle_read=True)
+        self.source.get_data(fields, force_particle_read=True)
+        rvs = [self.source[field] for field in fields]
         if len(fields) == 1: return rvs[0]
         return rvs
 

diff -r 9586a3112cd440027b1f353c59e95dd638ea5676 -r a5a04b9207b187eb09478969630d01275da603f1 yt/data_objects/tests/test_pickle.py
--- /dev/null
+++ b/yt/data_objects/tests/test_pickle.py
@@ -0,0 +1,69 @@
+"""
+Testsuite for pickling yt objects.
+
+Author: Elizabeth Tasker <tasker at astro1.sci.hokudai.ac.jp>
+Affiliation: Hokkaido University
+Homepage: http://yt-project.org/
+License:
+  Copyright (C) 2013 Elizabeth Tasker. All Rights Reserved.
+
+  This file is part of yt.
+
+  yt is free software; you can redistribute it and/or modify
+  it under the terms of the GNU General Public License as published by
+  the Free Software Foundation; either version 3 of the License, or
+  (at your option) any later version.
+
+  This program is distributed in the hope that it will be useful,
+  but WITHOUT ANY WARRANTY; without even the implied warranty of
+  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+  GNU General Public License for more details.
+
+  You should have received a copy of the GNU General Public License
+  along with this program.  If not, see <http://www.gnu.org/licenses/>.
+"""
+import cPickle
+import os
+import tempfile
+from yt.testing \
+    import fake_random_pf, assert_equal
+
+
+def setup():
+    """Test specific setup."""
+    from yt.config import ytcfg
+    ytcfg["yt", "__withintesting"] = "True"
+
+
+def test_save_load_pickle():
+    """Main test for loading pickled objects"""
+    test_pf = fake_random_pf(64)
+
+    # create extracted region from boolean (fairly complex object)
+    center = (test_pf.domain_left_edge + test_pf.domain_right_edge) / 2
+    sp_outer = test_pf.h.sphere(center, test_pf.domain_width[0])
+    sp_inner = test_pf.h.sphere(center, test_pf.domain_width[0] / 10.0)
+    sp_boolean = test_pf.h.boolean([sp_outer, "NOT", sp_inner])
+
+    minv, maxv = sp_boolean.quantities["Extrema"]("Density")[0]
+    contour_threshold = min(minv * 10.0, 0.9 * maxv)
+
+    contours = sp_boolean.extract_connected_sets(
+        "Density", 1, contour_threshold, maxv + 1, log_space=True, cache=True)
+
+    # save object
+    cpklfile = tempfile.NamedTemporaryFile(delete=False)
+    cPickle.dump(contours[1][0], cpklfile)
+    cpklfile.close()
+
+    # load object
+    test_load = cPickle.load(open(cpklfile.name, "rb"))
+
+    assert_equal.description = \
+        "%s: File was pickle-loaded successfully" % __name__
+    yield assert_equal, test_load is not None, True
+    assert_equal.description = \
+        "%s: Length of pickle-loaded connected set object" % __name__
+    yield assert_equal, len(contours[1][0]), len(test_load)
+
+    os.remove(cpklfile.name)

diff -r 9586a3112cd440027b1f353c59e95dd638ea5676 -r a5a04b9207b187eb09478969630d01275da603f1 yt/data_objects/universal_fields.py
--- a/yt/data_objects/universal_fields.py
+++ b/yt/data_objects/universal_fields.py
@@ -801,8 +801,9 @@
     for i, ax in enumerate('xyz'):
         np.subtract(data["%s%s" % (field_prefix, ax)], center[i], r)
         if data.pf.periodicity[i] == True:
-            np.subtract(DW[i], r, rdw)
             np.abs(r, r)
+            np.subtract(r, DW[i], rdw)
+            np.abs(rdw, rdw)
             np.minimum(r, rdw, r)
         np.power(r, 2.0, r)
         np.add(radius, r, radius)
@@ -997,7 +998,7 @@
     return blank
 add_field("particle_density", function=_pdensity,
           validators=[ValidateGridType()], convert_function=_convertDensity,
-          display_name=r"$\mathrm{Particle}\/\mathrm{Density}$")
+          display_name=r"\mathrm{Particle}\/\mathrm{Density}")
 
 def _MagneticEnergy(field,data):
     """This assumes that your front end has provided Bx, By, Bz in
@@ -1033,8 +1034,8 @@
     return data['MagneticEnergy']
 add_field("MagneticPressure",
           function=_MagneticPressure,
-          display_name=r"\rm{Magnetic}\/\rm{Energy}",
-          units="\rm{ergs}\/\rm{cm}^{-3}")
+          display_name=r"\rm{Magnetic}\/\rm{Pressure}",
+          units=r"\rm{ergs}\/\rm{cm}^{-3}")
 
 def _BPoloidal(field,data):
     normal = data.get_field_parameter("normal")

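The reordering in the radius field above matters for cells near a periodic boundary: the old code formed DW - r from the signed offset before taking the absolute value, so an offset of r = -0.9 in a unit-width domain kept a distance of 0.9 instead of wrapping to the nearer image at 0.1. A standalone NumPy check of both orderings:

import numpy as np

DW = 1.0
r = np.array([-0.9])

# old ordering: rdw is computed from the signed offset, so negative
# offsets never see their wrapped image
rdw_old = DW - r                         # 1.9
old = np.minimum(np.abs(r), rdw_old)     # 0.9 -- wrong

# new ordering: take |r| first, then compare with ||r| - DW|
ra = np.abs(r)
new = np.minimum(ra, np.abs(ra - DW))    # 0.1 -- nearest image
print old, new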
diff -r 9586a3112cd440027b1f353c59e95dd638ea5676 -r a5a04b9207b187eb09478969630d01275da603f1 yt/frontends/athena/data_structures.py
--- a/yt/frontends/athena/data_structures.py
+++ b/yt/frontends/athena/data_structures.py
@@ -53,7 +53,7 @@
 class AthenaGrid(AMRGridPatch):
     _id_offset = 0
     def __init__(self, id, hierarchy, level, start, dimensions):
-        df = hierarchy.storage_filename
+        df = hierarchy.parameter_file.filename[4:-4]
         if 'id0' not in hierarchy.parameter_file.filename:
             gname = hierarchy.parameter_file.filename
         else:
@@ -119,12 +119,13 @@
 
     grid = AthenaGrid
     _data_style='athena'
+    _data_file = None
     
     def __init__(self, pf, data_style='athena'):
         self.parameter_file = weakref.proxy(pf)
+        self.directory = os.path.dirname(self.parameter_file.filename)
         self.data_style = data_style
         # for now, the hierarchy file is the parameter file!
-        self.storage_filename = self.parameter_file.storage_filename
         self.hierarchy_filename = self.parameter_file.filename
         #self.directory = os.path.dirname(self.hierarchy_filename)
         self._fhandle = file(self.hierarchy_filename,'rb')
@@ -132,9 +133,6 @@
 
         self._fhandle.close()
 
-    def _initialize_data_storage(self):
-        pass
-
     def _detect_fields(self):
         field_map = {}
         f = open(self.hierarchy_filename,'rb')
@@ -337,12 +335,14 @@
     _data_style = "athena"
 
     def __init__(self, filename, data_style='athena',
-                 storage_filename = None, parameters = {}):
+                 storage_filename=None, parameters={}):
         self.specified_parameters = parameters
         StaticOutput.__init__(self, filename, data_style)
         self.filename = filename
-        self.storage_filename = filename[4:-4]
-        
+        if storage_filename is None:
+            storage_filename = '%s.yt' % filename.split('/')[-1]
+        self.storage_filename = storage_filename
+
         # Unfortunately we now have to mandate that the hierarchy gets 
         # instantiated so that we can make sure we have the correct left 
         # and right domain edges.
@@ -402,7 +402,7 @@
         if dimensionality == 1 : self.domain_dimensions[1] = np.int32(1)
         self.dimensionality = dimensionality
         self.current_time = grid["time"]
-        self.unique_identifier = self._handle.__hash__()
+        self.unique_identifier = self.parameter_filename.__hash__()
         self.cosmological_simulation = False
         self.num_ghost_zones = 0
         self.field_ordering = 'fortran'

diff -r 9586a3112cd440027b1f353c59e95dd638ea5676 -r a5a04b9207b187eb09478969630d01275da603f1 yt/frontends/castro/data_structures.py
--- a/yt/frontends/castro/data_structures.py
+++ b/yt/frontends/castro/data_structures.py
@@ -60,7 +60,7 @@
 
     def __init__(self, LeftEdge, RightEdge, index, level, filename, offset,
                  dimensions, start, stop, paranoia=False, **kwargs):
-        super(CastroGrid, self).__init__(self, index, **kwargs)
+        super(CastroGrid, self).__init__(index, **kwargs)
         self.filename = filename
         self._offset = offset
         self._paranoid = paranoia  # TODO: Factor this behavior out in tests
@@ -72,7 +72,7 @@
         self.LeftEdge  = LeftEdge.copy()
         self.RightEdge = RightEdge.copy()
         self.index = index
-        self.level = level
+        self.Level = level
 
     def get_global_startindex(self):
         return self.start_index
@@ -115,8 +115,6 @@
     grid = CastroGrid
 
     def __init__(self, pf, data_style='castro_native'):
-        super(CastroHierarchy, self).__init__(self, pf, self.data_style)
-
         self.field_indexes = {}
         self.parameter_file = weakref.proxy(pf)
         header_filename = os.path.join(pf.fullplotdir, 'Header')
@@ -128,6 +126,8 @@
                                 self.parameter_file.paranoid_read) 
         self.read_particle_header()
         self._cache_endianness(self.levels[-1].grids[-1])
+
+        super(CastroHierarchy, self).__init__(pf, data_style)
         self._setup_data_io()
         self._setup_field_list()
         self._populate_hierarchy()
@@ -181,7 +181,7 @@
         counter += 1
 
         self.dx = np.zeros((self.n_levels, 3))
-        for i, line in enumerate(self.__global_header_lines[counter:counter+self.n_levels]):
+        for i, line in enumerate(self._global_header_lines[counter:counter+self.n_levels]):
             self.dx[i] = np.array(map(float, line.split()))
         counter += self.n_levels
         self.geometry = int(self._global_header_lines[counter])
@@ -424,21 +424,6 @@
         return self.grids[mask]
 
     def _setup_field_list(self):
-        self.derived_field_list = []
-
-        for field in self.field_info:
-            try:
-                fd = self.field_info[field].get_dependencies(pf=self.parameter_file)
-            except:
-                continue
-
-            available = np.all([f in self.field_list for f in fd.requested])
-            if available: self.derived_field_list.append(field)
-
-        for field in self.field_list:
-            if field not in self.derived_field_list:
-                self.derived_field_list.append(field)
-
         if self.parameter_file.use_particles:
             # We know which particle fields will exist -- pending further
             # changes in the future.
@@ -521,15 +506,15 @@
          * ASCII (not implemented in yt)
 
         """
-        super(CastroStaticOutput, self).__init__(self, plotname.rstrip("/"),
-                                                 data_style='castro_native')
         self.storage_filename = storage_filename
         self.paranoid_read = paranoia
         self.parameter_filename = paramFilename
         self.fparameter_filename = fparamFilename
         self.__ipfn = paramFilename
+        self.fparameters = {}
+        super(CastroStaticOutput, self).__init__(plotname.rstrip("/"),
+                                                 data_style='castro_native')
 
-        self.fparameters = {}
 
         # These should maybe not be hardcoded?
         ### TODO: this.
@@ -618,6 +603,7 @@
                 self.domain_left_edge = np.array([float(i) for i in vals.split()])
             elif param.startswith("particles.write_in_plotfile"):
                 self.use_particles = boxlib_bool_to_int(vals)
+            self.fparameters[param] = vals
 
         self.parameters["TopGridRank"] = len(self.parameters["TopGridDimensions"])
         self.dimensionality = self.parameters["TopGridRank"]
@@ -655,8 +641,11 @@
         for line in lines:
             if line.count("=") == 1:
                 param, vals = map(strip, map(rstrip, line.split("=")))
-                if vals.count("'") == 0:
-                    t = map(float, [a.replace('D','e').replace('d','e') for a in vals.split()]) # all are floating point.
+                if vals.count("'") == 0 and vals.count("\"") == 0:
+                    try:
+                        t = map(float, [a.replace('D','e').replace('d','e') for a in vals.split()]) # all are floating point.
+                    except ValueError:
+                        print "Failed on line", line
                 else:
                     t = vals.split()
                 if len(t) == 1:

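The Castro parameter reader above now rejects double-quoted values as well
and survives tokens that merely look numeric, mapping Fortran D-exponents to
E before calling float. The same logic in isolation (the helper name and
sample input are made up):

    def parse_fortran_floats(vals):
        # Quoted values stay as strings; otherwise try Fortran-style
        # floats, whose exponent marker may be D/d instead of E/e.
        if vals.count("'") == 0 and vals.count('"') == 0:
            try:
                return [float(a.replace('D', 'e').replace('d', 'e'))
                        for a in vals.split()]
            except ValueError:
                print "Failed on value", vals
                return None
        return vals.split()

    print parse_fortran_floats("1.0D-3 2.5d0")  # [0.001, 2.5]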
diff -r 9586a3112cd440027b1f353c59e95dd638ea5676 -r a5a04b9207b187eb09478969630d01275da603f1 yt/frontends/castro/io.py
--- a/yt/frontends/castro/io.py
+++ b/yt/frontends/castro/io.py
@@ -100,10 +100,10 @@
             if (gridSize != grid.ActiveDimensions).any():
                 print "Paranoia Error: Cell_H and %s do not agree on grid dimensions." %grid.filename
                 error_count += 1
-            if bytesPerReal != grid.hierarchy._bytesPerReal:
+            if bytesPerReal != grid.hierarchy._bytes_per_real:
                 print "Paranoia Error: Cell_H and %s do not agree on bytes per real number." %grid.filename
                 error_count += 1
-            if (bytesPerReal == grid.hierarchy._bytesPerReal and dtype != grid.hierarchy._dtype):
+            if (bytesPerReal == grid.hierarchy._bytes_per_real and dtype != grid.hierarchy._dtype):
                 print "Paranoia Error: Cell_H and %s do not agree on endianness." %grid.filename
                 error_count += 1
 
@@ -114,7 +114,7 @@
             start = grid.start_index
             stop = grid.stop_index
             dtype = grid.hierarchy._dtype
-            bytesPerReal = grid.hierarchy._bytesPerReal
+            bytesPerReal = grid.hierarchy._bytes_per_real
 
         nElements = grid.ActiveDimensions.prod()
 

diff -r 9586a3112cd440027b1f353c59e95dd638ea5676 -r a5a04b9207b187eb09478969630d01275da603f1 yt/frontends/enzo/data_structures.py
--- a/yt/frontends/enzo/data_structures.py
+++ b/yt/frontends/enzo/data_structures.py
@@ -634,6 +634,24 @@
         else:
             self.derived_field_list = self.__class__._cached_derived_field_list
 
+    def _detect_fields(self):
+        self.field_list = []
+        # Do this only on the root processor to save disk work.
+        mylog.info("Gathering a field list (this may take a moment.)")
+        field_list = set()
+        random_sample = self._generate_random_grids()
+        for grid in random_sample:
+            try:
+                gf = self.io._read_field_names(grid)
+            except self.io._read_exception:
+                mylog.debug("Grid %s is a bit funky?", grid.id)
+                continue
+            mylog.debug("Grid %s has: %s", grid.id, gf)
+            field_list = field_list.union(gf)
+        field_list = self.comm.par_combine_object(list(field_list),
+                        datatype="list", op = "cat")
+        self.field_list = list(set(field_list))
+
     def _generate_random_grids(self):
         my_rank = self.comm.rank
         my_grids = self.grids[self.grid_procs.ravel() == my_rank]
@@ -770,7 +788,7 @@
         data_label_factors = {}
         for line in (l.strip() for l in lines):
             if len(line) < 2: continue
-            param, vals = (i.strip() for i in line.split("="))
+            param, vals = (i.strip() for i in line.split("=",1))
             # First we try to decipher what type of value it is.
             vals = vals.split()
             # Special case approaching.

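The one-character change to line.split("=", 1) in the Enzo parameter parser
matters whenever a value itself contains an equals sign; splitting on every
"=" would make the two-element unpacking raise a ValueError. With a made-up
parameter line:

    line = 'SomeParameter = left=right'
    param, vals = (i.strip() for i in line.split('=', 1))
    print param  # SomeParameter
    print vals   # left=right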
diff -r 9586a3112cd440027b1f353c59e95dd638ea5676 -r a5a04b9207b187eb09478969630d01275da603f1 yt/frontends/enzo/fields.py
--- a/yt/frontends/enzo/fields.py
+++ b/yt/frontends/enzo/fields.py
@@ -130,14 +130,21 @@
 def _ThermalEnergy(field, data):
     if data.pf["HydroMethod"] == 2:
         return data["TotalEnergy"]
-    else:
-        if data.pf["DualEnergyFormalism"]:
-            return data["GasEnergy"]
-        else:
-            return data["TotalEnergy"] - 0.5*(
-                   data["x-velocity"]**2.0
-                 + data["y-velocity"]**2.0
-                 + data["z-velocity"]**2.0 )
+    
+    if data.pf["DualEnergyFormalism"]:
+        return data["GasEnergy"]
+
+    if data.pf["HydroMethod"] in (4,6):
+        return data["TotalEnergy"] - 0.5*(
+            data["x-velocity"]**2.0
+            + data["y-velocity"]**2.0
+            + data["z-velocity"]**2.0 ) \
+            - data["MagneticEnergy"]/data["Density"]
+
+    return data["TotalEnergy"] - 0.5*(
+        data["x-velocity"]**2.0
+        + data["y-velocity"]**2.0
+        + data["z-velocity"]**2.0 )
 add_field("ThermalEnergy", function=_ThermalEnergy,
           units=r"\rm{ergs}/\rm{g}")
 
@@ -171,22 +178,22 @@
 # We set up fields for both TotalEnergy and Total_Energy in the known fields
 # lists.  Note that this does not mean these will be the used definitions.
 add_enzo_field("TotalEnergy", function=NullFunc,
-          display_name = "$\rm{Total}\/\rm{Energy}$",
+          display_name = r"\rm{Total}\/ \rm{Energy}",
           units=r"\rm{ergs}/\rm{g}", convert_function=_convertEnergy)
 add_enzo_field("Total_Energy", function=NullFunc,
-          display_name = "$\rm{Total}\/\rm{Energy}$",
+          display_name = r"\rm{Total}\/ \rm{Energy}",
           units=r"\rm{ergs}/\rm{g}", convert_function=_convertEnergy)
 
 def _Total_Energy(field, data):
     return data["TotalEnergy"] / _convertEnergy(data)
 add_field("Total_Energy", function=_Total_Energy,
-          display_name = "$\rm{Total}\/\rm{Energy}$",
+          display_name = r"\rm{Total}\/ \rm{Energy}",
           units=r"\rm{ergs}/\rm{g}", convert_function=_convertEnergy)
 
 def _TotalEnergy(field, data):
     return data["Total_Energy"] / _convertEnergy(data)
 add_field("TotalEnergy", function=_TotalEnergy,
-          display_name = "$\rm{Total}\/\rm{Energy}$",
+          display_name = r"\rm{Total}\/ \rm{Energy}",
           units=r"\rm{ergs}/\rm{g}", convert_function=_convertEnergy)
 
 def _NumberDensity(field, data):
@@ -369,7 +376,7 @@
         if not filter.any(): return blank
         num = filter.sum()
     else:
-        filter = None
+        filter = Ellipsis
         num = data["particle_position_x"].size
     amr_utils.CICDeposit_3(data["particle_position_x"][filter].astype(np.float64),
                            data["particle_position_y"][filter].astype(np.float64),

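Swapping filter = None for filter = Ellipsis in the CIC deposition is not
cosmetic: indexing a NumPy array with None inserts a new axis, while
Ellipsis selects the array unchanged, which is the shape CICDeposit_3
expects. For example:

    import numpy as np

    a = np.arange(4, dtype=np.float64)
    print a[Ellipsis].shape  # (4,)   -- the array itself
    print a[None].shape      # (1, 4) -- an unwanted extra axis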
diff -r 9586a3112cd440027b1f353c59e95dd638ea5676 -r a5a04b9207b187eb09478969630d01275da603f1 yt/frontends/flash/fields.py
--- a/yt/frontends/flash/fields.py
+++ b/yt/frontends/flash/fields.py
@@ -5,7 +5,7 @@
 Affiliation: UCSD
 Homepage: http://yt-project.org/
 License:
-  Copyright (C) 2010-2011 Matthew Turk, John ZuHone.  All Rights Reserved.
+  Copyright (C) 2010-2012 Matthew Turk, John ZuHone, Anthony Scopatz.  All Rights Reserved.
 
   This file is part of yt.
 
@@ -24,6 +24,7 @@
 """
 
 import numpy as np
+from yt.utilities.exceptions import *
 from yt.data_objects.field_info_container import \
     FieldInfoContainer, \
     NullFunc, \
@@ -36,7 +37,7 @@
     ValidateGridType
 import yt.data_objects.universal_fields
 from yt.utilities.physical_constants import \
-    kboltz, mh
+    kboltz, mh, Na
 KnownFLASHFields = FieldInfoContainer()
 add_flash_field = KnownFLASHFields.add_field
 
@@ -63,6 +64,7 @@
 translation_dict = {"x-velocity": "velx",
                     "y-velocity": "vely",
                     "z-velocity": "velz",
+                    "VelocityMagnitude": "velo",
                     "Density": "dens",
                     "Temperature": "temp",
                     "Pressure" : "pres", 
@@ -154,15 +156,40 @@
 add_flash_field("temp", function=NullFunc, take_log=True,
                 convert_function=_get_convert("temp"),
                 units=r"\rm{K}")
+add_flash_field("tion", function=NullFunc, take_log=True,
+                units=r"\rm{K}")
 add_flash_field("tele", function=NullFunc, take_log=True,
                 convert_function=_get_convert("tele"),
                 units = r"\rm{K}")
+add_flash_field("trad", function=NullFunc, take_log=True,
+                units = r"\rm{K}")
 add_flash_field("pres", function=NullFunc, take_log=True,
                 convert_function=_get_convert("pres"),
                 units=r"\rm{erg}/\rm{cm}^{3}")
+add_flash_field("pion", function=NullFunc, take_log=True,
+                display_name="Ion Pressure",
+                units=r"\rm{erg}/\rm{cm}^3")
+add_flash_field("pele", function=NullFunc, take_log=True,
+                display_name="Electron Pressure, P_e",
+                units=r"\rm{erg}/\rm{cm}^3")
+add_flash_field("prad", function=NullFunc, take_log=True,
+                display_name="Radiation Pressure",
+                units = r"\rm{erg}/\rm{cm}^3")
+add_flash_field("eion", function=NullFunc, take_log=True,
+                display_name="Ion Internal Energy",
+                units=r"\rm{erg}")
+add_flash_field("eele", function=NullFunc, take_log=True,
+                display_name="Electron Internal Energy",
+                units=r"\rm{erg}")
+add_flash_field("erad", function=NullFunc, take_log=True,
+                display_name="Radiation Internal Energy",
+                units=r"\rm{erg}")
 add_flash_field("pden", function=NullFunc, take_log=True,
                 convert_function=_get_convert("pden"),
                 units=r"\rm{g}/\rm{cm}^{3}")
+add_flash_field("depo", function=NullFunc, take_log=True,
+                units = r"\rm{ergs}/\rm{g}")
+add_flash_field("ye", function=NullFunc, take_log=True,)
 add_flash_field("magx", function=NullFunc, take_log=False,
                 convert_function=_get_convert("magx"),
                 units = r"\mathrm{Gau\ss}")
@@ -192,6 +219,34 @@
                 units = r"\rm{ergs}/\rm{g}")
 add_flash_field("flam", function=NullFunc, take_log=False,
                 convert_function=_get_convert("flam"))
+add_flash_field("absr", function=NullFunc, take_log=False,
+                display_name="Absorption Coefficient")
+add_flash_field("emis", function=NullFunc, take_log=False,
+                display_name="Emissivity")
+add_flash_field("cond", function=NullFunc, take_log=False,
+                display_name="Conductivity")
+add_flash_field("dfcf", function=NullFunc, take_log=False,
+                display_name="Diffusion Equation Scalar")
+add_flash_field("fllm", function=NullFunc, take_log=False,
+                display_name="Flux Limit")
+add_flash_field("pipe", function=NullFunc, take_log=False,
+                display_name="P_i/P_e")
+add_flash_field("tite", function=NullFunc, take_log=False,
+                display_name="T_i/T_e")
+add_flash_field("dbgs", function=NullFunc, take_log=False,
+                display_name="Debug for Shocks")
+add_flash_field("cham", function=NullFunc, take_log=False,
+                display_name="Chamber Material Fraction")
+add_flash_field("targ", function=NullFunc, take_log=False,
+                display_name="Target Material Fraction")
+add_flash_field("sumy", function=NullFunc, take_log=False)
+add_flash_field("mgdc", function=NullFunc, take_log=False,
+                display_name="Emission Minus Absorption Diffusion Terms")
+
+for i in range(1, 1000):
+    add_flash_field("r{0:03}".format(i), function=NullFunc, take_log=False,
+        display_name="Energy Group {0}".format(i))
+
 
 for f,v in translation_dict.items():
     if v not in KnownFLASHFields:
@@ -300,3 +355,17 @@
           units=r"\rm{Gauss}\/\rm{cm}^{-1}")
 
 
+
+## Derived FLASH Fields
+def _nele(field, data):
+    return data['dens'] * data['ye'] * Na
+add_field('nele', function=_nele, take_log=True, units=r"\rm{cm}^{-3}")
+add_field('edens', function=_nele, take_log=True, units=r"\rm{cm}^{-3}")
+
+def _nion(field, data):
+    return data['dens'] * data['sumy'] * Na
+add_field('nion', function=_nion, take_log=True, units=r"\rm{cm}^{-3}")
+
+def _abar(field, data):
+    return 1.0 / data['sumy']
+add_field('abar', function=_abar, take_log=False)

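The new derived FLASH fields combine stored per-gram quantities with
Avogadro's number: roughly, nele = dens * ye * Na and nion = dens * sumy *
Na give electron and ion number densities per cm**3. A back-of-the-envelope
check with illustrative values:

    Na = 6.0221415e23        # Avogadro's number, 1/mol
    dens, ye = 1.0, 0.5      # g/cm**3 and electrons per amu (made up)
    print dens * ye * Na     # ~3.0e23 electrons per cm**3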
diff -r 9586a3112cd440027b1f353c59e95dd638ea5676 -r a5a04b9207b187eb09478969630d01275da603f1 yt/frontends/flash/tests/test_outputs.py
--- a/yt/frontends/flash/tests/test_outputs.py
+++ b/yt/frontends/flash/tests/test_outputs.py
@@ -34,7 +34,7 @@
 _fields = ("Temperature", "Density", "VelocityMagnitude", "DivV")
 
 sloshing = "GasSloshingLowRes/sloshing_low_res_hdf5_plt_cnt_0300"
-@requires_pf(sloshing)
+@requires_pf(sloshing, big_data=True)

 def test_sloshing():
     pf = data_dir_load(sloshing)
     yield assert_equal, str(pf), "sloshing_low_res_hdf5_plt_cnt_0300"
@@ -50,11 +50,3 @@
     yield assert_equal, str(pf), "windtunnel_4lev_hdf5_plt_cnt_0030"
     for test in small_patch_amr(wt, _fields_2d):
         yield test
-
-gcm = "GalaxyClusterMerger/fiducial_1to10_b0.273d_hdf5_plt_cnt_0245.gz"
-@requires_pf(gcm, big_data=True)
-def test_galaxy_cluster_merger():
-    pf = data_dir_load(gcm)
-    for test in big_patch_amr(gcm, _fields):
-        yield test
-

diff -r 9586a3112cd440027b1f353c59e95dd638ea5676 -r a5a04b9207b187eb09478969630d01275da603f1 yt/frontends/gdf/data_structures.py
--- a/yt/frontends/gdf/data_structures.py
+++ b/yt/frontends/gdf/data_structures.py
@@ -197,13 +197,13 @@
         self._handle = h5py.File(self.parameter_filename, "r")
         for field_name in self._handle["/field_types"]:
             current_field = self._handle["/field_types/%s" % field_name]
-            try:
+            if 'field_to_cgs' in current_field.attrs:
                 self.units[field_name] = current_field.attrs['field_to_cgs']
-            except:
+            else:
                 self.units[field_name] = 1.0
-            try:
-                current_fields_unit = current_field.attrs['field_units'][0]
-            except:
+            if 'field_units' in current_field.attrs:
+                current_fields_unit = just_one(current_field.attrs['field_units'])
+            else:
                 current_fields_unit = ""
             self._fieldinfo_known.add_field(field_name, function=NullFunc, take_log=False,
                    units=current_fields_unit, projected_units="",

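Testing 'field_to_cgs' in current_field.attrs is also safer than the old
bare try/except, which swallowed every exception rather than just a missing
attribute. The same pattern on its own (the file, group, and attribute
values are illustrative):

    import h5py

    with h5py.File('data.gdf', 'r') as f:
        grp = f['/field_types/density']
        to_cgs = grp.attrs['field_to_cgs'] \
                 if 'field_to_cgs' in grp.attrs else 1.0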
diff -r 9586a3112cd440027b1f353c59e95dd638ea5676 -r a5a04b9207b187eb09478969630d01275da603f1 yt/frontends/stream/data_structures.py
--- a/yt/frontends/stream/data_structures.py
+++ b/yt/frontends/stream/data_structures.py
@@ -25,6 +25,7 @@
 
 import weakref
 import numpy as np
+import uuid
 
 from yt.utilities.io_handler import io_registry
 from yt.funcs import *
@@ -302,7 +303,10 @@
         #self._conversion_override = conversion_override
 
         self.stream_handler = stream_handler
-        StaticOutput.__init__(self, "InMemoryParameterFile", self._data_style)
+        name = "InMemoryParameterFile_%s" % (uuid.uuid4().hex)
+        from yt.data_objects.static_output import _cached_pfs
+        _cached_pfs[name] = self
+        StaticOutput.__init__(self, name, self._data_style)
 
         self.units = {}
         self.time_units = {}

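Suffixing each in-memory parameter file with a fresh uuid keeps multiple
streams from colliding under a single "InMemoryParameterFile" key in the
_cached_pfs registry. The naming itself is one line:

    import uuid

    name = "InMemoryParameterFile_%s" % uuid.uuid4().hex
    print name  # e.g. InMemoryParameterFile_0f3a9c...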
diff -r 9586a3112cd440027b1f353c59e95dd638ea5676 -r a5a04b9207b187eb09478969630d01275da603f1 yt/mods.py
--- a/yt/mods.py
+++ b/yt/mods.py
@@ -38,8 +38,10 @@
 
 # This next item will handle most of the actual startup procedures, but it will
 # also attempt to parse the command line and set up the global state of various
-# operations.
-
+# operations.  The variable unparsed_args is not used internally but is
+# provided as a convenience for users who wish to parse arguments in scripts.
+# See http://lists.spacepope.org/pipermail/yt-dev-spacepope.org/2011-December/
+#     001727.html
 import yt.startup_tasks as __startup_tasks
 unparsed_args = __startup_tasks.unparsed_args
 
@@ -146,7 +148,7 @@
 
 # Import some helpful math utilities
 from yt.utilities.math_utils import \
-    ortho_find, quartiles, periodic_position 
+    ortho_find, quartiles, periodic_position
 
 
 # We load plugins.  Keep in mind, this can be fairly dangerous -

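As the expanded comment says, unparsed_args lets user scripts layer their
own option parsing on top of yt's. A minimal sketch of that pattern (the
--nframes option is invented for the example):

    import argparse
    from yt.mods import unparsed_args

    parser = argparse.ArgumentParser()
    parser.add_argument("--nframes", type=int, default=10)
    args = parser.parse_args(unparsed_args)  # only args yt did not consume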
diff -r 9586a3112cd440027b1f353c59e95dd638ea5676 -r a5a04b9207b187eb09478969630d01275da603f1 yt/utilities/answer_testing/framework.py
--- a/yt/utilities/answer_testing/framework.py
+++ b/yt/utilities/answer_testing/framework.py
@@ -30,18 +30,23 @@
 import urllib2
 import cPickle
 import sys
+import cPickle
+import shelve
+import zlib
 
+from matplotlib.testing.compare import compare_images
 from nose.plugins import Plugin
 from yt.testing import *
+from yt.convenience import load, simulation
 from yt.config import ytcfg
-from yt.mods import *
 from yt.data_objects.static_output import StaticOutput
-import cPickle
-import shelve
-
 from yt.utilities.logger import disable_stream_logging
 from yt.utilities.command_line import get_yt_version
 
+import matplotlib.image as mpimg
+import yt.visualization.plot_window as pw
+import yt.utilities.progressbar as progressbar
+
 mylog = logging.getLogger('nose.plugins.answer-testing')
 run_big_data = False
 
@@ -66,6 +71,8 @@
         parser.add_option("--answer-big-data", dest="big_data",
             default=False, help="Should we run against big data, too?",
             action="store_true")
+        parser.add_option("--local-dir", dest="output_dir", metavar='str',
+                          help="The name of the directory to store local results")
 
     @property
     def my_version(self, version=None):
@@ -96,7 +103,7 @@
                 self.store_name = options.answer_name
             self.compare_name = None
         # if we're not storing, then we're comparing, and we want default
-        # comparison name to be the latest gold standard 
+        # comparison name to be the latest gold standard
         # either on network or local
         else:
             if options.answer_name is None:
@@ -117,18 +124,21 @@
             self.compare_name = None
         elif self.compare_name == "latest":
             self.compare_name = _latest
-            
-        # Local/Cloud storage 
+
+        # Local/Cloud storage
         if options.local_results:
+            if options.output_dir is None:
+                print 'Please supply an output directory with the --local-dir option'
+                sys.exit(1)
             storage_class = AnswerTestLocalStorage
-            # Fix up filename for local storage 
+            # Fix up filename for local storage
             if self.compare_name is not None:
                 self.compare_name = "%s/%s/%s" % \
-                    (os.path.realpath(options.output_dir), self.compare_name, 
+                    (os.path.realpath(options.output_dir), self.compare_name,
                      self.compare_name)
             if self.store_name is not None:
                 name_dir_path = "%s/%s" % \
-                    (os.path.realpath(options.output_dir), 
+                    (os.path.realpath(options.output_dir),
                     self.store_name)
                 if not os.path.isdir(name_dir_path):
                     os.makedirs(name_dir_path)
@@ -147,7 +157,10 @@
 
     def finalize(self, result=None):
         if self.store_results is False: return
-        self.storage.dump(self.result_storage)        
+        self.storage.dump(self.result_storage)
+
+    def help(self):
+        return "yt answer testing support"
 
 class AnswerTestStorage(object):
     def __init__(self, reference_name=None, answer_name=None):
@@ -155,9 +168,9 @@
         self.answer_name = answer_name
         self.cache = {}
     def dump(self, result_storage, result):
-        raise NotImplementedError 
+        raise NotImplementedError
     def get(self, pf_name, default=None):
-        raise NotImplementedError 
+        raise NotImplementedError
 
 class AnswerTestCloudStorage(AnswerTestStorage):
     def get(self, pf_name, default = None):
@@ -185,6 +198,9 @@
         self.cache[pf_name] = rv
         return rv
 
+    def progress_callback(self, current, total):
+        self.pbar.update(current)
+
     def dump(self, result_storage):
         if self.answer_name is None: return
         # This is where we dump our result storage up to Amazon, if we are able
@@ -195,12 +211,24 @@
         bucket = c.get_bucket("yt-answer-tests")
         for pf_name in result_storage:
             rs = cPickle.dumps(result_storage[pf_name])
-            tk = bucket.get_key("%s_%s" % (self.answer_name, pf_name)) 
+            tk = bucket.get_key("%s_%s" % (self.answer_name, pf_name))
             if tk is not None: tk.delete()
             k = Key(bucket)
             k.key = "%s_%s" % (self.answer_name, pf_name)
-            k.set_contents_from_string(rs)
+
+            pb_widgets = [
+                unicode(k.key, errors='ignore').encode('utf-8'), ' ',
+                progressbar.FileTransferSpeed(),' <<<', progressbar.Bar(),
+                '>>> ', progressbar.Percentage(), ' ', progressbar.ETA()
+                ]
+            self.pbar = progressbar.ProgressBar(widgets=pb_widgets,
+                                                maxval=sys.getsizeof(rs))
+
+            self.pbar.start()
+            k.set_contents_from_string(rs, cb=self.progress_callback,
+                                       num_cb=100000)
             k.set_acl("public-read")
+            self.pbar.finish()
 
 class AnswerTestLocalStorage(AnswerTestStorage):
     def dump(self, result_storage):
@@ -209,7 +237,7 @@
         ds = shelve.open(self.answer_name, protocol=-1)
         for pf_name in result_storage:
             answer_name = "%s" % pf_name
-            if name in ds:
+            if answer_name in ds:
                 mylog.info("Overwriting %s", answer_name)
             ds[answer_name] = result_storage[pf_name]
         ds.close()
@@ -277,7 +305,7 @@
         nv = self.run()
         if self.reference_storage.reference_name is not None:
             dd = self.reference_storage.get(self.storage_name)
-            if dd is None or self.description not in dd: 
+            if dd is None or self.description not in dd:
                 raise YTNoOldAnswer("%s : %s" % (self.storage_name , self.description))
             ov = dd[self.description]
             self.compare(nv, ov)
@@ -303,6 +331,16 @@
         obj = cls(*obj_type[1])
         return obj
 
+    def create_plot(self, pf, plot_type, plot_field, plot_axis, plot_kwargs = None):
+        # plot_type should be a string
+        # plot_args should be a tuple
+        # plot_kwargs should be a dict
+        if plot_type is None:
+            raise RuntimeError('Must explicitly request a plot type')
+        cls = getattr(pw, plot_type)
+        plot = cls(*(pf, plot_axis, plot_field), **plot_kwargs)
+        return plot
+
     @property
     def sim_center(self):
         """
@@ -335,7 +373,7 @@
         args = [self._type_name, str(self.pf), oname]
         args += [str(getattr(self, an)) for an in self._attrs]
         return "_".join(args)
-        
+
 class FieldValuesTest(AnswerTestingTest):
     _type_name = "FieldValues"
     _attrs = ("field", )
@@ -357,7 +395,7 @@
     def compare(self, new_result, old_result):
         err_msg = "Field values for %s not equal." % self.field
         if self.decimals is None:
-            assert_equal(new_result, old_result, 
+            assert_equal(new_result, old_result,
                          err_msg=err_msg, verbose=True)
         else:
             assert_allclose(new_result, old_result, 10.**(-self.decimals),
@@ -381,12 +419,12 @@
     def compare(self, new_result, old_result):
         err_msg = "All field values for %s not equal." % self.field
         if self.decimals is None:
-            assert_equal(new_result, old_result, 
+            assert_equal(new_result, old_result,
                          err_msg=err_msg, verbose=True)
         else:
             assert_rel_equal(new_result, old_result, self.decimals,
                              err_msg=err_msg, verbose=True)
-            
+
 class ProjectionValuesTest(AnswerTestingTest):
     _type_name = "ProjectionValues"
     _attrs = ("field", "axis", "weight_field")
@@ -426,7 +464,7 @@
                 assert_equal(new_result[k], old_result[k],
                              err_msg=err_msg)
             else:
-                assert_allclose(new_result[k], old_result[k], 
+                assert_allclose(new_result[k], old_result[k],
                                  10.**-(self.decimals), err_msg=err_msg)
 
 class PixelizedProjectionValuesTest(AnswerTestingTest):
@@ -505,7 +543,7 @@
             assert_equal(new_result[i], old_result[i],
                          err_msg="Output times not equal.",
                          verbose=True)
-        
+
 class GridHierarchyTest(AnswerTestingTest):
     _type_name = "GridHierarchy"
     _attrs = ()
@@ -547,6 +585,37 @@
         for newc, oldc in zip(new_result["children"], old_result["children"]):
             assert(newp == oldp)
 
+class PlotWindowAttributeTest(AnswerTestingTest):
+    _type_name = "PlotWindowAttribute"
+    _attrs = ('plot_type', 'plot_field', 'plot_axis', 'attr_name', 'attr_args')
+    def __init__(self, pf_fn, plot_field, plot_axis, attr_name, attr_args,
+                 decimals, plot_type = 'SlicePlot'):
+        super(PlotWindowAttributeTest, self).__init__(pf_fn)
+        self.plot_type = plot_type
+        self.plot_field = plot_field
+        self.plot_axis = plot_axis
+        self.plot_kwargs = {}
+        self.attr_name = attr_name
+        self.attr_args = attr_args
+        self.decimals = decimals
+
+    def run(self):
+        plot = self.create_plot(self.pf, self.plot_type, self.plot_field,
+                                self.plot_axis, self.plot_kwargs)
+        attr = getattr(plot, self.attr_name)
+        attr(*self.attr_args[0], **self.attr_args[1])
+        fn = plot.save()[0]
+        image = mpimg.imread(fn)
+        os.remove(fn)
+        return [zlib.compress(image.dumps())]
+
+    def compare(self, new_result, old_result):
+        fns = ['old.png', 'new.png']
+        mpimg.imsave(fns[0], np.loads(zlib.decompress(old_result[0])))
+        mpimg.imsave(fns[1], np.loads(zlib.decompress(new_result[0])))
+        compare_images(fns[0], fns[1], 10**(-self.decimals))
+        for fn in fns: os.remove(fn)
+
 def requires_pf(pf_fn, big_data = False):
     def ffalse(func):
         return lambda: None
@@ -602,4 +671,3 @@
 
     def __call__(self):
         self.args[0](*self.args[1:])
-

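The new PlotWindowAttributeTest stores each rendered image as a
zlib-compressed pickle of the array and inflates both sides before handing
them to matplotlib's compare_images. The round trip in isolation (the array
is a stand-in for a real image buffer):

    import zlib
    import numpy as np

    image = np.random.random((8, 8, 4))         # stand-in image buffer
    blob = zlib.compress(image.dumps())         # what run() stores
    restored = np.loads(zlib.decompress(blob))  # what compare() rebuilds
    assert (restored == image).all()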
diff -r 9586a3112cd440027b1f353c59e95dd638ea5676 -r a5a04b9207b187eb09478969630d01275da603f1 yt/utilities/command_line.py
--- a/yt/utilities/command_line.py
+++ b/yt/utilities/command_line.py
@@ -309,10 +309,10 @@
         return
     print "".join(file(date_file, 'r').readlines())
     print "To update all dependencies, run \"yt update --all\"."
-    
+
 def _update_yt_stack(path):
     "Rerun the install script to updated all dependencies."
-    
+
     install_script = os.path.join(path, "doc/install_script.sh")
     if not os.path.exists(install_script):
         print
@@ -1292,7 +1292,10 @@
         elif args.center is None:
             center = 0.5*(pf.domain_left_edge + pf.domain_right_edge)
         center = np.array(center)
-        if args.axis == 4:
+        if pf.dimensionality < 3:
+            dummy_dimensions = np.nonzero(pf.h.grids[0].ActiveDimensions <= 1)
+            axes = ensure_list(dummy_dimensions[0][0])
+        elif args.axis == 4:
             axes = range(3)
         else:
             axes = [args.axis]

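For sub-3D datasets the plotting command now picks the flat axis
automatically: any dimension spanning a single cell is the only one a slice
can sensibly be taken along. The selection in isolation (the dimensions are
made up):

    import numpy as np

    ActiveDimensions = np.array([64, 64, 1])  # a 2D dataset
    dummy_dimensions = np.nonzero(ActiveDimensions <= 1)
    print dummy_dimensions[0][0]              # 2, i.e. the z axis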
diff -r 9586a3112cd440027b1f353c59e95dd638ea5676 -r a5a04b9207b187eb09478969630d01275da603f1 yt/utilities/decompose.py
--- a/yt/utilities/decompose.py
+++ b/yt/utilities/decompose.py
@@ -69,8 +69,8 @@
     """ Evaluate longest to shortest edge ratio
         BEWARE: lots of magic here """
     eff_dim = (n_d > 1).sum()
-    ideal_bsize = eff_dim * (pieces * np.product(n_d) ** (eff_dim - 1)
-                             ) ** (1.0 / eff_dim)
+    exp = float(eff_dim - 1) / float(eff_dim)
+    ideal_bsize = eff_dim * pieces ** (1.0 / eff_dim) * np.product(n_d) ** exp
     mask = np.where(n_d > 1)
     nd_arr = np.array(n_d, dtype=np.float64)[mask]
     bsize = int(np.sum(ldom[mask] / nd_arr * np.product(nd_arr)))
@@ -109,6 +109,10 @@
     fac = factorize_number(pieces)
     nfactors = len(fac[:, 2])
     best = 0.0
+    p_size = np.ones(3, dtype=np.int)
+    if pieces == 1:
+        return p_size
+
     while np.all(fac[:, 2] > 0):
         ldom = np.ones(3, dtype=np.int)
         for nfac in range(nfactors):

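The rewritten ideal_bsize is algebraically identical to the old expression,
eff_dim * (pieces * prod(n_d)**(d-1))**(1/d) with d = eff_dim, but takes the
root factor by factor instead of forming one huge intermediate. A quick
numerical check:

    import numpy as np

    n_d, pieces, d = np.array([64, 64, 64]), 8, 3
    old = d * (pieces * np.product(n_d) ** (d - 1)) ** (1.0 / d)
    exp = float(d - 1) / float(d)
    new = d * pieces ** (1.0 / d) * np.product(n_d) ** exp
    print old, new  # both ~24576, equal to within float rounding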
diff -r 9586a3112cd440027b1f353c59e95dd638ea5676 -r a5a04b9207b187eb09478969630d01275da603f1 yt/utilities/definitions.py
--- a/yt/utilities/definitions.py
+++ b/yt/utilities/definitions.py
@@ -48,7 +48,8 @@
 y_names = ['z','z','y']
 
 # How many of each thing are in an Mpc
-mpc_conversion = {'mpc'   : mpc_per_mpc,
+mpc_conversion = {'Mpc'   : mpc_per_mpc,
+                  'mpc'   : mpc_per_mpc,
                   'kpc'   : kpc_per_mpc,
                   'pc'    : pc_per_mpc,
                   'au'    : au_per_mpc,
@@ -56,7 +57,7 @@
                   'miles' : miles_per_mpc,
                   'cm'    : cm_per_mpc}
 
-# How many seconds are in each thig
+# How many seconds are in each thing
 sec_conversion = {'Gyr'   : sec_per_Gyr,
                   'Myr'   : sec_per_Myr,
                   'years' : sec_per_year,

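Dictionary lookups are case-sensitive, so the new capitalized 'Mpc' entry
simply aliases the lowercase key and lets either spelling resolve:

    mpc_conversion = {'Mpc': 1.0, 'mpc': 1.0, 'kpc': 1000.0}  # abridged
    print mpc_conversion['Mpc'] == mpc_conversion['mpc']      # True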
This diff is so big that we needed to truncate the remainder.

https://bitbucket.org/yt_analysis/yt/commits/9166fa060a7b/
Changeset:   9166fa060a7b
Branch:      yt
User:        atmyers
Date:        2013-05-07 02:13:02
Summary:     add particle mass fields for orion and chombo
Affected #:  2 files

diff -r a5a04b9207b187eb09478969630d01275da603f1 -r 9166fa060a7b4982adef36637fb83322935d7d24 yt/frontends/chombo/fields.py
--- a/yt/frontends/chombo/fields.py
+++ b/yt/frontends/chombo/fields.py
@@ -174,3 +174,18 @@
     add_field("particle_%s" % pf, function=pfunc,
               validators = [ValidateSpatial(0)],
               particle_type=True)
+
+def _ParticleMass(field, data):
+    particles = data["particle_mass"].astype('float64')
+    return particles
+
+def _ParticleMassMsun(field, data):
+    particles = data["particle_mass"].astype('float64')
+    return particles/1.989e33
+
+add_field("ParticleMass",
+          function=_ParticleMass, validators=[ValidateSpatial(0)],
+          particle_type=True)
+add_field("ParticleMassMsun",
+          function=_ParticleMassMsun, validators=[ValidateSpatial(0)],
+          particle_type=True)

diff -r a5a04b9207b187eb09478969630d01275da603f1 -r 9166fa060a7b4982adef36637fb83322935d7d24 yt/frontends/orion/fields.py
--- a/yt/frontends/orion/fields.py
+++ b/yt/frontends/orion/fields.py
@@ -174,3 +174,18 @@
     add_field("particle_%s" % pf, function=pfunc,
               validators = [ValidateSpatial(0)],
               particle_type=True)
+
+def _ParticleMass(field, data):
+    particles = data["particle_mass"].astype('float64')
+    return particles
+
+def _ParticleMassMsun(field, data):
+    particles = data["particle_mass"].astype('float64')
+    return particles/1.989e33
+
+add_field("ParticleMass",
+          function=_ParticleMass, validators=[ValidateSpatial(0)],
+          particle_type=True)
+add_field("ParticleMassMsun",
+          function=_ParticleMassMsun, validators=[ValidateSpatial(0)],
+          particle_type=True)

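Both frontends convert particle masses from grams to solar masses by
dividing by 1.989e33, the solar mass in CGS. The conversion on its own, with
an illustrative particle:

    msun_cgs = 1.989e33             # grams per solar mass
    particle_mass = 3.978e33        # grams
    print particle_mass / msun_cgs  # ~2.0 Msun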

https://bitbucket.org/yt_analysis/yt/commits/34979d998227/
Changeset:   34979d998227
Branch:      yt
User:        atmyers
Date:        2013-05-07 02:37:59
Summary:     wrap code that touched ParticleMass in a try/except block
Affected #:  1 file

diff -r 9166fa060a7b4982adef36637fb83322935d7d24 -r 34979d998227182ac38c2c51b876472755d650ff yt/data_objects/derived_quantities.py
--- a/yt/data_objects/derived_quantities.py
+++ b/yt/data_objects/derived_quantities.py
@@ -151,8 +151,12 @@
     particle masses in the object.
     """
     baryon_mass = data["CellMassMsun"].sum()
-    particle_mass = data["ParticleMassMsun"].sum()
-    return [baryon_mass + particle_mass]
+    try:
+        particle_mass = data["ParticleMassMsun"].sum()
+        total_mass = baryon_mass + particle_mass
+    except KeyError:
+        total_mass = baryon_mass
+    return [total_mass]
 def _combTotalMass(data, total_mass):
     return total_mass.sum()
 add_quantity("TotalMass", function=_TotalMass,

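With the try/except in place, TotalMass degrades gracefully on outputs that
define no particle fields: the KeyError raised for the missing
ParticleMassMsun field leaves the baryonic mass alone as the total. The
guarded-sum pattern in isolation (the dict stands in for a yt data object):

    import numpy as np

    def total_mass(data):
        total = data["CellMassMsun"].sum()
        try:
            total += data["ParticleMassMsun"].sum()
        except KeyError:
            pass  # no particles in this output
        return total

    print total_mass({"CellMassMsun": np.array([1.0, 2.0])})  # 3.0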
Repository URL: https://bitbucket.org/yt_analysis/yt/

--

This is a commit notification from bitbucket.org. You are receiving it
because you have the commit notification service enabled for this
repository.
