[yt-svn] commit/yt: 36 new changesets

commits-noreply at bitbucket.org
Wed Apr 20 11:04:09 PDT 2016


36 new commits in yt:

https://bitbucket.org/yt_analysis/yt/commits/270604a874e4/
Changeset:   270604a874e4
Branch:      yt
User:        ngoldbaum
Date:        2016-02-29 23:24:27+00:00
Summary:     Initial attempt at merging get_yt.sh and install_script.sh
Affected #:  2 files

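In brief: doc/get_yt.sh is reduced to a deprecation stub, and its conda-based
install path is folded into doc/install_script.sh behind two switches,
INST_CONDA and INST_YT_SOURCE. As a quick orientation, here is a minimal
sketch of the configuration matrix the merged script supports, paraphrased
from the diff below (the sketch itself is not part of the patch):

    # INST_CONDA=1  INST_YT_SOURCE=0  ->  conda deps, yt as a conda package (default)
    # INST_CONDA=1  INST_YT_SOURCE=1  ->  conda deps, yt built from source
    # INST_CONDA=0  INST_YT_SOURCE=1  ->  yt and all dependencies built from source (slow)
    # INST_CONDA=0  INST_YT_SOURCE=0  ->  rejected; the script prints an error and exits
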
diff -r 7130b7cef71f9422cc6191b755e1bd5ca96fbaa0 -r 270604a874e49fcbff493bbb7cf3b473b8649f3e doc/get_yt.sh
--- a/doc/get_yt.sh
+++ b/doc/get_yt.sh
@@ -1,394 +1,4 @@
-#
-# Hi there!  Welcome to the yt installation script.
-#
-# This script is designed to create a fully isolated Python installation
-# with the dependencies you need to run yt.
-#
-# This script is based on Conda, a distribution mechanism from Continuum
-# Analytics.  The process is as follows:
-#
-#  1. Download the appropriate Conda installation package
-#  2. Install Conda into the specified directory
-#  3. Install yt-specific dependencies
-#  4. Install yt
-#
-# There are a few options listed below, but by default, this will install
-# everything.  At the end, it will tell you what to do to use yt.
-#
-# By default this will install yt from source.
-#
-# If you experience problems, please visit the Help section at 
-# http://yt-project.org.
-#
-DEST_SUFFIX="yt-conda"
-DEST_DIR="`pwd`/${DEST_SUFFIX/ /}"   # Installation location
-BRANCH="yt" # This is the branch to which we will forcibly update.
-INST_YT_SOURCE=1 # Do we do a source install of yt?
-INST_UNSTRUCTURED=1 # Do we want to build with unstructured mesh support?
-
-##################################################################
-#                                                                #
-# You will likely not have to modify anything below this region. #
-#                                                                #
-##################################################################
-
-LOG_FILE="`pwd`/yt_install.log"
-
-# Here is the idiom for redirecting to the log file:
-# ( SOMECOMMAND 2>&1 ) 1>> ${LOG_FILE} || do_exit
-
-MINICONDA_URLBASE="http://repo.continuum.io/miniconda"
-MINICONDA_VERSION="latest"
-YT_RECIPE_REPO="https://bitbucket.org/yt_analysis/yt_conda/raw/default"
-
-if [ $INST_UNSTRUCTURED -eq 1 ]
-then
-  if [ $INST_YT_SOURCE -eq 0 ]
-  then
-      echo "yt must be compiled from source to use the unstructured mesh support."
-      echo "Please set INST_YT_SOURCE to 1 and re-run."
-      exit 1
-  fi
-  if [ `uname` = "Darwin" ]
-  then
-      EMBREE="embree-2.8.0.x86_64.macosx"
-      EMBREE_URL="https://github.com/embree/embree/releases/download/v2.8.0/$EMBREE.tar.gz"
-  else
-      EMBREE="embree-2.8.0.x86_64.linux"
-      EMBREE_URL="https://github.com/embree/embree/releases/download/v2.8.0/$EMBREE.tar.gz"
-  fi
-  PYEMBREE_URL="https://github.com/scopatz/pyembree/archive/master.zip"
-fi
-
-function do_exit
-{
-    echo "********************************************"
-    echo "        FAILURE REPORT:"
-    echo "********************************************"
-    echo
-    tail -n 10 ${LOG_FILE}
-    echo
-    echo "********************************************"
-    echo "********************************************"
-    echo "Failure.  Check ${LOG_FILE}.  The last 10 lines are above."
-    exit 1
-}
-
-function log_cmd
-{
-    echo "EXECUTING:" >> ${LOG_FILE}
-    echo "  $*" >> ${LOG_FILE}
-    ( $* 2>&1 ) 1>> ${LOG_FILE} || do_exit
-}
-
-# These are needed to prevent pushd and popd from printing to stdout
-
-function pushd () {
-    command pushd "$@" > /dev/null
-}
-
-function popd () {
-    command popd "$@" > /dev/null
-}
-
-function get_ytdata
-{
-    echo "Downloading $1 from yt-project.org"
-    [ -e $1 ] && return
-    ${GETFILE} "http://yt-project.org/data/$1" || do_exit
-    ( ${SHASUM} -c $1.sha512 2>&1 ) 1>> ${LOG_FILE} || do_exit
-}
-
-function get_ytrecipe {
-    RDIR=${DEST_DIR}/src/yt-recipes/$1
-    mkdir -p ${RDIR}
-    pushd ${RDIR}
-    log_cmd ${GETFILE} ${YT_RECIPE_REPO}/$1/meta.yaml
-    log_cmd ${GETFILE} ${YT_RECIPE_REPO}/$1/build.sh
-    NEW_PKG=`conda build --output ${RDIR}`
-    log_cmd conda build --no-binstar-upload ${RDIR}
-    log_cmd conda install ${NEW_PKG}
-    popd
-}
-
-
-echo
-echo
-echo "========================================================================"
-echo
-echo "Hi there!  This is the yt installation script.  We're going to download"
-echo "some stuff and install it to create a self-contained, isolated"
-echo "environment for yt to run within."
-echo
-echo "This will install Miniconda from Continuum Analytics, the necessary"
-echo "packages to run yt, and create a self-contained environment for you to"
-echo "use yt.  Additionally, Conda itself provides the ability to install"
-echo "many other packages that can be used for other purposes using the"
-echo "'conda install' command."
-echo
-MYOS=`uname -s`       # A guess at the OS
-if [ $INST_YT_SOURCE -ne 0 ]
-then
-    if [ "${MYOS##Darwin}" != "${MYOS}" ]
-    then
-        echo "Looks like you're running on Mac OSX."
-        echo
-        echo "NOTE: you must have the Xcode command line tools installed."
-        echo
-        echo "The instructions for obtaining these tools varies according"
-        echo "to your exact OS version.  On older versions of OS X, you"
-        echo "must register for an account on the apple developer tools"
-        echo "website: https://developer.apple.com/downloads to obtain the"
-        echo "download link."
-        echo
-        echo "We have gathered some additional instructions for each"
-        echo "version of OS X below. If you have trouble installing yt"
-        echo "after following these instructions, don't hesitate to contact"
-        echo "the yt user's e-mail list."
-        echo
-        echo "You can see which version of OSX you are running by clicking"
-        echo "'About This Mac' in the apple menu on the left hand side of"
-        echo "menu bar.  We're assuming that you've installed all operating"
-        echo "system updates; if you have an older version, we suggest"
-        echo "running software update and installing all available updates."
-        echo
-        echo "OS X 10.5.8: search for and download Xcode 3.1.4 from the"
-        echo "Apple developer tools website."
-        echo
-        echo "OS X 10.6.8: search for and download Xcode 3.2 from the Apple"
-        echo "developer tools website.  You can either download the"
-        echo "Xcode 3.2.2 Developer Tools package (744 MB) and then use"
-        echo "Software Update to update to XCode 3.2.6 or"
-        echo "alternatively, you can download the Xcode 3.2.6/iOS SDK"
-        echo "bundle (4.1 GB)."
-        echo
-        echo "OS X 10.7.5: download Xcode 4.2 from the mac app store"
-        echo "(search for Xcode)."
-        echo "Alternatively, download the Xcode command line tools from"
-        echo "the Apple developer tools website."
-        echo
-        echo "OS X 10.8.4, 10.9, 10.10, and 10.11:"
-        echo "download the appropriate version of Xcode from the"
-        echo "mac app store (search for Xcode)."
-        echo
-        echo "Additionally, you will have to manually install the Xcode"
-        echo "command line tools."
-        echo
-        echo "For OS X 10.8, see:"
-        echo "http://stackoverflow.com/questions/9353444"
-        echo
-        echo "For OS X 10.9 and newer the command line tools can be installed"
-        echo "with the following command:"
-        echo "    xcode-select --install"
-    fi
-    if [ "${MYOS##Linux}" != "${MYOS}" ]
-    then
-        echo "Looks like you're on Linux."
-        echo
-        echo "Please make sure you have the developer tools for your OS "
-        echo "installed."
-        echo
-        if [ -f /etc/SuSE-release ] && [ `grep --count SUSE /etc/SuSE-release` -gt 0 ]
-        then
-            echo "Looks like you're on an OpenSUSE-compatible machine."
-            echo
-            echo "You need to have these packages installed:"
-            echo
-            echo "  * devel_C_C++"
-            echo "  * libuuid-devel"
-            echo "  * gcc-c++"
-            echo "  * chrpath"
-            echo
-            echo "You can accomplish this by executing:"
-            echo
-            echo "$ sudo zypper install -t pattern devel_C_C++"
-            echo "$ sudo zypper install gcc-c++ libuuid-devel zip"
-            echo "$ sudo zypper install chrpath"
-        fi
-        if [ -f /etc/lsb-release ] && [ `grep --count buntu /etc/lsb-release` -gt 0 ]
-        then
-            echo "Looks like you're on an Ubuntu-compatible machine."
-            echo
-            echo "You need to have these packages installed:"
-            echo
-            echo "  * libssl-dev"
-            echo "  * build-essential"
-            echo "  * libncurses5"
-            echo "  * libncurses5-dev"
-            echo "  * uuid-dev"
-            echo "  * chrpath"
-            echo
-            echo "You can accomplish this by executing:"
-            echo
-            echo "$ sudo apt-get install libssl-dev build-essential libncurses5 libncurses5-dev zip uuid-dev chrpath"
-            echo
-        fi
-        echo
-        echo "If you are running on a supercomputer or other module-enabled"
-        echo "system, please make sure that the GNU module has been loaded."
-        echo
-    fi
-fi
-if [ "${MYOS##x86_64}" != "${MYOS}" ]
-then
-    MINICONDA_OS="Linux-x86_64"
-elif [ "${MYOS##i386}" != "${MYOS}" ]
-then
-    MINICONDA_OS="Linux-x86"
-elif [ "${MYOS##Darwin}" != "${MYOS}" ]
-then
-     MINICONDA_OS="MacOSX-x86_64"
-else
-    echo "Not sure which Linux distro you are running."
-    echo "Going with x86_64 architecture."
-    MINICONDA_OS="Linux-x86_64"
-fi
-echo
-echo "If you'd rather not continue, hit Ctrl-C."
-echo
-echo "========================================================================"
-echo
-read -p "[hit enter] "
-echo
-echo "Awesome!  Here we go."
-echo
-
-MINICONDA_PKG=Miniconda-${MINICONDA_VERSION}-${MINICONDA_OS}.sh
-
-if type -P wget &>/dev/null
-then
-    echo "Using wget"
-    export GETFILE="wget -nv -nc"
-else
-    echo "Using curl"
-    export GETFILE="curl -sSO"
-fi
-
-echo
-echo "Downloading ${MINICONDA_URLBASE}/${MINICONDA_PKG}"
-echo "Downloading ${MINICONDA_URLBASE}/${MINICONDA_PKG}" >> ${LOG_FILE}
-echo
-
-${GETFILE} ${MINICONDA_URLBASE}/${MINICONDA_PKG} || do_exit
-
-echo "Installing the Miniconda python environment."
-
-log_cmd bash ./${MINICONDA_PKG} -b -p $DEST_DIR
-
-# This we *do* need.
-export PATH=${DEST_DIR}/bin:$PATH
-
-echo "Installing the necessary packages for yt."
-echo "This may take a while, but don't worry.  yt loves you."
-
-declare -a YT_DEPS
-YT_DEPS+=('python')
-YT_DEPS+=('setuptools')
-YT_DEPS+=('numpy')
-YT_DEPS+=('jupyter')
-YT_DEPS+=('ipython')
-YT_DEPS+=('sphinx')
-YT_DEPS+=('h5py')
-YT_DEPS+=('matplotlib')
-YT_DEPS+=('cython')
-YT_DEPS+=('nose')
-YT_DEPS+=('conda-build')
-YT_DEPS+=('mercurial')
-YT_DEPS+=('sympy')
-
-if [ $INST_UNSTRUCTURED -eq 1 ]
-then
-  YT_DEPS+=('netcdf4')   
-fi
-
-# Here is our dependency list for yt
-log_cmd conda update --yes conda
-
-log_cmd echo "DEPENDENCIES" ${YT_DEPS[@]}
-for YT_DEP in "${YT_DEPS[@]}"; do
-    echo "Installing $YT_DEP"
-    log_cmd conda install --yes ${YT_DEP}
-done
-
-if [ $INST_UNSTRUCTURED -eq 1 ]
-then
-
-  echo "Installing embree"
-  mkdir ${DEST_DIR}/src
-  cd ${DEST_DIR}/src
-  ( ${GETFILE} "$EMBREE_URL" 2>&1 ) 1>> ${LOG_FILE} || do_exit
-  log_cmd tar xfz ${EMBREE}.tar.gz
-  log_cmd mv ${DEST_DIR}/src/${EMBREE}/include/embree2 ${DEST_DIR}/include
-  log_cmd mv ${DEST_DIR}/src/${EMBREE}/lib/lib*.* ${DEST_DIR}/lib
-  if [ `uname` = "Darwin" ]
-  then
-    ln -s ${DEST_DIR}/lib/libembree.2.dylib ${DEST_DIR}/lib/libembree.dylib
-    install_name_tool -id ${DEST_DIR}/lib/libembree.2.dylib ${DEST_DIR}/lib/libembree.2.dylib
-  else
-    ln -s ${DEST_DIR}/lib/libembree.so.2 ${DEST_DIR}/lib/libembree.so
-  fi
-
-  echo "Installing pyembree from source"
-  ( ${GETFILE} "$PYEMBREE_URL" 2>&1 ) 1>> ${LOG_FILE} || do_exit
-  log_cmd unzip ${DEST_DIR}/src/master.zip
-  pushd ${DEST_DIR}/src/pyembree-master
-  log_cmd python setup.py install build_ext -I${DEST_DIR}/include -L${DEST_DIR}/lib
-  popd
-fi
-
-if [ $INST_YT_SOURCE -eq 0 ]
-then
-  echo "Installing yt"
-  log_cmd conda install --yes yt
-else
-    # We do a source install.
-    echo "Installing yt from source"
-    YT_DIR="${DEST_DIR}/src/yt-hg"
-    log_cmd hg clone -r ${BRANCH} https://bitbucket.org/yt_analysis/yt ${YT_DIR}
-if [ $INST_UNSTRUCTURED -eq 1 ]
-then
-    echo $DEST_DIR > ${YT_DIR}/embree.cfg
-fi
-    pushd ${YT_DIR}
-    log_cmd python setup.py develop
-    popd
-fi
-
-echo
-echo
-echo "========================================================================"
-echo
-echo "yt and the Conda system are now installed in $DEST_DIR ."
-echo
-echo "You must now modify your PATH variable by prepending:"
-echo 
-echo "   $DEST_DIR/bin"
-echo
-echo "On Bash-style shells you can copy/paste the following command to "
-echo "temporarily activate the yt installation:"
-echo
-echo "    export PATH=$DEST_DIR/bin:\$PATH"
-echo
-echo "and on csh-style shells:"
-echo
-echo "    setenv PATH $DEST_DIR/bin:\$PATH"
-echo
-echo "You can also update the init file appropriate for your shell to include"
-echo "the same command."
-echo
-echo "To get started with yt, check out the orientation:"
-echo
-echo "    http://yt-project.org/doc/orientation/"
-echo
-echo "For support, see the website and join the mailing list:"
-echo
-echo "    http://yt-project.org/"
-echo "    http://yt-project.org/data/      (Sample data)"
-echo "    http://yt-project.org/doc/       (Docs)"
-echo
-echo "    http://lists.spacepope.org/listinfo.cgi/yt-users-spacepope.org"
-echo
-echo "========================================================================"
-echo
-echo "Oh, look at me, still talking when there's science to do!"
-echo "Good luck, and email the user list if you run into any problems."
+echo "This script has been deprecated."
+echo "You can now create a conda-based build using install_script.sh"
+echo "Please download that script and run it"
+exit 0

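With get_yt.sh reduced to the stub above, a user who previously ran it would
instead fetch and run the merged installer. A minimal sketch of that workflow,
assuming the script remains at doc/install_script.sh on the "yt" branch of the
repository named in this diff (the raw-download URL layout is an assumption,
not something the patch specifies):

    # Hypothetical fetch-and-run of the consolidated installer:
    wget https://bitbucket.org/yt_analysis/yt/raw/yt/doc/install_script.sh
    bash install_script.sh
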
diff -r 7130b7cef71f9422cc6191b755e1bd5ca96fbaa0 -r 270604a874e49fcbff493bbb7cf3b473b8649f3e doc/install_script.sh
--- a/doc/install_script.sh
+++ b/doc/install_script.sh
@@ -7,10 +7,42 @@
 # This script is designed to create a fully isolated Python installation
 # with the dependencies you need to run yt.
 #
-# There are a few options, but you only need to set *one* of them, which is 
-# the next one, DEST_DIR:
+# If you would like to customize the yt installation, then please edit
+# the following options.
 
-DEST_SUFFIX="yt-`uname -m`"
+# If you do not have a working compiler environment, use the following 
+# configuration:
+
+INST_CONDA=1       # Should yt's dependencies be installed using miniconda?
+INST_YT_SOURCE=0   # Should yt itself be installed from source?
+
+# If you want to install yt's dependencies using conda but want to build yt
+# itself from source, use the following configuration:
+
+# INST_CONDA=1
+# INST_YT_SOURCE=1
+
+# If you would like to build yt and all dependencies from source, then
+# use the following configuration by uncommenting the lines below.
+# NOTE: Building yt's dependencies from source will cause the install script
+# to require substantially more time to finish.
+
+# INST_CONDA=0
+# INST_YT_SOURCE=1
+
+if [ $INST_CONDA -ne 0 ]
+then
+    DEST_SUFFIX="yt-conda"
+else
+    if [ $INST_YT_SOURCE -eq 0 ]
+    then
+        echo "yt must be compiled from source if INST_CONDA is set"
+        echo "Please set INST_YT_SOURCE to 1 and re-run."
+        exit 1
+    fi
+    DEST_SUFFIX="yt-`uname -m`"
+fi
+
 DEST_DIR="`pwd`/${DEST_SUFFIX/ /}"   # Installation location
 BRANCH="yt" # This is the branch to which we will forcibly update.
 
@@ -29,40 +61,65 @@
 # already check the current directory and the one above it in the tree.
 YT_DIR=""
 
+# These options can be set to customize the installation.
+
+INST_PY3=0          # Install Python 3 along with Python 2. If this is turned
+                    # on, all Python packages (including yt) will be installed
+                    # in Python 3 (except Mercurial, which requires Python 2).
+INST_HG=1           # Install Mercurial or not?  If hg is not already
+                    # installed, yt cannot be installed from source.
+INST_UNSTRUCTURED=0 # Install dependencies needed for unstructured mesh 
+                    # rendering?
+
+# These options control whether low-level system libraries are installed.
+# They are necessary for building yt's dependencies from source and are
+# ignored when INST_CONDA=1.
+
+INST_ZLIB=1     # On some systems (Kraken) matplotlib has issues with
+                # the system zlib, which is compiled statically.
+                # If need be, you can turn this off. 
+INST_BZLIB=1    # On some systems, libbzip2 is missing.  This can
+                # lead to broken mercurial installations.
+INST_PNG=1      # Install a local libpng?  Same things apply as with zlib.
+INST_FTYPE=1    # Install FreeType2 locally?
+INST_SQLITE3=1  # Install a local version of SQLite3?
+INST_0MQ=1      # Install 0mq (for IPython) and affiliated bindings?
+
+# These variables control whether optional dependencies are installed
+
+INST_PYX=0      # Install PyX?  Sometimes PyX can be problematic without a
+                # working TeX installation.
+INST_ROCKSTAR=0 # Install the Rockstar halo finder?
+INST_SCIPY=0    # Install scipy?
+INST_H5PY=1     # Install h5py?
+INST_ASTROPY=0  # Install astropy?
+INST_NOSE=1     # Install nose?
+
+# These options allow you to customize the builds of yt dependencies.
+# They are only used if INST_CONDA=0.
+
+# If you need to pass anything to the matplotlib build, do so here.
+MPL_SUPP_LDFLAGS=""
+MPL_SUPP_CFLAGS=""
+MPL_SUPP_CXXFLAGS=""
+
 # If you need to supply arguments to the NumPy or SciPy build, supply them here
 # This one turns on gfortran manually:
 #NUMPY_ARGS="--fcompiler=gnu95"
 # If you absolutely can't get the fortran to work, try this:
 #NUMPY_ARGS="--fcompiler=fake"
 
-INST_PY3=0      # Install Python 3 along with Python 2. If this is turned
-                # on, all Python packages (including yt) will be installed
-                # in Python 3 (except Mercurial, which requires Python 2).
-INST_HG=1       # Install Mercurial or not?  If hg is not already
-                # installed, yt cannot be installed.
-INST_ZLIB=1     # On some systems (Kraken) matplotlib has issues with
-                # the system zlib, which is compiled statically.
-                # If need be, you can turn this off.
-INST_BZLIB=1    # On some systems, libbzip2 is missing.  This can
-                # lead to broken mercurial installations.
-INST_PNG=1      # Install a local libpng?  Same things apply as with zlib.
-INST_FTYPE=1    # Install FreeType2 locally?
-INST_SQLITE3=1  # Install a local version of SQLite3?
-INST_PYX=0      # Install PyX?  Sometimes PyX can be problematic without a
-                # working TeX installation.
-INST_0MQ=1      # Install 0mq (for IPython) and affiliated bindings?
-INST_ROCKSTAR=0 # Install the Rockstar halo finder?
-INST_SCIPY=0    # Install scipy?
-
-# If you need to pass anything to matplotlib, do so here.
-MPL_SUPP_LDFLAGS=""
-MPL_SUPP_CFLAGS=""
-MPL_SUPP_CXXFLAGS=""
-
 # If you want to spawn multiple Make jobs, here's the place to set the
 # arguments.  For instance, "-j4"
 MAKE_PROCS=""
 
+# These variables control which miniconda version and yt recipe are used
+# when INST_CONDA=1.
+
+MINICONDA_URLBASE="http://repo.continuum.io/miniconda"
+MINICONDA_VERSION="latest"
+YT_RECIPE_REPO="https://bitbucket.org/yt_analysis/yt_conda/raw/default"
+
 # Make sure we are NOT being run as root
 if [[ $EUID -eq 0 ]]
 then
@@ -87,6 +144,10 @@
    echo "*                                                    *"
    echo "*                                                    *"
    echo "******************************************************"
+   echo
+   echo "If you really want to do this, you must manually edit"
+   echo "the script."
+   echo "Sorry!"
    exit 1
 fi
 
@@ -129,18 +190,6 @@
     fi
 }
 
-# Write config settings to file.
-CONFIG_FILE=${DEST_DIR}/.yt_config
-mkdir -p ${DEST_DIR}
-if [ -z ${REINST_YT} ] || [ ${REINST_YT} -neq 1 ]
-then
-    write_config
-elif [ ${REINST_YT} ] && [ ${REINST_YT} -eq 1 ] && [ -f ${CONFIG_FILE} ]
-then
-    USED_CONFIG=1
-    source ${CONFIG_FILE}
-fi
-
 function get_willwont
 {
     if [ $1 -eq 1 ]
@@ -391,6 +440,39 @@
     fi
 }
 
+function log_cmd
+{
+    echo "EXECUTING:" >> ${LOG_FILE}
+    echo "  $*" >> ${LOG_FILE}
+    ( $* 2>&1 ) 1>> ${LOG_FILE} || do_exit
+}
+
+# set paths needed for unstructured mesh rendering support
+
+if [ $INST_UNSTRUCTURED -ne 0 ]
+then
+    if [ $INST_YT_SOURCE -eq 0 ]
+    then
+        echo "yt must be compiled from source to install support for"
+        echo "unstructured mesh rendering. Please set INST_YT_SOURCE to 1"
+        echo "and re-run the install script."
+        exit 1
+    fi
+    if [ `uname` = "Darwin" ]
+    then
+        EMBREE="embree-2.8.0.x86_64.macosx"
+        EMBREE_URL="https://github.com/embree/embree/releases/download/v2.8.0/$EMBREE.tar.gz"
+    elif [ `uname` = "Linux" ]
+    then
+            EMBREE="embree-2.8.0.x86_64.linux"
+            EMBREE_URL="https://github.com/embree/embree/releases/download/v2.8.0/$EMBREE.tar.gz"
+    else
+        echo "Unstructured mesh rendering is not supported on this platform."
+        echo "Set INST_UNSTRUCTURED=0 and re-run the install script."
+        exit 1
+    fi
+    PYEMBREE_URL="https://github.com/scopatz/pyembree/archive/master.zip"
+fi
 
 echo
 echo
@@ -404,53 +486,72 @@
 echo "they're currently set to -- you can hit Ctrl-C and edit the values in "
 echo "the script if you aren't such a fan."
 echo
-printf "%-15s = %s so I " "INST_ZLIB" "${INST_ZLIB}"
-get_willwont ${INST_ZLIB}
-echo "be installing zlib"
 
-printf "%-15s = %s so I " "INST_BZLIB" "${INST_BZLIB}"
-get_willwont ${INST_BZLIB}
-echo "be installing bzlib"
-
-printf "%-15s = %s so I " "INST_PNG" "${INST_PNG}"
-get_willwont ${INST_PNG}
-echo "be installing libpng"
-
-printf "%-15s = %s so I " "INST_FTYPE" "${INST_FTYPE}"
-get_willwont ${INST_FTYPE}
-echo "be installing freetype2"
-
-printf "%-15s = %s so I " "INST_SQLITE3" "${INST_SQLITE3}"
-get_willwont ${INST_SQLITE3}
-echo "be installing SQLite3"
-
-printf "%-15s = %s so I " "INST_PY3" "${INST_PY3}"
+printf "%-18s = %s so I " "INST_PY3" "${INST_PY3}"
 get_willwont ${INST_PY3}
 echo "be installing Python 3"
 
-printf "%-15s = %s so I " "INST_HG" "${INST_HG}"
+printf "%-18s = %s so I " "INST_HG" "${INST_HG}"
 get_willwont ${INST_HG}
 echo "be installing Mercurial"
 
-printf "%-15s = %s so I " "INST_PYX" "${INST_PYX}"
+printf "%-18s = %s so I " "INST_UNSTRUCTURED" "${INST_UNSTRUCTURED}"
+get_willwont ${INST_UNSTRUCTURED}
+echo "be installing support for unstructured mesh rendering"
+
+if [ $INST_CONDA -eq 0 ]
+then
+    printf "%-18s = %s so I " "INST_ZLIB" "${INST_ZLIB}"
+    get_willwont ${INST_ZLIB}
+    echo "be installing zlib"
+
+    printf "%-18s = %s so I " "INST_BZLIB" "${INST_BZLIB}"
+    get_willwont ${INST_BZLIB}
+    echo "be installing bzlib"
+
+    printf "%-18s = %s so I " "INST_PNG" "${INST_PNG}"
+    get_willwont ${INST_PNG}
+    echo "be installing libpng"
+
+    printf "%-18s = %s so I " "INST_FTYPE" "${INST_FTYPE}"
+    get_willwont ${INST_FTYPE}
+    echo "be installing freetype2"
+
+    printf "%-18s = %s so I " "INST_SQLITE3" "${INST_SQLITE3}"
+    get_willwont ${INST_SQLITE3}
+    echo "be installing SQLite3"
+fi
+
+printf "%-18s = %s so I " "INST_PYX" "${INST_PYX}"
 get_willwont ${INST_PYX}
 echo "be installing PyX"
 
-printf "%-15s = %s so I " "INST_SCIPY" "${INST_SCIPY}"
-get_willwont ${INST_SCIPY}
-echo "be installing scipy"
-
-printf "%-15s = %s so I " "INST_ROCKSTAR" "${INST_ROCKSTAR}"
+printf "%-18s = %s so I " "INST_ROCKSTAR" "${INST_ROCKSTAR}"
 get_willwont ${INST_ROCKSTAR}
 echo "be installing Rockstar"
 
+printf "%-18s = %s so I " "INST_H5PY" "${INST_H5PY}"
+get_willwont ${INST_H5PY}
+echo "be installing h5py"
+
+printf "%-18s = %s so I " "INST_ASTROPY" "${INST_ASTROPY}"
+get_willwont ${INST_ASTROPY}
+echo "be installing astropy"
+
+printf "%-18s = %s so I " "INST_NOSE" "${INST_NOSE}"
+get_willwont ${INST_NOSE}
+echo "be installing nose"
+
 echo
 
-if [ -z "$HDF5_DIR" ]
+if [ $INST_CONDA -eq 0 ]
 then
-    echo "HDF5_DIR is not set, so I will be installing HDF5"
-else
-    echo "HDF5_DIR=${HDF5_DIR} , so I will not be installing HDF5"
+    if [ -z "$HDF5_DIR" ]
+    then
+        echo "HDF5_DIR is not set, so I will be installing HDF5"
+    else
+        echo "HDF5_DIR=${HDF5_DIR} , so I will not be installing HDF5"
+    fi
 fi
 
 echo
@@ -464,12 +565,18 @@
 echo "If you'd rather stop, maybe think things over, even grab a sandwich, "
 echo "hit Ctrl-C."
 echo
-host_specific
-if [ ${USED_CONFIG} ]
+if [ $INST_YT_SOURCE -ne 0 ]
 then
-    echo "Settings were loaded from ${CONFIG_FILE}."
-    echo "Remove this file if you wish to return to the default settings."
-    echo
+    host_specific
+fi
+if [ $INST_CONDA -eq 0 ]
+then
+    if [ ${USED_CONFIG} ]
+    then
+        echo "Settings were loaded from ${CONFIG_FILE}."
+        echo "Remove this file if you wish to return to the default settings."
+        echo
+    fi
 fi
 echo "========================================================================"
 echo
@@ -598,474 +705,682 @@
     exit 1
 fi
 
-# Get supplemental data.
+# Set paths to what they should be when yt is activated.
+if [ $INST_CONDA -eq 0 ]
+then
+    export PATH=${DEST_DIR}/bin:$PATH
+    export LD_LIBRARY_PATH=${DEST_DIR}/lib:$LD_LIBRARY_PATH
+    export PYTHONPATH=${DEST_DIR}/lib/${PYTHON_EXEC}/site-packages
 
-mkdir -p ${DEST_DIR}/data
-cd ${DEST_DIR}/data
-echo 'de6d8c6ea849f0206d219303329a0276b3cce7c051eec34377d42aacbe0a4f47ac5145eb08966a338ecddd2b83c8f787ca9956508ad5c39ee2088ad875166410  cloudy_emissivity.h5' > cloudy_emissivity.h5.sha512
-[ ! -e cloudy_emissivity.h5 ] && get_ytdata cloudy_emissivity.h5
-echo '0f714ae2eace0141b1381abf1160dc8f8a521335e886f99919caf3beb31df1fe271d67c7b2a804b1467949eb16b0ef87a3d53abad0e8160fccac1e90d8d9e85f  apec_emissivity.h5' > apec_emissivity.h5.sha512
-[ ! -e apec_emissivity.h5 ] && get_ytdata apec_emissivity.h5
+    # Write config settings to file.
+    CONFIG_FILE=${DEST_DIR}/.yt_config
+    mkdir -p ${DEST_DIR}
+    if [ -z "${REINST_YT}" ] || [ ${REINST_YT} -ne 1 ]
+    then
+        write_config
+    elif [ ${REINST_YT} ] && [ ${REINST_YT} -eq 1 ] && [ -f ${CONFIG_FILE} ]
+    then
+        USED_CONFIG=1
+        source ${CONFIG_FILE}
+    fi
+    
+    # Get supplemental data.
 
-# Set paths to what they should be when yt is activated.
-export PATH=${DEST_DIR}/bin:$PATH
-export LD_LIBRARY_PATH=${DEST_DIR}/lib:$LD_LIBRARY_PATH
-export PYTHONPATH=${DEST_DIR}/lib/${PYTHON_EXEC}/site-packages
+    mkdir -p ${DEST_DIR}/data
+    cd ${DEST_DIR}/data
+    echo 'de6d8c6ea849f0206d219303329a0276b3cce7c051eec34377d42aacbe0a4f47ac5145eb08966a338ecddd2b83c8f787ca9956508ad5c39ee2088ad875166410  cloudy_emissivity.h5' > cloudy_emissivity.h5.sha512
+    [ ! -e cloudy_emissivity.h5 ] && get_ytdata cloudy_emissivity.h5
+    echo '0f714ae2eace0141b1381abf1160dc8f8a521335e886f99919caf3beb31df1fe271d67c7b2a804b1467949eb16b0ef87a3d53abad0e8160fccac1e90d8d9e85f  apec_emissivity.h5' > apec_emissivity.h5.sha512
+    [ ! -e apec_emissivity.h5 ] && get_ytdata apec_emissivity.h5
+    
+    mkdir -p ${DEST_DIR}/src
+    cd ${DEST_DIR}/src
 
-mkdir -p ${DEST_DIR}/src
-cd ${DEST_DIR}/src
+    PYTHON2='Python-2.7.9'
+    PYTHON3='Python-3.4.3'
+    CYTHON='Cython-0.22'
+    PYX='PyX-0.12.1'
+    BZLIB='bzip2-1.0.6'
+    FREETYPE_VER='freetype-2.4.12' 
+    H5PY='h5py-2.5.0'
+    HDF5='hdf5-1.8.14' 
+    LAPACK='lapack-3.4.2'
+    PNG=libpng-1.6.3
+    MATPLOTLIB='matplotlib-1.4.3'
+    MERCURIAL='mercurial-3.4'
+    NOSE='nose-1.3.6'
+    NUMPY='numpy-1.9.2'
+    PYTHON_HGLIB='python-hglib-1.6'
+    ROCKSTAR='rockstar-0.99.6'
+    SCIPY='scipy-0.15.1'
+    SQLITE='sqlite-autoconf-3071700'
+    SYMPY='sympy-0.7.6'
+    ZLIB='zlib-1.2.8'
+    SETUPTOOLS='setuptools-18.0.1'
+    
+    # Now we dump all our SHA512 files out.
+    echo '856220fa579e272ac38dcef091760f527431ff3b98df9af6e68416fcf77d9659ac5abe5c7dee41331f359614637a4ff452033085335ee499830ed126ab584267  Cython-0.22.tar.gz' > Cython-0.22.tar.gz.sha512
+    echo '4941f5aa21aff3743546495fb073c10d2657ff42b2aff401903498638093d0e31e344cce778980f28a7170c6d29eab72ac074277b9d4088376e8692dc71e55c1  PyX-0.12.1.tar.gz' > PyX-0.12.1.tar.gz.sha512
+    echo 'a42f28ed8e49f04cf89e2ea7434c5ecbc264e7188dcb79ab97f745adf664dd9ab57f9a913543731635f90859536244ac37dca9adf0fc2aa1b215ba884839d160  Python-2.7.9.tgz' > Python-2.7.9.tgz.sha512
+    echo '609cc82586fabecb25f25ecb410f2938e01d21cde85dd3f8824fe55c6edde9ecf3b7609195473d3fa05a16b9b121464f5414db1a0187103b78ea6edfa71684a7  Python-3.4.3.tgz' > Python-3.4.3.tgz.sha512
+    echo '276bd9c061ec9a27d478b33078a86f93164ee2da72210e12e2c9da71dcffeb64767e4460b93f257302b09328eda8655e93c4b9ae85e74472869afbeae35ca71e  blas.tar.gz' > blas.tar.gz.sha512
+    echo '00ace5438cfa0c577e5f578d8a808613187eff5217c35164ffe044fbafdfec9e98f4192c02a7d67e01e5a5ccced630583ad1003c37697219b0f147343a3fdd12  bzip2-1.0.6.tar.gz' > bzip2-1.0.6.tar.gz.sha512
+    echo '609a68a3675087e0cc95268574f31e104549daa48efe15a25a33b8e269a93b4bd160f4c3e8178dca9c950ef5ca514b039d6fd1b45db6af57f25342464d0429ce  freetype-2.4.12.tar.gz' > freetype-2.4.12.tar.gz.sha512
+    echo '4a83f9ae1855a7fad90133b327d426201c8ccfd2e7fbe9f39b2d61a2eee2f3ebe2ea02cf80f3d4e1ad659f8e790c173df8cc99b87d0b7ce63d34aa88cfdc7939  h5py-2.5.0.tar.gz' > h5py-2.5.0.tar.gz.sha512
+    echo '4073fba510ccadaba41db0939f909613c9cb52ba8fb6c1062fc9118edc601394c75e102310be1af4077d07c9b327e6bbb1a6359939a7268dc140382d0c1e0199  hdf5-1.8.14.tar.gz' > hdf5-1.8.14.tar.gz.sha512
+    echo '8770214491e31f0a7a3efaade90eee7b0eb20a8a6ab635c5f854d78263f59a1849133c14ef5123d01023f0110cbb9fc6f818da053c01277914ae81473430a952  lapack-3.4.2.tar.gz' > lapack-3.4.2.tar.gz.sha512
+    echo '887582e5a22e4cde338aa8fec7a89f6dd31f2f02b8842735f00f970f64582333fa03401cea6d01704083403c7e8b7ebc26655468ce930165673b33efa4bcd586  libpng-1.6.3.tar.gz' > libpng-1.6.3.tar.gz.sha512
+    echo '51b0f58b2618b47b653e17e4f6b6a1215d3a3b0f1331ce3555cc7435e365d9c75693f289ce12fe3bf8f69fd57b663e545f0f1c2c94e81eaa661cac0689e125f5  matplotlib-1.4.3.tar.gz' > matplotlib-1.4.3.tar.gz.sha512
+    echo 'a61b0d4cf528136991243bb23ac972c11c50ab5681d09f8b2d12cf7d37d3a9d76262f7fe6e7a1834bf6d03e8dc0ebbd9231da982e049e09830341dabefe5d064  mercurial-3.4.tar.gz' > mercurial-3.4.tar.gz.sha512
+    echo 'd0cede08dc33a8ac0af0f18063e57f31b615f06e911edb5ca264575174d8f4adb4338448968c403811d9dcc60f38ade3164662d6c7b69b499f56f0984bb6283c  nose-1.3.6.tar.gz' > nose-1.3.6.tar.gz.sha512
+    echo '70470ebb9afef5dfd0c83ceb7a9d5f1b7a072b1a9b54b04f04f5ed50fbaedd5b4906bd500472268d478f94df9e749a88698b1ff30f2d80258e7f3fec040617d9  numpy-1.9.2.tar.gz' > numpy-1.9.2.tar.gz.sha512
+    echo 'bfd10455e74e30df568c4c4827140fb6cc29893b0e062ce1764bd52852ec7487a70a0f5ea53c3fca7886f5d36365c9f4db52b8c93cad35fb67beeb44a2d56f2d  python-hglib-1.6.tar.gz' > python-hglib-1.6.tar.gz.sha512
+    echo 'fff4412d850c431a1b4e6ee3b17958ee5ab3beb81e6cb8a8e7d56d368751eaa8781d7c3e69d932dc002d718fddc66a72098acfe74cfe29ec80b24e6736317275  scipy-0.15.1.tar.gz' > scipy-0.15.1.tar.gz.sha512
+    echo '96f3e51b46741450bc6b63779c10ebb4a7066860fe544385d64d1eda52592e376a589ef282ace2e1df73df61c10eab1a0d793abbdaf770e60289494d4bf3bcb4  sqlite-autoconf-3071700.tar.gz' > sqlite-autoconf-3071700.tar.gz.sha512
+    echo 'ce0f1a17ac01eb48aec31fc0ad431d9d7ed9907f0e8584a6d79d0ffe6864fe62e203fe3f2a3c3e4e3d485809750ce07507a6488e776a388a7a9a713110882fcf  sympy-0.7.6.tar.gz' > sympy-0.7.6.tar.gz.sha512
+    echo 'ece209d4c7ec0cb58ede791444dc754e0d10811cbbdebe3df61c0fd9f9f9867c1c3ccd5f1827f847c005e24eef34fb5bf87b5d3f894d75da04f1797538290e4a  zlib-1.2.8.tar.gz' > zlib-1.2.8.tar.gz.sha512
+    echo '9b318ce2ee2cf787929dcb886d76c492b433e71024fda9452d8b4927652a298d6bd1bdb7a4c73883a98e100024f89b46ea8aa14b250f896e549e6dd7e10a6b41  setuptools-18.0.1.tar.gz' > setuptools-18.0.1.tar.gz.sha512
+    # Individual processes
+    [ -z "$HDF5_DIR" ] && get_ytproject $HDF5.tar.gz
+    [ $INST_ZLIB -eq 1 ] && get_ytproject $ZLIB.tar.gz
+    [ $INST_BZLIB -eq 1 ] && get_ytproject $BZLIB.tar.gz
+    [ $INST_PNG -eq 1 ] && get_ytproject $PNG.tar.gz
+    [ $INST_FTYPE -eq 1 ] && get_ytproject $FREETYPE_VER.tar.gz
+    [ $INST_SQLITE3 -eq 1 ] && get_ytproject $SQLITE.tar.gz
+    [ $INST_PYX -eq 1 ] && get_ytproject $PYX.tar.gz
+    [ $INST_SCIPY -eq 1 ] && get_ytproject $SCIPY.tar.gz
+    [ $INST_SCIPY -eq 1 ] && get_ytproject blas.tar.gz
+    [ $INST_SCIPY -eq 1 ] && get_ytproject $LAPACK.tar.gz
+    [ $INST_HG -eq 1 ] && get_ytproject $MERCURIAL.tar.gz
+    [ $INST_PY3 -eq 1 ] && get_ytproject $PYTHON3.tgz
+    [ $INST_H5PY -eq 1 ] && get_ytproject $H5PY.tar.gz
+    [ $INST_NOSE -eq 1 ] && get_ytproject $NOSE.tar.gz
+    get_ytproject $PYTHON2.tgz
+    get_ytproject $NUMPY.tar.gz
+    get_ytproject $MATPLOTLIB.tar.gz
+    get_ytproject $CYTHON.tar.gz
+    get_ytproject $PYTHON_HGLIB.tar.gz
+    get_ytproject $SYMPY.tar.gz
+    get_ytproject $SETUPTOOLS.tar.gz
 
-PYTHON2='Python-2.7.9'
-PYTHON3='Python-3.4.3'
-CYTHON='Cython-0.22'
-PYX='PyX-0.12.1'
-BZLIB='bzip2-1.0.6'
-FREETYPE_VER='freetype-2.4.12' 
-H5PY='h5py-2.5.0'
-HDF5='hdf5-1.8.14' 
-LAPACK='lapack-3.4.2'
-PNG=libpng-1.6.3
-MATPLOTLIB='matplotlib-1.4.3'
-MERCURIAL='mercurial-3.4'
-NOSE='nose-1.3.6'
-NUMPY='numpy-1.9.2'
-PYTHON_HGLIB='python-hglib-1.6'
-ROCKSTAR='rockstar-0.99.6'
-SCIPY='scipy-0.15.1'
-SQLITE='sqlite-autoconf-3071700'
-SYMPY='sympy-0.7.6'
-ZLIB='zlib-1.2.8'
-SETUPTOOLS='setuptools-18.0.1'
+    if [ $INST_BZLIB -eq 1 ]
+    then
+        if [ ! -e $BZLIB/done ]
+        then
+            [ ! -e $BZLIB ] && tar xfz $BZLIB.tar.gz
+            echo "Installing BZLIB"
+            cd $BZLIB
+            if [ `uname` = "Darwin" ]
+            then
+                if [ -z "${CC}" ]
+                then
+                    sed -i.bak 's/soname/install_name/' Makefile-libbz2_so
+                else
+                    sed -i.bak -e 's/soname/install_name/' -e "s|CC=gcc|CC=${CC}|" Makefile-libbz2_so
+                fi
+            fi
+            ( make install CFLAGS=-fPIC LDFLAGS=-fPIC PREFIX=${DEST_DIR} 2>&1 ) 1>> ${LOG_FILE} || do_exit
+            ( make -f Makefile-libbz2_so CFLAGS=-fPIC LDFLAGS=-fPIC PREFIX=${DEST_DIR} 2>&1 ) 1>> ${LOG_FILE} || do_exit
+            ( cp -v libbz2.so.1.0.6 ${DEST_DIR}/lib 2>&1 ) 1>> ${LOG_FILE} || do_exit
+            touch done
+            cd ..
+        fi
+        BZLIB_DIR=${DEST_DIR}
+        export LDFLAGS="${LDFLAGS} -L${BZLIB_DIR}/lib/ -L${BZLIB_DIR}/lib64/"
+        LD_LIBRARY_PATH="${LD_LIBRARY_PATH}:${BZLIB_DIR}/lib/"
+    fi
 
-# Now we dump all our SHA512 files out.
-echo '856220fa579e272ac38dcef091760f527431ff3b98df9af6e68416fcf77d9659ac5abe5c7dee41331f359614637a4ff452033085335ee499830ed126ab584267  Cython-0.22.tar.gz' > Cython-0.22.tar.gz.sha512
-echo '4941f5aa21aff3743546495fb073c10d2657ff42b2aff401903498638093d0e31e344cce778980f28a7170c6d29eab72ac074277b9d4088376e8692dc71e55c1  PyX-0.12.1.tar.gz' > PyX-0.12.1.tar.gz.sha512
-echo 'a42f28ed8e49f04cf89e2ea7434c5ecbc264e7188dcb79ab97f745adf664dd9ab57f9a913543731635f90859536244ac37dca9adf0fc2aa1b215ba884839d160  Python-2.7.9.tgz' > Python-2.7.9.tgz.sha512
-echo '609cc82586fabecb25f25ecb410f2938e01d21cde85dd3f8824fe55c6edde9ecf3b7609195473d3fa05a16b9b121464f5414db1a0187103b78ea6edfa71684a7  Python-3.4.3.tgz' > Python-3.4.3.tgz.sha512
-echo '276bd9c061ec9a27d478b33078a86f93164ee2da72210e12e2c9da71dcffeb64767e4460b93f257302b09328eda8655e93c4b9ae85e74472869afbeae35ca71e  blas.tar.gz' > blas.tar.gz.sha512
-echo '00ace5438cfa0c577e5f578d8a808613187eff5217c35164ffe044fbafdfec9e98f4192c02a7d67e01e5a5ccced630583ad1003c37697219b0f147343a3fdd12  bzip2-1.0.6.tar.gz' > bzip2-1.0.6.tar.gz.sha512
-echo '609a68a3675087e0cc95268574f31e104549daa48efe15a25a33b8e269a93b4bd160f4c3e8178dca9c950ef5ca514b039d6fd1b45db6af57f25342464d0429ce  freetype-2.4.12.tar.gz' > freetype-2.4.12.tar.gz.sha512
-echo '4a83f9ae1855a7fad90133b327d426201c8ccfd2e7fbe9f39b2d61a2eee2f3ebe2ea02cf80f3d4e1ad659f8e790c173df8cc99b87d0b7ce63d34aa88cfdc7939  h5py-2.5.0.tar.gz' > h5py-2.5.0.tar.gz.sha512
-echo '4073fba510ccadaba41db0939f909613c9cb52ba8fb6c1062fc9118edc601394c75e102310be1af4077d07c9b327e6bbb1a6359939a7268dc140382d0c1e0199  hdf5-1.8.14.tar.gz' > hdf5-1.8.14.tar.gz.sha512
-echo '8770214491e31f0a7a3efaade90eee7b0eb20a8a6ab635c5f854d78263f59a1849133c14ef5123d01023f0110cbb9fc6f818da053c01277914ae81473430a952  lapack-3.4.2.tar.gz' > lapack-3.4.2.tar.gz.sha512
-echo '887582e5a22e4cde338aa8fec7a89f6dd31f2f02b8842735f00f970f64582333fa03401cea6d01704083403c7e8b7ebc26655468ce930165673b33efa4bcd586  libpng-1.6.3.tar.gz' > libpng-1.6.3.tar.gz.sha512
-echo '51b0f58b2618b47b653e17e4f6b6a1215d3a3b0f1331ce3555cc7435e365d9c75693f289ce12fe3bf8f69fd57b663e545f0f1c2c94e81eaa661cac0689e125f5  matplotlib-1.4.3.tar.gz' > matplotlib-1.4.3.tar.gz.sha512
-echo 'a61b0d4cf528136991243bb23ac972c11c50ab5681d09f8b2d12cf7d37d3a9d76262f7fe6e7a1834bf6d03e8dc0ebbd9231da982e049e09830341dabefe5d064  mercurial-3.4.tar.gz' > mercurial-3.4.tar.gz.sha512
-echo 'd0cede08dc33a8ac0af0f18063e57f31b615f06e911edb5ca264575174d8f4adb4338448968c403811d9dcc60f38ade3164662d6c7b69b499f56f0984bb6283c  nose-1.3.6.tar.gz' > nose-1.3.6.tar.gz.sha512
-echo '70470ebb9afef5dfd0c83ceb7a9d5f1b7a072b1a9b54b04f04f5ed50fbaedd5b4906bd500472268d478f94df9e749a88698b1ff30f2d80258e7f3fec040617d9  numpy-1.9.2.tar.gz' > numpy-1.9.2.tar.gz.sha512
-echo 'bfd10455e74e30df568c4c4827140fb6cc29893b0e062ce1764bd52852ec7487a70a0f5ea53c3fca7886f5d36365c9f4db52b8c93cad35fb67beeb44a2d56f2d  python-hglib-1.6.tar.gz' > python-hglib-1.6.tar.gz.sha512
-echo 'fff4412d850c431a1b4e6ee3b17958ee5ab3beb81e6cb8a8e7d56d368751eaa8781d7c3e69d932dc002d718fddc66a72098acfe74cfe29ec80b24e6736317275  scipy-0.15.1.tar.gz' > scipy-0.15.1.tar.gz.sha512
-echo '96f3e51b46741450bc6b63779c10ebb4a7066860fe544385d64d1eda52592e376a589ef282ace2e1df73df61c10eab1a0d793abbdaf770e60289494d4bf3bcb4  sqlite-autoconf-3071700.tar.gz' > sqlite-autoconf-3071700.tar.gz.sha512
-echo 'ce0f1a17ac01eb48aec31fc0ad431d9d7ed9907f0e8584a6d79d0ffe6864fe62e203fe3f2a3c3e4e3d485809750ce07507a6488e776a388a7a9a713110882fcf  sympy-0.7.6.tar.gz' > sympy-0.7.6.tar.gz.sha512
-echo 'ece209d4c7ec0cb58ede791444dc754e0d10811cbbdebe3df61c0fd9f9f9867c1c3ccd5f1827f847c005e24eef34fb5bf87b5d3f894d75da04f1797538290e4a  zlib-1.2.8.tar.gz' > zlib-1.2.8.tar.gz.sha512
-echo '9b318ce2ee2cf787929dcb886d76c492b433e71024fda9452d8b4927652a298d6bd1bdb7a4c73883a98e100024f89b46ea8aa14b250f896e549e6dd7e10a6b41  setuptools-18.0.1.tar.gz' > setuptools-18.0.1.tar.gz.sha512
-# Individual processes
-[ -z "$HDF5_DIR" ] && get_ytproject $HDF5.tar.gz
-[ $INST_ZLIB -eq 1 ] && get_ytproject $ZLIB.tar.gz
-[ $INST_BZLIB -eq 1 ] && get_ytproject $BZLIB.tar.gz
-[ $INST_PNG -eq 1 ] && get_ytproject $PNG.tar.gz
-[ $INST_FTYPE -eq 1 ] && get_ytproject $FREETYPE_VER.tar.gz
-[ $INST_SQLITE3 -eq 1 ] && get_ytproject $SQLITE.tar.gz
-[ $INST_PYX -eq 1 ] && get_ytproject $PYX.tar.gz
-[ $INST_SCIPY -eq 1 ] && get_ytproject $SCIPY.tar.gz
-[ $INST_SCIPY -eq 1 ] && get_ytproject blas.tar.gz
-[ $INST_SCIPY -eq 1 ] && get_ytproject $LAPACK.tar.gz
-[ $INST_HG -eq 1 ] && get_ytproject $MERCURIAL.tar.gz
-[ $INST_PY3 -eq 1 ] && get_ytproject $PYTHON3.tgz
-get_ytproject $PYTHON2.tgz
-get_ytproject $NUMPY.tar.gz
-get_ytproject $MATPLOTLIB.tar.gz
-get_ytproject $H5PY.tar.gz
-get_ytproject $CYTHON.tar.gz
-get_ytproject $NOSE.tar.gz
-get_ytproject $PYTHON_HGLIB.tar.gz
-get_ytproject $SYMPY.tar.gz
-get_ytproject $SETUPTOOLS.tar.gz
-if [ $INST_BZLIB -eq 1 ]
-then
-    if [ ! -e $BZLIB/done ]
+    if [ $INST_ZLIB -eq 1 ]
     then
-        [ ! -e $BZLIB ] && tar xfz $BZLIB.tar.gz
-        echo "Installing BZLIB"
-        cd $BZLIB
-        if [ `uname` = "Darwin" ]
+        if [ ! -e $ZLIB/done ]
         then
-            if [ -z "${CC}" ]
-            then
-                sed -i.bak 's/soname/install_name/' Makefile-libbz2_so
-            else
-                sed -i.bak -e 's/soname/install_name/' -e "s|CC=gcc|CC=${CC}|" Makefile-libbz2_so
-            fi
+            [ ! -e $ZLIB ] && tar xfz $ZLIB.tar.gz
+            echo "Installing ZLIB"
+            cd $ZLIB
+            ( ./configure --shared --prefix=${DEST_DIR}/ 2>&1 ) 1>> ${LOG_FILE} || do_exit
+            ( make install 2>&1 ) 1>> ${LOG_FILE} || do_exit
+            ( make clean 2>&1) 1>> ${LOG_FILE} || do_exit
+            touch done
+            cd ..
         fi
-        ( make install CFLAGS=-fPIC LDFLAGS=-fPIC PREFIX=${DEST_DIR} 2>&1 ) 1>> ${LOG_FILE} || do_exit
-        ( make -f Makefile-libbz2_so CFLAGS=-fPIC LDFLAGS=-fPIC PREFIX=${DEST_DIR} 2>&1 ) 1>> ${LOG_FILE} || do_exit
-        ( cp -v libbz2.so.1.0.6 ${DEST_DIR}/lib 2>&1 ) 1>> ${LOG_FILE} || do_exit
-        touch done
-        cd ..
+        ZLIB_DIR=${DEST_DIR}
+        export LDFLAGS="${LDFLAGS} -L${ZLIB_DIR}/lib/ -L${ZLIB_DIR}/lib64/"
+        LD_LIBRARY_PATH="${LD_LIBRARY_PATH}:${ZLIB_DIR}/lib/"
     fi
-    BZLIB_DIR=${DEST_DIR}
-    export LDFLAGS="${LDFLAGS} -L${BZLIB_DIR}/lib/ -L${BZLIB_DIR}/lib64/"
-    LD_LIBRARY_PATH="${LD_LIBRARY_PATH}:${BZLIB_DIR}/lib/"
-fi
+    
+    if [ $INST_PNG -eq 1 ]
+    then
+        if [ ! -e $PNG/done ]
+        then
+            [ ! -e $PNG ] && tar xfz $PNG.tar.gz
+            echo "Installing PNG"
+            cd $PNG
+            ( ./configure CPPFLAGS=-I${DEST_DIR}/include CFLAGS=-I${DEST_DIR}/include --prefix=${DEST_DIR}/ 2>&1 ) 1>> ${LOG_FILE} || do_exit
+            ( make install 2>&1 ) 1>> ${LOG_FILE} || do_exit
+            ( make clean 2>&1) 1>> ${LOG_FILE} || do_exit
+            touch done
+            cd ..
+        fi
+        PNG_DIR=${DEST_DIR}
+        export LDFLAGS="${LDFLAGS} -L${PNG_DIR}/lib/ -L${PNG_DIR}/lib64/"
+        LD_LIBRARY_PATH="${LD_LIBRARY_PATH}:${PNG_DIR}/lib/"
+    fi
 
-if [ $INST_ZLIB -eq 1 ]
-then
-    if [ ! -e $ZLIB/done ]
+    if [ $INST_FTYPE -eq 1 ]
     then
-        [ ! -e $ZLIB ] && tar xfz $ZLIB.tar.gz
-        echo "Installing ZLIB"
-        cd $ZLIB
-        ( ./configure --shared --prefix=${DEST_DIR}/ 2>&1 ) 1>> ${LOG_FILE} || do_exit
+        if [ ! -e $FREETYPE_VER/done ]
+        then
+            [ ! -e $FREETYPE_VER ] && tar xfz $FREETYPE_VER.tar.gz
+            echo "Installing FreeType2"
+            cd $FREETYPE_VER
+            ( ./configure CFLAGS=-I${DEST_DIR}/include --prefix=${DEST_DIR}/ 2>&1 ) 1>> ${LOG_FILE} || do_exit
+            ( make 2>&1 ) 1>> ${LOG_FILE} || do_exit
+            ( make install 2>&1 ) 1>> ${LOG_FILE} || do_exit
+            ( make clean 2>&1) 1>> ${LOG_FILE} || do_exit
+            touch done
+            cd ..
+        fi
+        FTYPE_DIR=${DEST_DIR}
+        export LDFLAGS="${LDFLAGS} -L${FTYPE_DIR}/lib/ -L${FTYPE_DIR}/lib64/"
+        LD_LIBRARY_PATH="${LD_LIBRARY_PATH}:${FTYPE_DIR}/lib/"
+    fi
+
+    if [ -z "$HDF5_DIR" ]
+    then
+        if [ ! -e $HDF5/done ]
+        then
+            [ ! -e $HDF5 ] && tar xfz $HDF5.tar.gz
+            echo "Installing HDF5"
+            cd $HDF5
+            ( ./configure --prefix=${DEST_DIR}/ --enable-shared 2>&1 ) 1>> ${LOG_FILE} || do_exit
+            ( make ${MAKE_PROCS} install 2>&1 ) 1>> ${LOG_FILE} || do_exit
+            ( make clean 2>&1) 1>> ${LOG_FILE} || do_exit
+            touch done
+            cd ..
+        fi
+        export HDF5_DIR=${DEST_DIR}
+    else
+        export HDF5_DIR=${HDF5_DIR}
+    fi
+    export HDF5_API=16
+
+    if [ $INST_SQLITE3 -eq 1 ]
+    then
+        if [ ! -e $SQLITE/done ]
+        then
+            [ ! -e $SQLITE ] && tar xfz $SQLITE.tar.gz
+            echo "Installing SQLite3"
+            cd $SQLITE
+            ( ./configure --prefix=${DEST_DIR}/ 2>&1 ) 1>> ${LOG_FILE} || do_exit
+            ( make ${MAKE_PROCS} install 2>&1 ) 1>> ${LOG_FILE} || do_exit
+            ( make clean 2>&1) 1>> ${LOG_FILE} || do_exit
+            touch done
+            cd ..
+        fi
+    fi
+
+    if [ ! -e $PYTHON2/done ]
+    then
+        echo "Installing Python 2. This may take a while, but don't worry. yt loves you."
+        [ ! -e $PYTHON2 ] && tar xfz $PYTHON2.tgz
+        cd $PYTHON2
+        ( ./configure --prefix=${DEST_DIR}/ ${PYCONF_ARGS} 2>&1 ) 1>> ${LOG_FILE} || do_exit
+        
+        ( make ${MAKE_PROCS} 2>&1 ) 1>> ${LOG_FILE} || do_exit
         ( make install 2>&1 ) 1>> ${LOG_FILE} || do_exit
+        ( ln -sf ${DEST_DIR}/bin/python2.7 ${DEST_DIR}/bin/pyyt 2>&1 ) 1>> ${LOG_FILE}
         ( make clean 2>&1) 1>> ${LOG_FILE} || do_exit
         touch done
         cd ..
     fi
-    ZLIB_DIR=${DEST_DIR}
-    export LDFLAGS="${LDFLAGS} -L${ZLIB_DIR}/lib/ -L${ZLIB_DIR}/lib64/"
-    LD_LIBRARY_PATH="${LD_LIBRARY_PATH}:${ZLIB_DIR}/lib/"
-fi
 
-if [ $INST_PNG -eq 1 ]
-then
-    if [ ! -e $PNG/done ]
+    if [ $INST_PY3 -eq 1 ]
     then
-        [ ! -e $PNG ] && tar xfz $PNG.tar.gz
-        echo "Installing PNG"
-        cd $PNG
-        ( ./configure CPPFLAGS=-I${DEST_DIR}/include CFLAGS=-I${DEST_DIR}/include --prefix=${DEST_DIR}/ 2>&1 ) 1>> ${LOG_FILE} || do_exit
-        ( make install 2>&1 ) 1>> ${LOG_FILE} || do_exit
-        ( make clean 2>&1) 1>> ${LOG_FILE} || do_exit
-        touch done
-        cd ..
+        if [ ! -e $PYTHON3/done ]
+        then
+            echo "Installing Python 3. Because two Pythons are better than one."
+            [ ! -e $PYTHON3 ] && tar xfz $PYTHON3.tgz
+            cd $PYTHON3
+            ( ./configure --prefix=${DEST_DIR}/ ${PYCONF_ARGS} 2>&1 ) 1>> ${LOG_FILE} || do_exit
+            
+            ( make ${MAKE_PROCS} 2>&1 ) 1>> ${LOG_FILE} || do_exit
+            ( make install 2>&1 ) 1>> ${LOG_FILE} || do_exit
+            ( ln -sf ${DEST_DIR}/bin/python3.4 ${DEST_DIR}/bin/pyyt 2>&1 ) 1>> ${LOG_FILE}
+            ( ln -sf ${DEST_DIR}/bin/python3.4 ${DEST_DIR}/bin/python 2>&1 ) 1>> ${LOG_FILE}
+            ( ln -sf ${DEST_DIR}/bin/python3-config ${DEST_DIR}/bin/python-config 2>&1 ) 1>> ${LOG_FILE}
+            ( make clean 2>&1) 1>> ${LOG_FILE} || do_exit
+            touch done
+            cd ..
+        fi
     fi
-    PNG_DIR=${DEST_DIR}
-    export LDFLAGS="${LDFLAGS} -L${PNG_DIR}/lib/ -L${PNG_DIR}/lib64/"
-    LD_LIBRARY_PATH="${LD_LIBRARY_PATH}:${PNG_DIR}/lib/"
-fi
 
-if [ $INST_FTYPE -eq 1 ]
-then
-    if [ ! -e $FREETYPE_VER/done ]
+    export PYTHONPATH=${DEST_DIR}/lib/${PYTHON_EXEC}/site-packages/
+
+    # Install setuptools
+    do_setup_py $SETUPTOOLS
+
+    if [ $INST_HG -eq 1 ]
     then
-        [ ! -e $FREETYPE_VER ] && tar xfz $FREETYPE_VER.tar.gz
-        echo "Installing FreeType2"
-        cd $FREETYPE_VER
-        ( ./configure CFLAGS=-I${DEST_DIR}/include --prefix=${DEST_DIR}/ 2>&1 ) 1>> ${LOG_FILE} || do_exit
-        ( make 2>&1 ) 1>> ${LOG_FILE} || do_exit
-        ( make install 2>&1 ) 1>> ${LOG_FILE} || do_exit
-        ( make clean 2>&1) 1>> ${LOG_FILE} || do_exit
-        touch done
-        cd ..
+        do_setup_py $MERCURIAL
+        export HG_EXEC=${DEST_DIR}/bin/hg
+    else
+        # We assume that hg can be found in the path.
+        if type -P hg &>/dev/null
+        then
+            export HG_EXEC=hg
+        else
+            echo "Cannot find mercurial.  Please set INST_HG=1."
+            do_exit
+        fi
     fi
-    FTYPE_DIR=${DEST_DIR}
-    export LDFLAGS="${LDFLAGS} -L${FTYPE_DIR}/lib/ -L${FTYPE_DIR}/lib64/"
-    LD_LIBRARY_PATH="${LD_LIBRARY_PATH}:${FTYPE_DIR}/lib/"
-fi
 
-if [ -z "$HDF5_DIR" ]
-then
-    if [ ! -e $HDF5/done ]
+    if [ -z "$YT_DIR" ]
     then
-        [ ! -e $HDF5 ] && tar xfz $HDF5.tar.gz
-        echo "Installing HDF5"
-        cd $HDF5
-        ( ./configure --prefix=${DEST_DIR}/ --enable-shared 2>&1 ) 1>> ${LOG_FILE} || do_exit
-        ( make ${MAKE_PROCS} install 2>&1 ) 1>> ${LOG_FILE} || do_exit
-        ( make clean 2>&1) 1>> ${LOG_FILE} || do_exit
-        touch done
-        cd ..
+        if [ -e $ORIG_PWD/yt/mods.py ]
+        then
+            YT_DIR="$ORIG_PWD"
+        elif [ -e $ORIG_PWD/../yt/mods.py ]
+        then
+            YT_DIR=`dirname $ORIG_PWD`
+        elif [ ! -e yt-hg ]
+        then
+            echo "Cloning yt"
+            YT_DIR="$PWD/yt-hg/"
+            ( ${HG_EXEC} --debug clone https://bitbucket.org/yt_analysis/yt-supplemental/ 2>&1 ) 1>> ${LOG_FILE}
+            # Recently the hg server has had some issues with timeouts.  In lieu of
+            # a new webserver, we are now moving to a three-stage process.
+            # First we clone the repo, but only up to r0.
+            ( ${HG_EXEC} --debug clone https://bitbucket.org/yt_analysis/yt/ ./yt-hg 2>&1 ) 1>> ${LOG_FILE}
+            # Now we update to the branch we're interested in.
+            ( ${HG_EXEC} -R ${YT_DIR} up -C ${BRANCH} 2>&1 ) 1>> ${LOG_FILE}
+        elif [ -e yt-hg ]
+        then
+            YT_DIR="$PWD/yt-hg/"
+        fi
+        echo Setting YT_DIR=${YT_DIR}
     fi
-    export HDF5_DIR=${DEST_DIR}
+    
+    # This fixes problems with gfortran linking.
+    unset LDFLAGS
+
+    echo "Installing pip"
+    ( ${GETFILE} https://bootstrap.pypa.io/get-pip.py 2>&1 ) 1>> ${LOG_FILE} || do_exit
+    ( ${DEST_DIR}/bin/${PYTHON_EXEC} get-pip.py 2>&1 ) 1>> ${LOG_FILE} || do_exit
+
+    if [ $INST_SCIPY -eq 0 ]
+    then
+        do_setup_py $NUMPY ${NUMPY_ARGS}
+    else
+        if [ ! -e $SCIPY/done ]
+        then
+            if [ ! -e BLAS/done ]
+            then
+                tar xfz blas.tar.gz
+                echo "Building BLAS"
+                cd BLAS
+                gfortran -O2 -fPIC -fno-second-underscore -c *.f
+                ( ar r libfblas.a *.o 2>&1 ) 1>> ${LOG_FILE}
+                ( ranlib libfblas.a 2>&1 ) 1>> ${LOG_FILE}
+                rm -rf *.o
+                touch done
+                cd ..
+            fi
+            if [ ! -e $LAPACK/done ]
+            then
+                tar xfz $LAPACK.tar.gz
+                echo "Building LAPACK"
+                cd $LAPACK/
+                cp INSTALL/make.inc.gfortran make.inc
+                ( make lapacklib OPTS="-fPIC -O2" NOOPT="-fPIC -O0" CFLAGS=-fPIC LDFLAGS=-fPIC 2>&1 ) 1>> ${LOG_FILE} || do_exit
+                touch done
+                cd ..
+            fi
+        fi
+        export BLAS=$PWD/BLAS/libfblas.a
+        export LAPACK=$PWD/$LAPACK/liblapack.a
+        do_setup_py $NUMPY ${NUMPY_ARGS}
+        do_setup_py $SCIPY ${NUMPY_ARGS}
+    fi
+    
+    if [ -n "${MPL_SUPP_LDFLAGS}" ]
+    then
+        OLD_LDFLAGS=${LDFLAGS}
+        export LDFLAGS="${MPL_SUPP_LDFLAGS}"
+        echo "Setting LDFLAGS ${LDFLAGS}"
+    fi
+    if [ -n "${MPL_SUPP_CXXFLAGS}" ]
+    then
+        OLD_CXXFLAGS=${CXXFLAGS}
+        export CXXFLAGS="${MPL_SUPP_CXXFLAGS}"
+        echo "Setting CXXFLAGS ${CXXFLAGS}"
+    fi
+    if [ -n "${MPL_SUPP_CFLAGS}" ]
+    then
+        OLD_CFLAGS=${CFLAGS}
+        export CFLAGS="${MPL_SUPP_CFLAGS}"
+        echo "Setting CFLAGS ${CFLAGS}"
+    fi
+    # Now we set up the basedir for matplotlib:
+    mkdir -p ${DEST_DIR}/src/$MATPLOTLIB
+    echo "[directories]" >> ${DEST_DIR}/src/$MATPLOTLIB/setup.cfg
+    echo "basedirlist = ${DEST_DIR}" >> ${DEST_DIR}/src/$MATPLOTLIB/setup.cfg
+    if [ `uname` = "Darwin" ]
+    then
+        echo "[gui_support]" >> ${DEST_DIR}/src/$MATPLOTLIB/setup.cfg
+        echo "macosx = False" >> ${DEST_DIR}/src/$MATPLOTLIB/setup.cfg
+    fi
+    
+    _user_DISPLAY=$DISPLAY
+    unset DISPLAY   # see (yt-user link missing: "Installation failure" 01/29/15)
+    do_setup_py $MATPLOTLIB
+    export DISPLAY=${_user_DISPLAY}
+    if [ -n "${OLD_LDFLAGS}" ]
+    then
+        export LDFLAGS=${OLD_LDFLAGS}
+    fi
+    [ -n "${OLD_LDFLAGS}" ] && export LDFLAGS=${OLD_LDFLAGS}
+    [ -n "${OLD_CXXFLAGS}" ] && export CXXFLAGS=${OLD_CXXFLAGS}
+    [ -n "${OLD_CFLAGS}" ] && export CFLAGS=${OLD_CFLAGS}
+    
+    echo "Installing Jupyter"
+    ( ${DEST_DIR}/bin/pip install "jupyter<2.0.0" 2>&1 ) 1>> ${LOG_FILE}
+    
+    do_setup_py $CYTHON
+    do_setup_py $H5PY
+    do_setup_py $NOSE
+    do_setup_py $PYTHON_HGLIB
+    do_setup_py $SYMPY
+    [ $INST_PYX -eq 1 ] && do_setup_py $PYX
+
+    ( ${DEST_DIR}/bin/pip install jinja2 2>&1 ) 1>> ${LOG_FILE}
+    
+    # Now we build Rockstar and set its environment variable.
+    if [ $INST_ROCKSTAR -eq 1 ]
+    then
+        if [ ! -e rockstar/done ]
+        then
+            echo "Building Rockstar"
+            if [ ! -e rockstar ]
+            then
+                ( hg clone http://bitbucket.org/MatthewTurk/rockstar 2>&1 ) 1>> ${LOG_FILE}
+            fi
+            cd rockstar
+            ( hg pull 2>&1 ) 1>> ${LOG_FILE}
+            ( hg up -C tip 2>&1 ) 1>> ${LOG_FILE}
+            ( make lib 2>&1 ) 1>> ${LOG_FILE} || do_exit
+            cp librockstar.so ${DEST_DIR}/lib
+            ROCKSTAR_DIR=${DEST_DIR}/src/rockstar
+            echo $ROCKSTAR_DIR > ${YT_DIR}/rockstar.cfg
+            touch done
+            cd ..
+        fi
+    fi
+    
+    echo "Doing yt update, wiping local changes and updating to branch ${BRANCH}"
+    MY_PWD=`pwd`
+    cd $YT_DIR
+    ( ${HG_EXEC} pull 2>&1 && ${HG_EXEC} up -C ${BRANCH} 2>&1 ) 1>> ${LOG_FILE}
+
+    echo "Installing yt"
+    [ $INST_PNG -eq 1 ] && echo $PNG_DIR > png.cfg
+    ( export PATH=$DEST_DIR/bin:$PATH ; ${DEST_DIR}/bin/${PYTHON_EXEC} setup.py develop 2>&1 ) 1>> ${LOG_FILE} || do_exit
+    touch done
+    cd $MY_PWD
+
+    if !( ( ${DEST_DIR}/bin/${PYTHON_EXEC} -c "import readline" 2>&1 )>> ${LOG_FILE}) || \
+            [[ "${MYOS##Darwin}" != "${MYOS}" && $INST_PY3 -eq 1 ]] 
+    then
+        if !( ( ${DEST_DIR}/bin/${PYTHON_EXEC} -c "import gnureadline" 2>&1 )>> ${LOG_FILE})
+        then
+            echo "Installing pure-python readline"
+            ( ${DEST_DIR}/bin/pip install gnureadline 2>&1 ) 1>> ${LOG_FILE}
+        fi
+    fi
+
+    if [ -e $HOME/.matplotlib/fontList.cache ] && \
+           ( grep -q python2.6 $HOME/.matplotlib/fontList.cache )
+    then
+        echo "WARNING WARNING WARNING WARNING WARNING WARNING WARNING"
+        echo "*******************************************************"
+        echo
+        echo "  You likely need to remove your old fontList.cache!"
+        echo "  You can do this with this command:"
+        echo ""
+        echo "  rm $HOME/.matplotlib/fontList.cache"
+        echo
+        echo "*******************************************************"
+    fi
+    
+    # Add the environment scripts
+    ( cp ${YT_DIR}/doc/activate ${DEST_DIR}/bin/activate 2>&1 ) 1>> ${LOG_FILE}
+    sed -i.bak -e "s,__YT_DIR__,${DEST_DIR}," ${DEST_DIR}/bin/activate
+    ( cp ${YT_DIR}/doc/activate.csh ${DEST_DIR}/bin/activate.csh 2>&1 ) 1>> ${LOG_FILE}
+    sed -i.bak -e "s,__YT_DIR__,${DEST_DIR}," ${DEST_DIR}/bin/activate.csh
+
+    function print_afterword
+    {
+        echo
+        echo
+        echo "========================================================================"
+        echo
+        echo "yt is now installed in $DEST_DIR ."
+        echo
+        echo "To run from this new installation, use the activate script for this "
+        echo "environment."
+        echo
+        echo "    $ source $DEST_DIR/bin/activate"
+        echo
+        echo "This modifies the environment variables YT_DEST, PATH, PYTHONPATH, and"
+        echo "LD_LIBRARY_PATH to match your new yt install.  If you use csh, just"
+        echo "append .csh to the above."
+        echo
+        echo "To get started with yt, check out the orientation:"
+        echo
+        echo "    http://yt-project.org/doc/quickstart/"
+        echo
+        echo "The source for yt is located at:"
+        echo "    $YT_DIR"
+        if [ $INST_HG -eq 1 ]
+        then
+            echo
+            echo "Mercurial has also been installed:"
+            echo
+            echo "$DEST_DIR/bin/hg"
+            echo
+        fi
+        echo
+        echo "For support, see the website and join the mailing list:"
+        echo
+        echo "    http://yt-project.org/"
+        echo "    http://yt-project.org/data/      (Sample data)"
+        echo "    http://yt-project.org/doc/       (Docs)"
+        echo
+        echo "    http://lists.spacepope.org/listinfo.cgi/yt-users-spacepope.org"
+        echo
+        echo "========================================================================"
+        echo
+        echo "Oh, look at me, still talking when there's science to do!"
+        echo "Good luck, and email the user list if you run into any problems."
+    }
+
+    print_afterword
+    print_afterword >> ${LOG_FILE}
+
+    echo "yt dependencies were last updated on" > ${DEST_DIR}/.yt_update
+    date >> ${DEST_DIR}/.yt_update
 else
-    export HDF5_DIR=${HDF5_DIR}
-fi
-export HDF5_API=16
+    MYARCH=`uname -m`       # A guess at the architecture
+    MYOS=`uname -s`         # A guess at the OS
 
-if [ $INST_SQLITE3 -eq 1 ]
-then
-    if [ ! -e $SQLITE/done ]
+    if [ $MYOS = "Darwin" ]
     then
-        [ ! -e $SQLITE ] && tar xfz $SQLITE.tar.gz
-        echo "Installing SQLite3"
-        cd $SQLITE
-        ( ./configure --prefix=${DEST_DIR}/ 2>&1 ) 1>> ${LOG_FILE} || do_exit
-        ( make ${MAKE_PROCS} install 2>&1 ) 1>> ${LOG_FILE} || do_exit
-        ( make clean 2>&1) 1>> ${LOG_FILE} || do_exit
-        touch done
-        cd ..
+        MINICONDA_OS="MacOSX"
+        MINICONDA_ARCH="x86_64"
+    elif [ $MYOS = "Linux" ]
+    then
+        MINICONDA_OS="Linux"
+        if [ $MYARCH = "i386" ]
+        then
+            MINICONDA_ARCH="x86"
+        elif [ $MYARCH = "i686"  ]
+        then
+            MINICONDA_ARCH="x86"
+        elif [ $MYARCH = "x86_64"  ]
+        then
+            MINICONDA_ARCH="x86_64"
+        else
+            echo "Not sure which architecture you are running."
+            echo "Going with x86_64 architecture."
+            MINICONDA_ARCH="x86_64"
+        fi
     fi
-fi
 
-if [ ! -e $PYTHON2/done ]
-then
-    echo "Installing Python 2. This may take a while, but don't worry. yt loves you."
-    [ ! -e $PYTHON2 ] && tar xfz $PYTHON2.tgz
-    cd $PYTHON2
-    ( ./configure --prefix=${DEST_DIR}/ ${PYCONF_ARGS} 2>&1 ) 1>> ${LOG_FILE} || do_exit
+    MINICONDA_PKG="Miniconda-${MINICONDA_VERSION}-${MINICONDA_OS}-${MINICONDA_ARCH}.sh"
 
-    ( make ${MAKE_PROCS} 2>&1 ) 1>> ${LOG_FILE} || do_exit
-    ( make install 2>&1 ) 1>> ${LOG_FILE} || do_exit
-    ( ln -sf ${DEST_DIR}/bin/python2.7 ${DEST_DIR}/bin/pyyt 2>&1 ) 1>> ${LOG_FILE}
-    ( make clean 2>&1) 1>> ${LOG_FILE} || do_exit
-    touch done
-    cd ..
-fi
+    echo
+    echo "Downloading ${MINICONDA_URLBASE}/${MINICONDA_PKG}"
+    echo
 
-if [ $INST_PY3 -eq 1 ]
-then
-    if [ ! -e $PYTHON3/done ]
+    if [ -f ${MINICONDA_PKG} ]
     then
-        echo "Installing Python 3. Because two Pythons are better than one."
-        [ ! -e $PYTHON3 ] && tar xfz $PYTHON3.tgz
-        cd $PYTHON3
-        ( ./configure --prefix=${DEST_DIR}/ ${PYCONF_ARGS} 2>&1 ) 1>> ${LOG_FILE} || do_exit
+        rm $MINICONDA_PKG
+    fi
 
-        ( make ${MAKE_PROCS} 2>&1 ) 1>> ${LOG_FILE} || do_exit
-        ( make install 2>&1 ) 1>> ${LOG_FILE} || do_exit
-        ( ln -sf ${DEST_DIR}/bin/python3.4 ${DEST_DIR}/bin/pyyt 2>&1 ) 1>> ${LOG_FILE}
-        ( ln -sf ${DEST_DIR}/bin/python3.4 ${DEST_DIR}/bin/python 2>&1 ) 1>> ${LOG_FILE}
-        ( ln -sf ${DEST_DIR}/bin/python3-config ${DEST_DIR}/bin/python-config 2>&1 ) 1>> ${LOG_FILE}
-        ( make clean 2>&1) 1>> ${LOG_FILE} || do_exit
-        touch done
-        cd ..
+    ${GETFILE} ${MINICONDA_URLBASE}/${MINICONDA_PKG} || do_exit
+
+    echo "Installing the Miniconda python environment."
+
+    if [ -e ${DEST_DIR} ]
+    then
+       rm -r $DEST_DIR
     fi
-fi
 
-export PYTHONPATH=${DEST_DIR}/lib/${PYTHON_EXEC}/site-packages/
+    bash ./${MINICONDA_PKG} -b -p $DEST_DIR
 
-# Install setuptools
-do_setup_py $SETUPTOOLS
+    # Need to set PATH so we use miniconda's python environment
+    export PATH=${DEST_DIR}/bin:$PATH
 
-if [ $INST_HG -eq 1 ]
-then
-    do_setup_py $MERCURIAL
-    export HG_EXEC=${DEST_DIR}/bin/hg
-else
-    # We assume that hg can be found in the path.
-    if type -P hg &>/dev/null
+    echo "Installing the necessary packages for yt."
+    echo "This may take a while, but don't worry.  yt loves you."
+
+    declare -a YT_DEPS
+    YT_DEPS+=('python')
+    YT_DEPS+=('setuptools')
+    YT_DEPS+=('numpy')
+    YT_DEPS+=('jupyter')
+    YT_DEPS+=('ipython')
+    YT_DEPS+=('sphinx')
+    if [ $INST_H5PY -ne 0 ]
     then
-        export HG_EXEC=hg
+        YT_DEPS+=('h5py')
+    fi
+    YT_DEPS+=('matplotlib')
+    YT_DEPS+=('cython')
+    if [ $INST_NOSE -ne 0 ]
+    then
+        YT_DEPS+=('nose')
+    fi
+    if [ $INST_SCIPY -ne 0 ]
+    then
+        YT_DEPS+=('scipy')
+    fi
+    if [ $INST_ASTROPY -ne 0 ]
+    then
+        YT_DEPS+=('astropy')
+    fi
+    if [ $INST_ROCKSTAR -ne 0 ]
+    then
+        echo "Installing with rockstar and conda hasn't been implemented yet"
+        exit 1
+    fi
+    YT_DEPS+=('conda-build')
+    YT_DEPS+=('mercurial')
+    YT_DEPS+=('sympy')
+
+    if [ $INST_UNSTRUCTURED -eq 1 ]
+    then
+        YT_DEPS+=('netcdf4')   
+    fi
+    
+    # First make sure conda itself is up to date
+    log_cmd conda update --yes conda
+    
+    log_cmd echo "DEPENDENCIES" ${YT_DEPS[@]}
+    for YT_DEP in "${YT_DEPS[@]}"; do
+        echo "Installing $YT_DEP"
+        log_cmd conda install --yes ${YT_DEP}
+    done
+    
+    if [ $INST_UNSTRUCTURED -eq 1 ]
+    then
+        
+        echo "Installing embree"
+        mkdir ${DEST_DIR}/src
+        cd ${DEST_DIR}/src
+        ( ${GETFILE} "$EMBREE_URL" 2>&1 ) 1>> ${LOG_FILE} || do_exit
+        log_cmd tar xfz ${EMBREE}.tar.gz
+        log_cmd mv ${DEST_DIR}/src/${EMBREE}/include/embree2 ${DEST_DIR}/include
+        log_cmd mv ${DEST_DIR}/src/${EMBREE}/lib/lib*.* ${DEST_DIR}/lib
+        if [ `uname` = "Darwin" ]
+        then
+            ln -s ${DEST_DIR}/lib/libembree.2.dylib ${DEST_DIR}/lib/libembree.dylib
+            install_name_tool -id ${DEST_DIR}/lib/libembree.2.dylib ${DEST_DIR}/lib/libembree.2.dylib
+        else
+            ln -s ${DEST_DIR}/lib/libembree.so.2 ${DEST_DIR}/lib/libembree.so
+        fi
+        
+        echo "Installing pyembree from source"
+        ( ${GETFILE} "$PYEMBREE_URL" 2>&1 ) 1>> ${LOG_FILE} || do_exit
+        log_cmd unzip ${DEST_DIR}/src/master.zip
+        pushd ${DEST_DIR}/src/pyembree-master
+        log_cmd python setup.py install build_ext -I${DEST_DIR}/include -L${DEST_DIR}/lib
+        popd
+    fi
+
+    if [ $INST_YT_SOURCE -eq 0 ]
+    then
+        echo "Installing yt"
+        log_cmd conda install --yes yt
     else
-        echo "Cannot find mercurial.  Please set INST_HG=1."
-        do_exit
+        # We do a source install.
+        echo "Installing yt from source"
+        YT_DIR="${DEST_DIR}/src/yt-hg"
+        log_cmd hg clone -r ${BRANCH} https://bitbucket.org/yt_analysis/yt ${YT_DIR}
+        if [ $INST_UNSTRUCTURED -eq 1 ]
+        then
+            echo $DEST_DIR > ${YT_DIR}/embree.cfg
+        fi
+        pushd ${YT_DIR}
+        log_cmd python setup.py develop
+        popd
     fi
-fi
 
-if [ -z "$YT_DIR" ]
-then
-    if [ -e $ORIG_PWD/yt/mods.py ]
-    then
-        YT_DIR="$ORIG_PWD"
-    elif [ -e $ORIG_PWD/../yt/mods.py ]
-    then
-        YT_DIR=`dirname $ORIG_PWD`
-    elif [ ! -e yt-hg ]
-    then
-        echo "Cloning yt"
-        YT_DIR="$PWD/yt-hg/"
-        ( ${HG_EXEC} --debug clone https://bitbucket.org/yt_analysis/yt-supplemental/ 2>&1 ) 1>> ${LOG_FILE}
-        # Recently the hg server has had some issues with timeouts.  In lieu of
-        # a new webserver, we are now moving to a three-stage process.
-        # First we clone the repo, but only up to r0.
-        ( ${HG_EXEC} --debug clone https://bitbucket.org/yt_analysis/yt/ ./yt-hg 2>&1 ) 1>> ${LOG_FILE}
-        # Now we update to the branch we're interested in.
-        ( ${HG_EXEC} -R ${YT_DIR} up -C ${BRANCH} 2>&1 ) 1>> ${LOG_FILE}
-    elif [ -e yt-hg ]
-    then
-        YT_DIR="$PWD/yt-hg/"
-    fi
-    echo Setting YT_DIR=${YT_DIR}
-fi
-
-# This fixes problems with gfortran linking.
-unset LDFLAGS
- 
-echo "Installing pip"
-( ${GETFILE} https://bootstrap.pypa.io/get-pip.py 2>&1 ) 1>> ${LOG_FILE} || do_exit
-( ${DEST_DIR}/bin/${PYTHON_EXEC} get-pip.py 2>&1 ) 1>> ${LOG_FILE} || do_exit
-
-if [ $INST_SCIPY -eq 0 ]
-then
-    do_setup_py $NUMPY ${NUMPY_ARGS}
-else
-    if [ ! -e $SCIPY/done ]
-    then
-    if [ ! -e BLAS/done ]
-    then
-        tar xfz blas.tar.gz
-        echo "Building BLAS"
-        cd BLAS
-        gfortran -O2 -fPIC -fno-second-underscore -c *.f
-        ( ar r libfblas.a *.o 2>&1 ) 1>> ${LOG_FILE}
-        ( ranlib libfblas.a 2>&1 ) 1>> ${LOG_FILE}
-        rm -rf *.o
-        touch done
-        cd ..
-    fi
-    if [ ! -e $LAPACK/done ]
-    then
-        tar xfz $LAPACK.tar.gz
-        echo "Building LAPACK"
-        cd $LAPACK/
-        cp INSTALL/make.inc.gfortran make.inc
-        ( make lapacklib OPTS="-fPIC -O2" NOOPT="-fPIC -O0" CFLAGS=-fPIC LDFLAGS=-fPIC 2>&1 ) 1>> ${LOG_FILE} || do_exit
-        touch done
-        cd ..
-    fi
-    fi
-    export BLAS=$PWD/BLAS/libfblas.a
-    export LAPACK=$PWD/$LAPACK/liblapack.a
-    do_setup_py $NUMPY ${NUMPY_ARGS}
-    do_setup_py $SCIPY ${NUMPY_ARGS}
-fi
-
-if [ -n "${MPL_SUPP_LDFLAGS}" ]
-then
-    OLD_LDFLAGS=${LDFLAGS}
-    export LDFLAGS="${MPL_SUPP_LDFLAGS}"
-    echo "Setting LDFLAGS ${LDFLAGS}"
-fi
-if [ -n "${MPL_SUPP_CXXFLAGS}" ]
-then
-    OLD_CXXFLAGS=${CXXFLAGS}
-    export CXXFLAGS="${MPL_SUPP_CXXFLAGS}"
-    echo "Setting CXXFLAGS ${CXXFLAGS}"
-fi
-if [ -n "${MPL_SUPP_CFLAGS}" ]
-then
-    OLD_CFLAGS=${CFLAGS}
-    export CFLAGS="${MPL_SUPP_CFLAGS}"
-    echo "Setting CFLAGS ${CFLAGS}"
-fi
-# Now we set up the basedir for matplotlib:
-mkdir -p ${DEST_DIR}/src/$MATPLOTLIB
-echo "[directories]" >> ${DEST_DIR}/src/$MATPLOTLIB/setup.cfg
-echo "basedirlist = ${DEST_DIR}" >> ${DEST_DIR}/src/$MATPLOTLIB/setup.cfg
-if [ `uname` = "Darwin" ]
-then
-   echo "[gui_support]" >> ${DEST_DIR}/src/$MATPLOTLIB/setup.cfg
-   echo "macosx = False" >> ${DEST_DIR}/src/$MATPLOTLIB/setup.cfg
-fi
-
-_user_DISPLAY=$DISPLAY
-unset DISPLAY   # see (yt-user link missing: "Installation failure" 01/29/15)
-do_setup_py $MATPLOTLIB
-export DISPLAY=${_user_DISPLAY}
-if [ -n "${OLD_LDFLAGS}" ]
-then
-    export LDFLAG=${OLD_LDFLAGS}
-fi
-[ -n "${OLD_LDFLAGS}" ] && export LDFLAGS=${OLD_LDFLAGS}
-[ -n "${OLD_CXXFLAGS}" ] && export CXXFLAGS=${OLD_CXXFLAGS}
-[ -n "${OLD_CFLAGS}" ] && export CFLAGS=${OLD_CFLAGS}
-
-echo "Installing Jupyter"
-( ${DEST_DIR}/bin/pip install "jupyter<2.0.0" 2>&1 ) 1>> ${LOG_FILE}
-
-do_setup_py $CYTHON
-do_setup_py $H5PY
-do_setup_py $NOSE
-do_setup_py $PYTHON_HGLIB
-do_setup_py $SYMPY
-[ $INST_PYX -eq 1 ] && do_setup_py $PYX
-
-( ${DEST_DIR}/bin/pip install jinja2 2>&1 ) 1>> ${LOG_FILE}
-
-# Now we build Rockstar and set its environment variable.
-if [ $INST_ROCKSTAR -eq 1 ]
-then
-    if [ ! -e rockstar/done ]
-    then
-        echo "Building Rockstar"
-        if [ ! -e rockstar ]
-        then
-            ( hg clone http://bitbucket.org/MatthewTurk/rockstar 2>&1 ) 1>> ${LOG_FILE}
-        fi
-        cd rockstar
-        ( hg pull 2>&1 ) 1>> ${LOG_FILE}
-        ( hg up -C tip 2>&1 ) 1>> ${LOG_FILE}
-        ( make lib 2>&1 ) 1>> ${LOG_FILE} || do_exit
-        cp librockstar.so ${DEST_DIR}/lib
-        ROCKSTAR_DIR=${DEST_DIR}/src/rockstar
-        echo $ROCKSTAR_DIR > ${YT_DIR}/rockstar.cfg
-        touch done
-        cd ..
-    fi
-fi
-
-echo "Doing yt update, wiping local changes and updating to branch ${BRANCH}"
-MY_PWD=`pwd`
-cd $YT_DIR
-( ${HG_EXEC} pull 2>1 && ${HG_EXEC} up -C 2>1 ${BRANCH} 2>&1 ) 1>> ${LOG_FILE}
-
-echo "Installing yt"
-[ $INST_PNG -eq 1 ] && echo $PNG_DIR > png.cfg
-( export PATH=$DEST_DIR/bin:$PATH ; ${DEST_DIR}/bin/${PYTHON_EXEC} setup.py develop 2>&1 ) 1>> ${LOG_FILE} || do_exit
-touch done
-cd $MY_PWD
-
-if !( ( ${DEST_DIR}/bin/${PYTHON_EXEC} -c "import readline" 2>&1 )>> ${LOG_FILE}) || \
-    [[ "${MYOS##Darwin}" != "${MYOS}" && $INST_PY3 -eq 1 ]] 
-then
-    if !( ( ${DEST_DIR}/bin/${PYTHON_EXEC} -c "import gnureadline" 2>&1 )>> ${LOG_FILE})
-    then
-        echo "Installing pure-python readline"
-        ( ${DEST_DIR}/bin/pip install gnureadline 2>&1 ) 1>> ${LOG_FILE}
-    fi
-fi
-
-if [ -e $HOME/.matplotlib/fontList.cache ] && \
-   ( grep -q python2.6 $HOME/.matplotlib/fontList.cache )
-then
-    echo "WARNING WARNING WARNING WARNING WARNING WARNING WARNING"
-    echo "*******************************************************"
-    echo
-    echo "  You likely need to remove your old fontList.cache!"
-    echo "  You can do this with this command:"
-    echo ""
-    echo "  rm $HOME/.matplotlib/fontList.cache"
-    echo
-    echo "*******************************************************"
-fi
-
-# Add the environment scripts
-( cp ${YT_DIR}/doc/activate ${DEST_DIR}/bin/activate 2>&1 ) 1>> ${LOG_FILE}
-sed -i.bak -e "s,__YT_DIR__,${DEST_DIR}," ${DEST_DIR}/bin/activate
-( cp ${YT_DIR}/doc/activate.csh ${DEST_DIR}/bin/activate.csh 2>&1 ) 1>> ${LOG_FILE}
-sed -i.bak -e "s,__YT_DIR__,${DEST_DIR}," ${DEST_DIR}/bin/activate.csh
-
-function print_afterword
-{
     echo
     echo
     echo "========================================================================"
     echo
-    echo "yt is now installed in $DEST_DIR ."
+    echo "yt and the Conda system are now installed in $DEST_DIR"
     echo
-    echo "To run from this new installation, use the activate script for this "
-    echo "environment."
+    echo "You must now prepend the following folder to your PATH environment variable:"
+    echo 
+    echo "    $DEST_DIR/bin"
     echo
-    echo "    $ source $DEST_DIR/bin/activate"
+    echo "On Bash-style shells you can copy/paste the following command to "
+    echo "temporarily activate the yt installation:"
     echo
-    echo "This modifies the environment variables YT_DEST, PATH, PYTHONPATH, and"
-    echo "LD_LIBRARY_PATH to match your new yt install.  If you use csh, just"
-    echo "append .csh to the above."
+    echo "    export PATH=$DEST_DIR/bin:\$PATH"
+    echo
+    echo "and on csh-style shells:"
+    echo
+    echo "    setenv PATH $DEST_DIR/bin:\$PATH"
+    echo
+    echo "You can also update the init file appropriate for your shell"
+    echo "(e.g. .bashrc, .bash_profile, .cshrc, or .zshrc) to include"
+    echo "the same command."
     echo
     echo "To get started with yt, check out the orientation:"
     echo
-    echo "    http://yt-project.org/doc/quickstart/"
-    echo
-    echo "The source for yt is located at:"
-    echo "    $YT_DIR"
-    if [ $INST_HG -eq 1 ]
-    then
-      echo
-      echo "Mercurial has also been installed:"
-      echo
-      echo "$DEST_DIR/bin/hg"
-      echo
-    fi
+    echo "    http://yt-project.org/doc/orientation/"
     echo
     echo "For support, see the website and join the mailing list:"
     echo
@@ -1078,11 +1393,5 @@
     echo "========================================================================"
     echo
     echo "Oh, look at me, still talking when there's science to do!"
-    echo "Good luck, and email the user list if you run into any problems."
-}
-
-print_afterword
-print_afterword >> ${LOG_FILE}
-
-echo "yt dependencies were last updated on" > ${DEST_DIR}/.yt_update
-date >> ${DEST_DIR}/.yt_update
+    echo "Good luck, and email the mailing list if you run into any problems."
+fi
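
For reference, the platform-detection block added above reduces to the
following standalone sketch (a minimal distillation, not part of the script):

    # Build the Miniconda installer name from OS and architecture.
    MINICONDA_VERSION="latest"
    case "`uname -s`" in
        Darwin) MINICONDA_OS="MacOSX"; MINICONDA_ARCH="x86_64" ;;
        Linux)
            MINICONDA_OS="Linux"
            case "`uname -m`" in
                i386|i686) MINICONDA_ARCH="x86" ;;
                *)         MINICONDA_ARCH="x86_64" ;;  # fall back to x86_64
            esac ;;
    esac
    echo "Miniconda-${MINICONDA_VERSION}-${MINICONDA_OS}-${MINICONDA_ARCH}.sh"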


https://bitbucket.org/yt_analysis/yt/commits/9dce5205150b/
Changeset:   9dce5205150b
Branch:      yt
User:        ngoldbaum
Date:        2016-03-01 00:47:34+00:00
Summary:     Fix syntax error
Affected #:  1 file

diff -r 270604a874e49fcbff493bbb7cf3b473b8649f3e -r 9dce5205150ba10561d8e8126915d1fb90388b74 doc/install_script.sh
--- a/doc/install_script.sh
+++ b/doc/install_script.sh
@@ -793,8 +793,8 @@
     [ $INST_SCIPY -eq 1 ] && get_ytproject $LAPACK.tar.gz
     [ $INST_HG -eq 1 ] && get_ytproject $MERCURIAL.tar.gz
     [ $INST_PY3 -eq 1 ] && get_ytproject $PYTHON3.tgz
-    [ $INST_H5PY -eq 1] && get_ytproject $H5PY.tar.gz
-    [ $INST_NOSE -eq 1] && get_ytproject $NOSE.tar.gz
+    [ $INST_H5PY -eq 1 ] && get_ytproject $H5PY.tar.gz
+    [ $INST_NOSE -eq 1 ] && get_ytproject $NOSE.tar.gz
     get_ytproject $PYTHON2.tgz
     get_ytproject $NUMPY.tar.gz
     get_ytproject $MATPLOTLIB.tar.gz
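
The syntax error being fixed: the closing bracket is an ordinary argument to
the [ (test) builtin, so it must be a separate word.  Without the space,
"1]" is read as a single operand and the test never sees its terminator:

    INST_H5PY=1
    [ $INST_H5PY -eq 1] && echo yes    # error: [: missing `]'
    [ $INST_H5PY -eq 1 ] && echo yes   # prints "yes"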


https://bitbucket.org/yt_analysis/yt/commits/a8a476e17920/
Changeset:   a8a476e17920
Branch:      yt
User:        ngoldbaum
Date:        2016-03-01 00:47:51+00:00
Summary:     Do a better job of handling yt being re-installed
Affected #:  1 file

diff -r 9dce5205150ba10561d8e8126915d1fb90388b74 -r a8a476e17920b2f1b0bd0c8c9f112044837f3368 doc/install_script.sh
--- a/doc/install_script.sh
+++ b/doc/install_script.sh
@@ -30,6 +30,12 @@
 # INST_CONDA=0
 # INST_YT_SOURCE=1
 
+if [ ${REINST_YT} ] && [ ${REINST_YT} -eq 1 ] && [ -n ${YT_DEST} ]
+then
+    DEST_DIR=${YT_DEST}
+    INST_CONDA=0
+fi
+
 if [ $INST_CONDA -ne 0 ]
 then
     DEST_SUFFIX="yt-conda"
@@ -43,14 +49,13 @@
     DEST_SUFFIX="yt-`uname -m`"
 fi
 
-DEST_DIR="`pwd`/${DEST_SUFFIX/ /}"   # Installation location
+if [ ! -z "${DEST_DIR}" ]
+then
+    DEST_DIR="`pwd`/${DEST_SUFFIX/ /}"   # Installation location
+fi
+
 BRANCH="yt" # This is the branch to which we will forcibly update.
 
-if [ ${REINST_YT} ] && [ ${REINST_YT} -eq 1 ] && [ -n ${YT_DEST} ]
-then
-    DEST_DIR=${YT_DEST}
-fi
-
 # What follows are some other options that you may or may not need to change.
 
 # Here's where you put the HDF5 path if you like; otherwise it'll download it
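
One subtlety in the guard added above: [ -n ${YT_DEST} ] is unquoted, so
when YT_DEST is unset it collapses to the one-argument test [ -n ], which is
always true (it merely checks that the string "-n" is non-empty).  The
REINST_YT tests short-circuit first in normal use, but if REINST_YT=1 were
set without YT_DEST, DEST_DIR would silently become empty; the quoted form
is the safe one:

    unset YT_DEST
    [ -n ${YT_DEST} ]   && echo "fires anyway"   # [ -n ] is true
    [ -n "${YT_DEST}" ] && echo "never fires"    # [ -n "" ] is false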


https://bitbucket.org/yt_analysis/yt/commits/22529c09eee5/
Changeset:   22529c09eee5
Branch:      yt
User:        ngoldbaum
Date:        2016-03-01 00:48:15+00:00
Summary:     typo and formatting fixes
Affected #:  1 file

diff -r a8a476e17920b2f1b0bd0c8c9f112044837f3368 -r 22529c09eee5e410eb896a3e443a8b90bac29a14 doc/install_script.sh
--- a/doc/install_script.sh
+++ b/doc/install_script.sh
@@ -42,7 +42,7 @@
 else
     if [ $INST_YT_SOURCE -eq 0 ]
     then
-        echo "yt must be compiled from source if INST_CONDA is set"
+        echo "yt must be compiled from source if INST_CONDA is not set"
         echo "Please set INST_YT_SOURCE to 1 and re-run."
         exit 1
     fi
@@ -469,8 +469,8 @@
         EMBREE_URL="https://github.com/embree/embree/releases/download/v2.8.0/$EMBREE.tar.gz"
     elif [ `uname` = "Linux" ]
     then
-            EMBREE="embree-2.8.0.x86_64.linux"
-            EMBREE_URL="https://github.com/embree/embree/releases/download/v2.8.0/$EMBREE.tar.gz"
+        EMBREE="embree-2.8.0.x86_64.linux"
+        EMBREE_URL="https://github.com/embree/embree/releases/download/v2.8.0/$EMBREE.tar.gz"
     else
         echo "Unstructured mesh rendering is not supported on this platform."
         echo "Set INST_UNSTRUCTURED=0 and re-run the install script."


https://bitbucket.org/yt_analysis/yt/commits/83965d011034/
Changeset:   83965d011034
Branch:      yt
User:        MatthewTurk
Date:        2016-03-08 20:08:38+00:00
Summary:     Updating to Python 3.5.1 and Mercurial 3.7.2
Affected #:  1 file

diff -r 22529c09eee5e410eb896a3e443a8b90bac29a14 -r 83965d0110340a244a841fb6638a6a238dbd98c7 doc/install_script.sh
--- a/doc/install_script.sh
+++ b/doc/install_script.sh
@@ -606,7 +606,7 @@
 
 if [ $INST_PY3 -eq 1 ]
 then
-     PYTHON_EXEC='python3.4'
+     PYTHON_EXEC='python3.5'
 else 
      PYTHON_EXEC='python2.7'
 fi
@@ -742,7 +742,7 @@
     cd ${DEST_DIR}/src
 
     PYTHON2='Python-2.7.9'
-    PYTHON3='Python-3.4.3'
+    PYTHON3='Python-3.5.1'
     CYTHON='Cython-0.22'
     PYX='PyX-0.12.1'
     BZLIB='bzip2-1.0.6'
@@ -752,7 +752,7 @@
     LAPACK='lapack-3.4.2'
     PNG=libpng-1.6.3
     MATPLOTLIB='matplotlib-1.4.3'
-    MERCURIAL='mercurial-3.4'
+    MERCURIAL='mercurial-3.7.2'
     NOSE='nose-1.3.6'
     NUMPY='numpy-1.9.2'
     PYTHON_HGLIB='python-hglib-1.6'
@@ -767,7 +767,7 @@
     echo '856220fa579e272ac38dcef091760f527431ff3b98df9af6e68416fcf77d9659ac5abe5c7dee41331f359614637a4ff452033085335ee499830ed126ab584267  Cython-0.22.tar.gz' > Cython-0.22.tar.gz.sha512
     echo '4941f5aa21aff3743546495fb073c10d2657ff42b2aff401903498638093d0e31e344cce778980f28a7170c6d29eab72ac074277b9d4088376e8692dc71e55c1  PyX-0.12.1.tar.gz' > PyX-0.12.1.tar.gz.sha512
     echo 'a42f28ed8e49f04cf89e2ea7434c5ecbc264e7188dcb79ab97f745adf664dd9ab57f9a913543731635f90859536244ac37dca9adf0fc2aa1b215ba884839d160  Python-2.7.9.tgz' > Python-2.7.9.tgz.sha512
-    echo '609cc82586fabecb25f25ecb410f2938e01d21cde85dd3f8824fe55c6edde9ecf3b7609195473d3fa05a16b9b121464f5414db1a0187103b78ea6edfa71684a7  Python-3.4.3.tgz' > Python-3.4.3.tgz.sha512
+    echo '73f1477f3d3f5bd978c4ea1d1b679467b45e9fd2f443287b88c5c107a9ced580c56e0e8f33acea84e06b11a252e2a4e733120b721a9b6e1bb3d34493a3353bfb  Python-3.5.1.tgz' > Python-3.5.1.tgz.sha512
     echo '276bd9c061ec9a27d478b33078a86f93164ee2da72210e12e2c9da71dcffeb64767e4460b93f257302b09328eda8655e93c4b9ae85e74472869afbeae35ca71e  blas.tar.gz' > blas.tar.gz.sha512
     echo '00ace5438cfa0c577e5f578d8a808613187eff5217c35164ffe044fbafdfec9e98f4192c02a7d67e01e5a5ccced630583ad1003c37697219b0f147343a3fdd12  bzip2-1.0.6.tar.gz' > bzip2-1.0.6.tar.gz.sha512
     echo '609a68a3675087e0cc95268574f31e104549daa48efe15a25a33b8e269a93b4bd160f4c3e8178dca9c950ef5ca514b039d6fd1b45db6af57f25342464d0429ce  freetype-2.4.12.tar.gz' > freetype-2.4.12.tar.gz.sha512
@@ -776,7 +776,7 @@
     echo '8770214491e31f0a7a3efaade90eee7b0eb20a8a6ab635c5f854d78263f59a1849133c14ef5123d01023f0110cbb9fc6f818da053c01277914ae81473430a952  lapack-3.4.2.tar.gz' > lapack-3.4.2.tar.gz.sha512
     echo '887582e5a22e4cde338aa8fec7a89f6dd31f2f02b8842735f00f970f64582333fa03401cea6d01704083403c7e8b7ebc26655468ce930165673b33efa4bcd586  libpng-1.6.3.tar.gz' > libpng-1.6.3.tar.gz.sha512
     echo '51b0f58b2618b47b653e17e4f6b6a1215d3a3b0f1331ce3555cc7435e365d9c75693f289ce12fe3bf8f69fd57b663e545f0f1c2c94e81eaa661cac0689e125f5  matplotlib-1.4.3.tar.gz' > matplotlib-1.4.3.tar.gz.sha512
-    echo 'a61b0d4cf528136991243bb23ac972c11c50ab5681d09f8b2d12cf7d37d3a9d76262f7fe6e7a1834bf6d03e8dc0ebbd9231da982e049e09830341dabefe5d064  mercurial-3.4.tar.gz' > mercurial-3.4.tar.gz.sha512
+    echo '573b35052ec50f4672cdd5afd93080edc864deb854b890c8cedba53c40d6d3a37a560e364b94b819cfc6d9fc5c07c0411a13167f0888007e5c9366bc94154306  mercurial-3.7.2.tar.gz' > mercurial-3.7.2.tar.gz.sha512
     echo 'd0cede08dc33a8ac0af0f18063e57f31b615f06e911edb5ca264575174d8f4adb4338448968c403811d9dcc60f38ade3164662d6c7b69b499f56f0984bb6283c  nose-1.3.6.tar.gz' > nose-1.3.6.tar.gz.sha512
     echo '70470ebb9afef5dfd0c83ceb7a9d5f1b7a072b1a9b54b04f04f5ed50fbaedd5b4906bd500472268d478f94df9e749a88698b1ff30f2d80258e7f3fec040617d9  numpy-1.9.2.tar.gz' > numpy-1.9.2.tar.gz.sha512
     echo 'bfd10455e74e30df568c4c4827140fb6cc29893b0e062ce1764bd52852ec7487a70a0f5ea53c3fca7886f5d36365c9f4db52b8c93cad35fb67beeb44a2d56f2d  python-hglib-1.6.tar.gz' > python-hglib-1.6.tar.gz.sha512
@@ -950,8 +950,8 @@
             
             ( make ${MAKE_PROCS} 2>&1 ) 1>> ${LOG_FILE} || do_exit
             ( make install 2>&1 ) 1>> ${LOG_FILE} || do_exit
-            ( ln -sf ${DEST_DIR}/bin/python3.4 ${DEST_DIR}/bin/pyyt 2>&1 ) 1>> ${LOG_FILE}
-            ( ln -sf ${DEST_DIR}/bin/python3.4 ${DEST_DIR}/bin/python 2>&1 ) 1>> ${LOG_FILE}
+            ( ln -sf ${DEST_DIR}/bin/python3.5 ${DEST_DIR}/bin/pyyt 2>&1 ) 1>> ${LOG_FILE}
+            ( ln -sf ${DEST_DIR}/bin/python3.5 ${DEST_DIR}/bin/python 2>&1 ) 1>> ${LOG_FILE}
             ( ln -sf ${DEST_DIR}/bin/python3-config ${DEST_DIR}/bin/python-config 2>&1 ) 1>> ${LOG_FILE}
             ( make clean 2>&1) 1>> ${LOG_FILE} || do_exit
             touch done
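
The stub files updated above use the standard two-space "<hash>  <filename>"
checksum format, so a downloaded tarball can be verified against them
directly; for example (a hypothetical check run from the src directory, not
part of the script):

    sha512sum -c Python-3.5.1.tgz.sha512      # GNU coreutils
    shasum -a 512 -c Python-3.5.1.tgz.sha512  # OS X / BSD equivalent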


https://bitbucket.org/yt_analysis/yt/commits/0dddd71ffc94/
Changeset:   0dddd71ffc94
Branch:      yt
User:        ngoldbaum
Date:        2016-03-28 20:15:32+00:00
Summary:     Move all install options to the top of the script
Affected #:  1 file

diff -r 83965d0110340a244a841fb6638a6a238dbd98c7 -r 0dddd71ffc94187d6bf424d9cee713387a5f84cb doc/install_script.sh
--- a/doc/install_script.sh
+++ b/doc/install_script.sh
@@ -30,30 +30,6 @@
 # INST_CONDA=0
 # INST_YT_SOURCE=1
 
-if [ ${REINST_YT} ] && [ ${REINST_YT} -eq 1 ] && [ -n ${YT_DEST} ]
-then
-    DEST_DIR=${YT_DEST}
-    INST_CONDA=0
-fi
-
-if [ $INST_CONDA -ne 0 ]
-then
-    DEST_SUFFIX="yt-conda"
-else
-    if [ $INST_YT_SOURCE -eq 0 ]
-    then
-        echo "yt must be compiled from source if INST_CONDA is not set"
-        echo "Please set INST_YT_SOURCE to 1 and re-run."
-        exit 1
-    fi
-    DEST_SUFFIX="yt-`uname -m`"
-fi
-
-if [ ! -z "${DEST_DIR}" ]
-then
-    DEST_DIR="`pwd`/${DEST_SUFFIX/ /}"   # Installation location
-fi
-
 BRANCH="yt" # This is the branch to which we will forcibly update.
 
 # What follows are some other options that you may or may not need to change.
@@ -123,7 +99,30 @@
 
 MINICONDA_URLBASE="http://repo.continuum.io/miniconda"
 MINICONDA_VERSION="latest"
-YT_RECIPE_REPO="https://bitbucket.org/yt_analysis/yt_conda/raw/default"
+
+if [ ${REINST_YT} ] && [ ${REINST_YT} -eq 1 ] && [ -n ${YT_DEST} ]
+then
+    DEST_DIR=${YT_DEST}
+    INST_CONDA=0
+fi
+
+if [ $INST_CONDA -ne 0 ]
+then
+    DEST_SUFFIX="yt-conda"
+else
+    if [ $INST_YT_SOURCE -eq 0 ]
+    then
+        echo "yt must be compiled from source if INST_CONDA is not set"
+        echo "Please set INST_YT_SOURCE to 1 and re-run."
+        exit 1
+    fi
+    DEST_SUFFIX="yt-`uname -m`"
+fi
+
+if [ ! -z "${DEST_DIR}" ]
+then
+    DEST_DIR="`pwd`/${DEST_SUFFIX/ /}"   # Installation location
+fi
 
 # Make sure we are NOT being run as root
 if [[ $EUID -eq 0 ]]


https://bitbucket.org/yt_analysis/yt/commits/cc02f32e370c/
Changeset:   cc02f32e370c
Branch:      yt
User:        ngoldbaum
Date:        2016-03-28 21:35:51+00:00
Summary:     Clarify note printed when INST_CONDA=0 and INST_YT_SOURCE=0
Affected #:  1 file

diff -r 0dddd71ffc94187d6bf424d9cee713387a5f84cb -r cc02f32e370c08a4594bf97bd024e59741afa91b doc/install_script.sh
--- a/doc/install_script.sh
+++ b/doc/install_script.sh
@@ -112,7 +112,7 @@
 else
     if [ $INST_YT_SOURCE -eq 0 ]
     then
-        echo "yt must be compiled from source if INST_CONDA is not set"
+        echo "yt must be compiled from source if INST_CONDA is set to 0"
         echo "Please set INST_YT_SOURCE to 1 and re-run."
         exit 1
     fi


https://bitbucket.org/yt_analysis/yt/commits/e35fda3a655f/
Changeset:   e35fda3a655f
Branch:      yt
User:        ngoldbaum
Date:        2016-03-28 21:37:35+00:00
Summary:     Fix broken logic that sets DEST_DIR
Affected #:  1 file

diff -r cc02f32e370c08a4594bf97bd024e59741afa91b -r e35fda3a655fd7ce126debf907f67d0f01780f38 doc/install_script.sh
--- a/doc/install_script.sh
+++ b/doc/install_script.sh
@@ -119,7 +119,7 @@
     DEST_SUFFIX="yt-`uname -m`"
 fi
 
-if [ ! -z "${DEST_DIR}" ]
+if [ -z "${DEST_DIR}" ]
 then
     DEST_DIR="`pwd`/${DEST_SUFFIX/ /}"   # Installation location
 fi
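
The broken logic: with "! -z" (non-empty), the default was computed only
when DEST_DIR had already been set, clobbering a user-supplied location and
leaving DEST_DIR empty on a fresh run.  With "-z" the default is supplied
exactly when the user has not provided one:

    unset DEST_DIR
    [ ! -z "${DEST_DIR}" ] && echo "old test: never fires on a fresh run"
    [ -z "${DEST_DIR}" ]   && DEST_DIR="`pwd`/yt-conda"  # fixed behavior
    echo ${DEST_DIR}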


https://bitbucket.org/yt_analysis/yt/commits/34539dd6c1a8/
Changeset:   34539dd6c1a8
Branch:      yt
User:        ngoldbaum
Date:        2016-03-28 21:38:04+00:00
Summary:     Only print warnings about openSSL on OSX if INST_CONDA=0
Affected #:  1 file

diff -r e35fda3a655fd7ce126debf907f67d0f01780f38 -r 34539dd6c1a885a0ec2778320fb2b014b690fa83 doc/install_script.sh
--- a/doc/install_script.sh
+++ b/doc/install_script.sh
@@ -332,13 +332,18 @@
         echo "with the following command:"
         echo "    xcode-select --install"
         echo
-        echo "For OS X 10.11, you will additionally need to install the OpenSSL"
-        echo "library using a package manager like homebrew or macports."
-        echo "If you install fails with a message like"
-        echo "    ImportError: cannot import HTTPSHandler"
-        echo "then you do not have the OpenSSL headers available in a location"
-        echo "visible to your C compiler. Consider installing yt using the"
-        echo "get_yt.sh script instead, as that bundles OpenSSL."
+        if [ $INST_CONDA -eq 0 ]
+        then
+            echo "For OS X 10.11, you will additionally need to install the"
+            echo "OpenSSL library using a package manager like homebrew or"
+            echo "macports."
+            echo
+            echo "If your install fails with a message like"
+            echo "    ImportError: cannot import HTTPSHandler"
+            echo "then you do not have the OpenSSL headers available in a"
+            echo "location visible to your C compiler. Consider setting"
+            echo "INST_CONDA=1 instead, as conda's python bundles OpenSSL."
+        fi
         OSX_VERSION=`sw_vers -productVersion`
         if [ "${OSX_VERSION##10.8}" != "${OSX_VERSION}" ]
         then
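
The OS X version check visible in the context lines relies on a shell idiom
worth spelling out: ${VAR##pattern} strips the longest matching prefix, so
comparing the stripped value to the original is a portable "starts with"
test:

    OSX_VERSION="10.8.5"
    echo "${OSX_VERSION##10.8}"   # -> ".5" (the prefix was removed)
    if [ "${OSX_VERSION##10.8}" != "${OSX_VERSION}" ]
    then
        echo "running 10.8.x"
    fi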


https://bitbucket.org/yt_analysis/yt/commits/69dca9bffe7b/
Changeset:   69dca9bffe7b
Branch:      yt
User:        ngoldbaum
Date:        2016-03-28 21:38:27+00:00
Summary:     Shorten message about installing embree to fit an 80-character terminal
Affected #:  1 file

diff -r 34539dd6c1a885a0ec2778320fb2b014b690fa83 -r 69dca9bffe7b227328ba665c7332d6babc21c531 doc/install_script.sh
--- a/doc/install_script.sh
+++ b/doc/install_script.sh
@@ -506,7 +506,7 @@
 
 printf "%-18s = %s so I " "INST_UNSTRUCTURED" "${INST_UNSTRUCTURED}"
 get_willwont ${INST_UNSTRUCTURED}
-echo "be installing support for unstructured mesh rendering"
+echo "be installing unstructured mesh rendering"
 
 if [ $INST_CONDA -eq 0 ]
 then


https://bitbucket.org/yt_analysis/yt/commits/67f325b2325e/
Changeset:   67f325b2325e
Branch:      yt
User:        ngoldbaum
Date:        2016-03-28 21:38:50+00:00
Summary:     Add comment to else block to clarify where INST_CONDA=1 section starts
Affected #:  1 file

diff -r 69dca9bffe7b227328ba665c7332d6babc21c531 -r 67f325b2325e9f6a20d19457cabf0701e002302c doc/install_script.sh
--- a/doc/install_script.sh
+++ b/doc/install_script.sh
@@ -1217,7 +1217,7 @@
 
     echo "yt dependencies were last updated on" > ${DEST_DIR}/.yt_update
     date >> ${DEST_DIR}/.yt_update
-else
+else # INST_CONDA -eq 1
     MYARCH=`uname -m`       # A guess at the OS
     MYOS=`uname -s`
 


https://bitbucket.org/yt_analysis/yt/commits/60d11602f55b/
Changeset:   60d11602f55b
Branch:      yt
User:        ngoldbaum
Date:        2016-03-28 21:39:10+00:00
Summary:     Add logic to download miniconda2/3 if INST_PY3 is 0/1
Affected #:  1 file

diff -r 67f325b2325e9f6a20d19457cabf0701e002302c -r 60d11602f55b3faf6e52694572749118a6ff7d48 doc/install_script.sh
--- a/doc/install_script.sh
+++ b/doc/install_script.sh
@@ -1244,7 +1244,14 @@
         fi
     fi
 
-    MINICONDA_PKG="Miniconda-${MINICONDA_VERSION}-${MINICONDA_OS}-${MINICONDA_ARCH}.sh"
+    if [ $INST_PY3 -eq 1 ]
+    then
+        PY_VERSION='3'
+    else
+        PY_VERSION='2'
+    fi
+
+    MINICONDA_PKG="Miniconda${PY_VERSION}-${MINICONDA_VERSION}-${MINICONDA_OS}-${MINICONDA_ARCH}.sh"
 
     echo
     echo "Downloading ${MINICONDA_URLBASE}/${MINICONDA_PKG}"


https://bitbucket.org/yt_analysis/yt/commits/03c511a3f239/
Changeset:   03c511a3f239
Branch:      yt
User:        ngoldbaum
Date:        2016-03-28 21:39:48+00:00
Summary:     Rearrange logic for running miniconda install script, suppress output to terminal
Affected #:  1 file

diff -r 60d11602f55b3faf6e52694572749118a6ff7d48 -r 03c511a3f2390b80d39ebf3b1b3d88f1307eac6d doc/install_script.sh
--- a/doc/install_script.sh
+++ b/doc/install_script.sh
@@ -1262,16 +1262,18 @@
         rm $MINICONDA_PKG
     fi
 
-    ${GETFILE} ${MINICONDA_URLBASE}/${MINICONDA_PKG} || do_exit
-
     echo "Installing the Miniconda python environment."
 
     if [ -e ${DEST_DIR} ]
     then
-       rm -r $DEST_DIR
+        rm -rf $DEST_DIR/*
+    else
+        mkdir $DEST_DIR
     fi
 
-    bash ./${MINICONDA_PKG} -b -p $DEST_DIR
+    log_cmd ${GETFILE} ${MINICONDA_URLBASE}/${MINICONDA_PKG} || do_exit
+
+    log_cmd bash ./${MINICONDA_PKG} -b -p $DEST_DIR -f
 
     # Need to set PATH so we use miniconda's python environment
     export PATH=${DEST_DIR}/bin:$PATH
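
For reference, the flags passed to the Miniconda installer above are its
batch-mode options (as described in the installer's own usage text):

    #   -b   batch mode: accept the license, never prompt
    #   -p   installation prefix (here, $DEST_DIR)
    #   -f   proceed even if the prefix directory already exists

The newly added -f matters because the script now pre-creates (or empties)
$DEST_DIR before running the installer.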


https://bitbucket.org/yt_analysis/yt/commits/531d9232a0dd/
Changeset:   531d9232a0dd
Branch:      yt
User:        ngoldbaum
Date:        2016-03-28 21:40:47+00:00
Summary:     Install mercurial in a separate py2 environment for py3 conda installs
Affected #:  1 file

diff -r 03c511a3f2390b80d39ebf3b1b3d88f1307eac6d -r 531d9232a0dd8c9ec7995c88c6a7c1f1be56b8c5 doc/install_script.sh
--- a/doc/install_script.sh
+++ b/doc/install_script.sh
@@ -1312,7 +1312,10 @@
         exit 1
     fi
     YT_DEPS+=('conda-build')
-    YT_DEPS+=('mercurial')
+    if [ $INST_PY3 -eq 0 ]
+    then
+       YT_DEPS+=('mercurial')
+    fi
     YT_DEPS+=('sympy')
 
     if [ $INST_UNSTRUCTURED -eq 1 ]
@@ -1355,6 +1358,12 @@
         popd
     fi
 
+    if [ $INST_PY3 -eq 1 ]
+    then
+        log_cmd conda create -y -n py27 python=2.7 mercurial
+        log_cmd ln -s ${DEST_DIR}/envs/py27/bin/hg ${DEST_DIR}/bin
+    fi
+
     if [ $INST_YT_SOURCE -eq 0 ]
     then
         echo "Installing yt"


https://bitbucket.org/yt_analysis/yt/commits/17362d161a30/
Changeset:   17362d161a30
Branch:      yt
User:        ngoldbaum
Date:        2016-03-28 21:41:19+00:00
Summary:     Wrap pushd/popd calls in log_cmd
Affected #:  1 file

diff -r 531d9232a0dd8c9ec7995c88c6a7c1f1be56b8c5 -r 17362d161a30ecb9211c6140eecd2367c805b89d doc/install_script.sh
--- a/doc/install_script.sh
+++ b/doc/install_script.sh
@@ -1353,7 +1353,7 @@
         echo "Installing pyembree from source"
         ( ${GETFILE} "$PYEMBREE_URL" 2>&1 ) 1>> ${LOG_FILE} || do_exit
         log_cmd unzip ${DEST_DIR}/src/master.zip
-        pushd ${DEST_DIR}/src/pyembree-master
+        log_cmd pushd ${DEST_DIR}/src/pyembree-master
         log_cmd python setup.py install build_ext -I${DEST_DIR}/include -L${DEST_DIR}/lib
         popd
     fi
@@ -1377,9 +1377,9 @@
         then
             echo $DEST_DIR > ${YT_DIR}/embree.cfg
         fi
-        pushd ${YT_DIR}
+        log_cmd pushd ${YT_DIR}
         log_cmd python setup.py develop
-        popd
+        log_cmd popd
     fi
 
     echo
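
A caveat worth flagging here: log_cmd (defined near the top of the script)
runs its arguments inside a ( ... ) subshell, and a directory change made in
a subshell dies with it.  So "log_cmd pushd" does not move the parent
script, and the setup.py invocations that follow would appear to still run
from the original working directory.  A minimal demonstration:

    function log_cmd { ( $* 2>&1 ) 1>> demo.log; }
    cd /tmp
    log_cmd pushd /    # the directory change happens in a subshell...
    pwd                # ...so this still prints /tmp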


https://bitbucket.org/yt_analysis/yt/commits/25b77f284f8b/
Changeset:   25b77f284f8b
Branch:      yt
User:        ngoldbaum
Date:        2016-03-28 21:41:41+00:00
Summary:     Move notes about setting PATH to bottom of message printed at end of script

This ensures that this important information does not scroll off a short
terminal.
Affected #:  1 file

diff -r 17362d161a30ecb9211c6140eecd2367c805b89d -r 25b77f284f8b48cf271c48aae3ba8b34d100489e doc/install_script.sh
--- a/doc/install_script.sh
+++ b/doc/install_script.sh
@@ -1369,7 +1369,6 @@
         echo "Installing yt"
         log_cmd conda install --yes yt
     else
-        # We do a source install.
         echo "Installing yt from source"
         YT_DIR="${DEST_DIR}/src/yt-hg"
         log_cmd hg clone -r ${BRANCH} https://bitbucket.org/yt_analysis/yt ${YT_DIR}
@@ -1388,6 +1387,18 @@
     echo
     echo "yt and the Conda system are now installed in $DEST_DIR"
     echo
+    echo "To get started with yt, check out the orientation:"
+    echo
+    echo "    http://yt-project.org/doc/orientation/"
+    echo
+    echo "For support, see the website and join the mailing list:"
+    echo
+    echo "    http://yt-project.org/"
+    echo "    http://yt-project.org/data/      (Sample data)"
+    echo "    http://yt-project.org/doc/       (Docs)"
+    echo
+    echo "    http://lists.spacepope.org/listinfo.cgi/yt-users-spacepope.org"
+    echo
     echo "You must now prepend the following folder to your PATH environment variable:"
     echo 
     echo "    $DEST_DIR/bin"
@@ -1405,18 +1416,6 @@
     echo "(e.g. .bashrc, .bash_profile, .cshrc, or .zshrc) to include"
     echo "the same command."
     echo
-    echo "To get started with yt, check out the orientation:"
-    echo
-    echo "    http://yt-project.org/doc/orientation/"
-    echo
-    echo "For support, see the website and join the mailing list:"
-    echo
-    echo "    http://yt-project.org/"
-    echo "    http://yt-project.org/data/      (Sample data)"
-    echo "    http://yt-project.org/doc/       (Docs)"
-    echo
-    echo "    http://lists.spacepope.org/listinfo.cgi/yt-users-spacepope.org"
-    echo
     echo "========================================================================"
     echo
     echo "Oh, look at me, still talking when there's science to do!"


https://bitbucket.org/yt_analysis/yt/commits/dfda0ce6531c/
Changeset:   dfda0ce6531c
Branch:      yt
User:        ngoldbaum
Date:        2016-03-31 23:39:53+00:00
Summary:     Merging, clearing conflicts
Affected #:  280 files

diff -r 25b77f284f8b48cf271c48aae3ba8b34d100489e -r dfda0ce6531cadeee95b16b820c3a6203f9eaf42 CONTRIBUTING.rst
--- a/CONTRIBUTING.rst
+++ b/CONTRIBUTING.rst
@@ -795,8 +795,8 @@
    rather than explicitly. Ex: ``super(SpecialGridSubclass, self).__init__()``
    rather than ``SpecialGrid.__init__()``.
  * Docstrings should describe input, output, behavior, and any state changes
-   that occur on an object.  See the file ``doc/docstring_example.txt`` for a
-   fiducial example of a docstring.
+   that occur on an object.  See :ref:`docstrings` below for a fiducial example
+   of a docstring.
  * Use only one top-level import per line. Unless there is a good reason not to,
    imports should happen at the top of the file, after the copyright blurb.
  * Never compare with ``True`` or ``False`` using ``==`` or ``!=``, always use
@@ -843,7 +843,7 @@
    be avoided, they must be explained, even if they are only to be passed on to
    a nested function.
 
-.. _docstrings
+.. _docstrings:
 
 Docstrings
 ----------

diff -r 25b77f284f8b48cf271c48aae3ba8b34d100489e -r dfda0ce6531cadeee95b16b820c3a6203f9eaf42 doc/cheatsheet.tex
--- a/doc/cheatsheet.tex
+++ b/doc/cheatsheet.tex
@@ -7,12 +7,12 @@
 
 % To make this come out properly in landscape mode, do one of the following
 % 1.
-%  pdflatex latexsheet.tex
+%  pdflatex cheatsheet.tex
 %
 % 2.
-%  latex latexsheet.tex
-%  dvips -P pdf  -t landscape latexsheet.dvi
-%  ps2pdf latexsheet.ps
+%  latex cheatsheet.tex
+%  dvips -P pdf  -t landscape cheatsheet.dvi
+%  ps2pdf cheatsheet.ps
 
 
 % If you're reading this, be prepared for confusion.  Making this was
@@ -45,7 +45,7 @@
 
 % Turn off header and footer
 \pagestyle{empty}
- 
+
 
 % Redefine section commands to use less space
 \makeatletter
@@ -117,26 +117,26 @@
 including a list of the available flags.
 
 \texttt{iyt}\textemdash\ Load yt and IPython. \\
-\texttt{yt load} {\it dataset}   \textemdash\ Load a single dataset.  \\
+\texttt{yt load} \textit{dataset}   \textemdash\ Load a single dataset.  \\
 \texttt{yt help} \textemdash\ Print yt help information. \\
-\texttt{yt stats} {\it dataset} \textemdash\ Print stats of a dataset. \\
+\texttt{yt stats} \textit{dataset} \textemdash\ Print stats of a dataset. \\
 \texttt{yt update} \textemdash\ Update yt to most recent version.\\
 \texttt{yt update --all} \textemdash\ Update yt and dependencies to most recent version. \\
 \texttt{yt version} \textemdash\ yt installation information. \\
 \texttt{yt notebook} \textemdash\ Run the IPython notebook server. \\
-\texttt{yt upload\_image} {\it image.png} \textemdash\ Upload PNG image to imgur.com. \\
-\texttt{yt upload\_notebook} {\it notebook.nb} \textemdash\ Upload IPython notebook to hub.yt-project.org.\\
-\texttt{yt plot} {\it dataset} \textemdash\ Create a set of images.\\
-\texttt{yt render} {\it dataset} \textemdash\ Create a simple
+\texttt{yt upload\_image} \textit{image.png} \textemdash\ Upload PNG image to imgur.com. \\
+\texttt{yt upload\_notebook} \textit{notebook.nb} \textemdash\ Upload IPython notebook to hub.yt-project.org.\\
+\texttt{yt plot} \textit{dataset} \textemdash\ Create a set of images.\\
+\texttt{yt render} \textit{dataset} \textemdash\ Create a simple
  volume rendering. \\
-\texttt{yt mapserver} {\it dataset} \textemdash\ View a plot/projection in a Gmaps-like
+\texttt{yt mapserver} \textit{dataset} \textemdash\ View a plot/projection in a Gmaps-like
  interface. \\
-\texttt{yt pastebin} {\it text.out} \textemdash\ Post text to the pastebin at
- paste.yt-project.org. \\ 
-\texttt{yt pastebin\_grab} {\it identifier} \textemdash\ Print content of pastebin to
+\texttt{yt pastebin} \textit{text.out} \textemdash\ Post text to the pastebin at
+ paste.yt-project.org. \\
+\texttt{yt pastebin\_grab} \textit{identifier} \textemdash\ Print content of pastebin to
  STDOUT. \\
 \texttt{yt bugreport} \textemdash\ Report a yt bug. \\
-\texttt{yt hop} {\it dataset} \textemdash\  Run hop on a dataset. \\
+\texttt{yt hop} \textit{dataset} \textemdash\  Run hop on a dataset. \\
 
 \subsection{yt Imports}
 In order to use yt, Python must load the relevant yt modules into memory.
@@ -144,15 +144,15 @@
 used as part of a script.
 \newlength{\MyLen}
 \settowidth{\MyLen}{\texttt{letterpaper}/\texttt{a4paper} \ }
-\texttt{import yt}  \textemdash\ 
+\texttt{import yt}  \textemdash\
 Load yt. \\
-\texttt{from yt.config import ytcfg}  \textemdash\ 
+\texttt{from yt.config import ytcfg}  \textemdash\
 Used to set yt configuration options.
 If used, must be called before importing any other module.\\
-\texttt{from yt.analysis\_modules.\emph{halo\_finding}.api import \textasteriskcentered}  \textemdash\ 
+\texttt{from yt.analysis\_modules.\emph{halo\_finding}.api import \textasteriskcentered}  \textemdash\
 Load halo finding modules. Other modules
-are loaded in a similar way by swapping the 
-{\em emphasized} text.
+are loaded in a similar way by swapping the
+\emph{emphasized} text.
 See the \textbf{Analysis Modules} section for a listing and short descriptions of each.
 
 \subsection{YTArray}
@@ -163,32 +163,32 @@
 very brief list of some useful ones.
 \settowidth{\MyLen}{\texttt{multicol} }\\
 \texttt{v = a.in\_cgs()} \textemdash\ Return the array in CGS units \\
-\texttt{v = a.in\_units('Msun/pc**3')} \textemdash\ Return the array in solar masses per cubic parsec \\ 
+\texttt{v = a.in\_units('Msun/pc**3')} \textemdash\ Return the array in solar masses per cubic parsec \\
 \texttt{v = a.max(), a.min()} \textemdash\ Return maximum, minimum of \texttt{a}. \\
 \texttt{index = a.argmax(), a.argmin()} \textemdash\ Return index of max,
 min value of \texttt{a}.\\
-\texttt{v = a[}{\it index}\texttt{]} \textemdash\ Select a single value from \texttt{a} at location {\it index}.\\
-\texttt{b = a[}{\it i:j}\texttt{]} \textemdash\ Select the slice of values from
+\texttt{v = a[}\textit{index}\texttt{]} \textemdash\ Select a single value from \texttt{a} at location \textit{index}.\\
+\texttt{b = a[}\textit{i:j}\texttt{]} \textemdash\ Select the slice of values from
 \texttt{a} between
-locations {\it i} to {\it j-1} saved to a new Numpy array \texttt{b} with length {\it j-i}. \\
+locations \textit{i} to \textit{j-1} saved to a new Numpy array \texttt{b} with length \textit{j-i}. \\
 \texttt{sel = (a > const)} \textemdash\ Create a new boolean Numpy array
 \texttt{sel}, of the same shape as \texttt{a},
 that marks which values of \texttt{a > const}. Other operators (e.g. \textless, !=, \%) work as well.\\
 \texttt{b = a[sel]} \textemdash\ Create a new Numpy array \texttt{b} made up of
 elements from \texttt{a} that correspond to elements of \texttt{sel}
-that are {\it True}. In the above example \texttt{b} would be all elements of \texttt{a} that are greater than \texttt{const}.\\
-\texttt{a.write\_hdf5({\it filename.h5})} \textemdash\ Save \texttt{a} to the hdf5 file {\it filename.h5}.\\
+that are \textit{True}. In the above example \texttt{b} would be all elements of \texttt{a} that are greater than \texttt{const}.\\
+\texttt{a.write\_hdf5(\textit{filename.h5})} \textemdash\ Save \texttt{a} to the hdf5 file \textit{filename.h5}.\\
 
 \subsection{IPython Tips}
 \settowidth{\MyLen}{\texttt{multicol} }
 These tips work if IPython has been loaded, typically either by invoking
 \texttt{iyt} or \texttt{yt load} on the command line, or using the IPython notebook (\texttt{yt notebook}).
 \texttt{Tab complete} \textemdash\ IPython will attempt to auto-complete a
-variable or function name when the \texttt{Tab} key is pressed, e.g. {\it HaloFi}\textendash\texttt{Tab} would auto-complete
-to {\it HaloFinder}. This also works with imports, e.g. {\it from numpy.random.}\textendash\texttt{Tab}
+variable or function name when the \texttt{Tab} key is pressed, e.g. \textit{HaloFi}\textendash\texttt{Tab} would auto-complete
+to \textit{HaloFinder}. This also works with imports, e.g. \textit{from numpy.random.}\textendash\texttt{Tab}
 would give you a list of random functions (note the trailing period before hitting \texttt{Tab}).\\
 \texttt{?, ??} \textemdash\ Appending one or two question marks at the end of any object gives you
-detailed information about it, e.g. {\it variable\_name}?.\\
+detailed information about it, e.g. \textit{variable\_name}?.\\
 Below a few IPython ``magics'' are listed, which are IPython-specific shortcut commands.\\
 \texttt{\%paste} \textemdash\ Paste content from the system clipboard into the IPython shell.\\
 \texttt{\%hist} \textemdash\ Print recent command history.\\
@@ -204,40 +204,40 @@
 
 \subsection{Load and Access Data}
 The first step in using yt is to reference a simulation snapshot.
-After that, simulation data is generally accessed in yt using {\it Data Containers} which are Python objects
+After that, simulation data is generally accessed in yt using \textit{Data Containers} which are Python objects
 that define a region of simulation space from which data should be selected.
 \settowidth{\MyLen}{\texttt{multicol} }
-\texttt{ds = yt.load(}{\it dataset}\texttt{)} \textemdash\   Reference a single snapshot.\\
+\texttt{ds = yt.load(}\textit{dataset}\texttt{)} \textemdash\   Reference a single snapshot.\\
 \texttt{dd = ds.all\_data()} \textemdash\ Select the entire volume.\\
-\texttt{a = dd[}{\it field\_name}\texttt{]} \textemdash\ Copies the contents of {\it field} into the
+\texttt{a = dd[}\textit{field\_name}\texttt{]} \textemdash\ Copies the contents of \textit{field} into the
 YTArray \texttt{a}. Similarly for other data containers.\\
 \texttt{ds.field\_list} \textemdash\ A list of available fields in the snapshot. \\
 \texttt{ds.derived\_field\_list} \textemdash\ A list of available derived fields
 in the snapshot. \\
 \texttt{val, loc = ds.find\_max("Density")} \textemdash\ Find the \texttt{val}ue of
 the maximum of the field \texttt{Density} and its \texttt{loc}ation. \\
-\texttt{sp = ds.sphere(}{\it cen}\texttt{,}{\it radius}\texttt{)} \textemdash\   Create a spherical data 
-container. {\it cen} may be a coordinate, or ``max'' which 
-centers on the max density point. {\it radius} may be a float in 
-code units or a tuple of ({\it length, unit}).\\
+\texttt{sp = ds.sphere(}\textit{cen}\texttt{,}\textit{radius}\texttt{)} \textemdash\   Create a spherical data
+container. \textit{cen} may be a coordinate, or ``max'' which
+centers on the max density point. \textit{radius} may be a float in
+code units or a tuple of (\textit{length, unit}).\\
 
-\texttt{re = ds.region({\it cen}, {\it left edge}, {\it right edge})} \textemdash\ Create a
-rectilinear data container. {\it cen} is required but not used.
-{\it left} and {\it right edge} are coordinate values that define the region.
+\texttt{re = ds.region(\textit{cen}, \textit{left edge}, \textit{right edge})} \textemdash\ Create a
+rectilinear data container. \textit{cen} is required but not used.
+\textit{left} and \textit{right edge} are coordinate values that define the region.
 
-\texttt{di = ds.disk({\it cen}, {\it normal}, {\it radius}, {\it height})} \textemdash\ 
-Create a cylindrical data container centered at {\it cen} along the 
-direction set by {\it normal},with total length
- 2$\times${\it height} and with radius {\it radius}. \\
- 
-\texttt{ds.save\_object(sp, {\it ``sp\_for\_later''})} \textemdash\ Save an object (\texttt{sp}) for later use.\\
-\texttt{sp = ds.load\_object({\it ``sp\_for\_later''})} \textemdash\ Recover a saved object.\\
+\texttt{di = ds.disk(\textit{cen}, \textit{normal}, \textit{radius}, \textit{height})} \textemdash\
+Create a cylindrical data container centered at \textit{cen} along the
+direction set by \textit{normal}, with total length
+ 2$\times$\textit{height} and with radius \textit{radius}. \\
+
+\texttt{ds.save\_object(sp, \textit{``sp\_for\_later''})} \textemdash\ Save an object (\texttt{sp}) for later use.\\
+\texttt{sp = ds.load\_object(\textit{``sp\_for\_later''})} \textemdash\ Recover a saved object.\\
 
 
 \subsection{Defining New Fields}
-\texttt{yt} expects on-disk fields, fields generated on-demand and in-memory. 
+\texttt{yt} supports on-disk fields, fields generated on-demand, and in-memory fields.
 A field can either be created before a dataset is loaded using \texttt{add\_field}:
-\texttt{def \_metal\_mass({\it field},{\it data})}\\
+\texttt{def \_metal\_mass(\textit{field},\textit{data})}\\
 \texttt{\hspace{4 mm} return data["metallicity"]*data["cell\_mass"]}\\
 \texttt{add\_field("metal\_mass", units='g', function=\_metal\_mass)}\\
 Or added to an existing dataset using \texttt{ds.add\_field}:
@@ -245,34 +245,34 @@
 
 \subsection{Slices and Projections}
 \settowidth{\MyLen}{\texttt{multicol} }
-\texttt{slc = yt.SlicePlot(ds, {\it axis or normal vector}, {\it field}, {\it center=}, {\it width=}, {\it weight\_field=}, {\it additional parameters})} \textemdash\ Make a slice plot
-perpendicular to {\it axis} (specified via 'x', 'y', or 'z') or a normal vector for an off-axis slice of {\it field} weighted by {\it weight\_field} at (code-units) {\it center} with 
-{\it width} in code units or a (value, unit) tuple. Hint: try {\it yt.SlicePlot?} in IPython to see additional parameters.\\
-\texttt{slc.save({\it file\_prefix})} \textemdash\ Save the slice to a png with name prefix {\it file\_prefix}.
+\texttt{slc = yt.SlicePlot(ds, \textit{axis or normal vector}, \textit{field}, \textit{center=}, \textit{width=}, \textit{weight\_field=}, \textit{additional parameters})} \textemdash\ Make a slice plot
+perpendicular to \textit{axis} (specified via 'x', 'y', or 'z') or a normal vector for an off-axis slice of \textit{field} weighted by \textit{weight\_field} at (code-units) \textit{center} with
+\textit{width} in code units or a (value, unit) tuple. Hint: try \textit{yt.SlicePlot?} in IPython to see additional parameters.\\
+\texttt{slc.save(\textit{file\_prefix})} \textemdash\ Save the slice to a png with name prefix \textit{file\_prefix}.
 \texttt{.save()} works similarly for the commands below.\\
 
-\texttt{prj = yt.ProjectionPlot(ds, {\it axis}, {\it field}, {\it addit. params})} \textemdash\ Make a projection. \\
-\texttt{prj = yt.OffAxisProjectionPlot(ds, {\it normal}, {\it fields}, {\it center=}, {\it width=}, {\it depth=},{\it north\_vector=},{\it weight\_field=})} \textemdash Make an off axis projection. Note this takes an array of fields. \\
+\texttt{prj = yt.ProjectionPlot(ds, \textit{axis}, \textit{field}, \textit{addit. params})} \textemdash\ Make a projection. \\
+\texttt{prj = yt.OffAxisProjectionPlot(ds, \textit{normal}, \textit{fields}, \textit{center=}, \textit{width=}, \textit{depth=},\textit{north\_vector=},\textit{weight\_field=})} \textemdash Make an off axis projection. Note this takes an array of fields. \\
 
 \subsection{Plot Annotations}
 \settowidth{\MyLen}{\texttt{multicol} }
-Plot callbacks are functions itemized in a registry that is attached to every plot object. They can be accessed and then called like \texttt{ prj.annotate\_velocity(factor=16, normalize=False)}. Most callbacks also accept a {\it plot\_args} dict that is fed to matplotlib annotator. \\
-\texttt{velocity({\it factor=},{\it scale=},{\it scale\_units=}, {\it normalize=})} \textemdash\ Uses field "x-velocity" to draw quivers\\
-\texttt{magnetic\_field({\it factor=},{\it scale=},{\it scale\_units=}, {\it normalize=})} \textemdash\ Uses field "Bx" to draw quivers\\
-\texttt{quiver({\it field\_x},{\it field\_y},{\it factor=},{\it scale=},{\it scale\_units=}, {\it normalize=})} \\
-\texttt{contour({\it field=},{\it ncont=},{\it factor=},{\it clim=},{\it take\_log=}, {\it additional parameters})} \textemdash Plots a number of contours {\it ncont} to interpolate {\it field} optionally using {\it take\_log}, upper and lower {\it c}ontour{\it lim}its and {\it factor} number of points in the interpolation.\\
-\texttt{grids({\it alpha=}, {\it draw\_ids=}, {\it periodic=}, {\it min\_level=}, {\it max\_level=})} \textemdash Add grid boundaries. \\
-\texttt{streamlines({\it field\_x},{\it field\_y},{\it factor=},{\it density=})}\\
-\texttt{clumps({\it clumplist})} \textemdash\ Generate {\it clumplist} using the clump finder and plot. \\
-\texttt{arrow({\it pos}, {\it code\_size})} Add an arrow at a {\it pos}ition. \\
-\texttt{point({\it pos}, {\it text})} \textemdash\ Add text at a {\it pos}ition. \\
-\texttt{marker({\it pos}, {\it marker=})} \textemdash\ Add a matplotlib-defined marker at a {\it pos}ition. \\
-\texttt{sphere({\it center}, {\it radius}, {\it text=})} \textemdash\ Draw a circle and append {\it text}.\\
-\texttt{hop\_circles({\it hop\_output}, {\it max\_number=}, {\it annotate=}, {\it min\_size=}, {\it max\_size=}, {\it font\_size=}, {\it print\_halo\_size=}, {\it fixed\_radius=}, {\it min\_mass=}, {\it print\_halo\_mass=}, {\it width=})} \textemdash\ Draw a halo, printing it's ID, mass, clipping halos depending on number of particles ({\it size}) and optionally fixing the drawn circle radius to be constant for all halos.\\
-\texttt{hop\_particles({\it hop\_output},{\it max\_number=},{\it p\_size=},\\
-{\it min\_size},{\it alpha=})} \textemdash\ Draw particle positions for member halos with a certain number of pixels per particle.\\
-\texttt{particles({\it width},{\it p\_size=},{\it col=}, {\it marker=}, {\it stride=}, {\it ptype=}, {\it stars\_only=}, {\it dm\_only=}, {\it minimum\_mass=}, {\it alpha=})}  \textemdash\  Draw particles of {\it p\_size} pixels in a slab of {\it width} with {\it col}or using a matplotlib {\it marker} plotting only every {\it stride} number of particles.\\
-\texttt{title({\it text})}\\
+Plot callbacks are functions itemized in a registry that is attached to every plot object. They can be accessed and then called like \texttt{ prj.annotate\_velocity(factor=16, normalize=False)}. Most callbacks also accept a \textit{plot\_args} dict that is fed to matplotlib annotator. \\
+\texttt{velocity(\textit{factor=},\textit{scale=},\textit{scale\_units=}, \textit{normalize=})} \textemdash\ Uses field "x-velocity" to draw quivers\\
+\texttt{magnetic\_field(\textit{factor=},\textit{scale=},\textit{scale\_units=}, \textit{normalize=})} \textemdash\ Uses field "Bx" to draw quivers\\
+\texttt{quiver(\textit{field\_x},\textit{field\_y},\textit{factor=},\textit{scale=},\textit{scale\_units=}, \textit{normalize=})} \\
+\texttt{contour(\textit{field=},\textit{ncont=},\textit{factor=},\textit{clim=},\textit{take\_log=}, \textit{additional parameters})} \textemdash Plots a number of contours \textit{ncont} to interpolate \textit{field} optionally using \textit{take\_log}, upper and lower \textit{c}ontour\textit{lim}its and \textit{factor} number of points in the interpolation.\\
+\texttt{grids(\textit{alpha=}, \textit{draw\_ids=}, \textit{periodic=}, \textit{min\_level=}, \textit{max\_level=})} \textemdash Add grid boundaries. \\
+\texttt{streamlines(\textit{field\_x},\textit{field\_y},\textit{factor=},\textit{density=})}\\
+\texttt{clumps(\textit{clumplist})} \textemdash\ Generate \textit{clumplist} using the clump finder and plot. \\
+\texttt{arrow(\textit{pos}, \textit{code\_size})} Add an arrow at a \textit{pos}ition. \\
+\texttt{point(\textit{pos}, \textit{text})} \textemdash\ Add text at a \textit{pos}ition. \\
+\texttt{marker(\textit{pos}, \textit{marker=})} \textemdash\ Add a matplotlib-defined marker at a \textit{pos}ition. \\
+\texttt{sphere(\textit{center}, \textit{radius}, \textit{text=})} \textemdash\ Draw a circle and append \textit{text}.\\
+\texttt{hop\_circles(\textit{hop\_output}, \textit{max\_number=}, \textit{annotate=}, \textit{min\_size=}, \textit{max\_size=}, \textit{font\_size=}, \textit{print\_halo\_size=}, \textit{fixed\_radius=}, \textit{min\_mass=}, \textit{print\_halo\_mass=}, \textit{width=})} \textemdash\ Draw a halo, printing its ID, mass, clipping halos depending on number of particles (\textit{size}) and optionally fixing the drawn circle radius to be constant for all halos.\\
+\texttt{hop\_particles(\textit{hop\_output},\textit{max\_number=},\textit{p\_size=},\\
+\textit{min\_size},\textit{alpha=})} \textemdash\ Draw particle positions for member halos with a certain number of pixels per particle.\\
+\texttt{particles(\textit{width},\textit{p\_size=},\textit{col=}, \textit{marker=}, \textit{stride=}, \textit{ptype=}, \textit{stars\_only=}, \textit{dm\_only=}, \textit{minimum\_mass=}, \textit{alpha=})}  \textemdash\  Draw particles of \textit{p\_size} pixels in a slab of \textit{width} with \textit{col}or using a matplotlib \textit{marker} plotting only every \textit{stride} number of particles.\\
+\texttt{title(\textit{text})}\\
 
 \subsection{The $\sim$/.yt/ Directory}
 \settowidth{\MyLen}{\texttt{multicol} }
@@ -297,12 +297,12 @@
 
 
 \subsection{Parallel Analysis}
-\settowidth{\MyLen}{\texttt{multicol}} 
+\settowidth{\MyLen}{\texttt{multicol}}
 Nearly all of yt is parallelized using
-MPI.  The {\it mpi4py} package must be installed for parallelism in yt.  To
-install {\it pip install mpi4py} on the command line usually works.
+MPI\@.  The \textit{mpi4py} package must be installed for parallelism in yt; running
+\textit{pip install mpi4py} on the command line usually suffices.
 Execute python in parallel with a command similar to this:\\
-{\it mpirun -n 12 python script.py}\\
+\textit{mpirun -n 12 python script.py}\\
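+A minimal sketch of what \texttt{script.py} might contain (the dataset path is illustrative):\\
+\texttt{import yt}\\
+\texttt{yt.enable\_parallelism()}\\
+\texttt{ds = yt.load('Enzo\_64/RD0006/RedshiftOutput0006')}\\
+\texttt{print(ds.all\_data().quantities.extrema('density'))}\\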
 The file \texttt{script.py} must call \texttt{yt.enable\_parallelism()} to
 turn on yt's parallelism.  If this doesn't happen, all cores will execute the
 same serial yt script.  This command may differ for each system on which you use
@@ -320,12 +320,12 @@
 \texttt{hg clone https://bitbucket.org/yt\_analysis/yt} \textemdash\ Clone a copy of yt. \\
 \texttt{hg status} \textemdash\ Files changed in working directory.\\
 \texttt{hg diff} \textemdash\ Print diff of all changed files in working directory. \\
-\texttt{hg diff -r{\it RevX} -r{\it RevY}} \textemdash\ Print diff of all changes between revision {\it RevX} and {\it RevY}.\\
+\texttt{hg diff -r\textit{RevX} -r\textit{RevY}} \textemdash\ Print diff of all changes between revision \textit{RevX} and \textit{RevY}.\\
 \texttt{hg log} \textemdash\ History of changes.\\
-\texttt{hg cat -r{\it RevX file}} \textemdash\ Print the contents of {\it file} from revision {\it RevX}.\\
+\texttt{hg cat -r\textit{RevX file}} \textemdash\ Print the contents of \textit{file} from revision \textit{RevX}.\\
 \texttt{hg heads} \textemdash\ Print all the current heads. \\
-\texttt{hg revert -r{\it RevX file}} \textemdash\ Revert {\it file} to revision {\it RevX}. On-disk changed version is
-moved to {\it file.orig}. \\
+\texttt{hg revert -r\textit{RevX file}} \textemdash\ Revert \textit{file} to revision \textit{RevX}. On-disk changed version is
+moved to \textit{file.orig}. \\
 \texttt{hg commit} \textemdash\ Commit changes to repository. \\
 \texttt{hg push} \textemdash\ Push changes to default remote repository. \\
 \texttt{hg pull} \textemdash\ Pull changes from default remote repository. \\

diff -r 25b77f284f8b48cf271c48aae3ba8b34d100489e -r dfda0ce6531cadeee95b16b820c3a6203f9eaf42 doc/helper_scripts/code_support.py
--- a/doc/helper_scripts/code_support.py
+++ b/doc/helper_scripts/code_support.py
@@ -85,7 +85,7 @@
 print("|| . ||", end=' ')
 for c in code_names:
     print("%s || " % (c), end=' ')
-print() 
+print()
 
 for vn in vals:
     print("|| !%s ||" % (vn), end=' ')

diff -r 25b77f284f8b48cf271c48aae3ba8b34d100489e -r dfda0ce6531cadeee95b16b820c3a6203f9eaf42 doc/helper_scripts/table.py
--- a/doc/helper_scripts/table.py
+++ b/doc/helper_scripts/table.py
@@ -44,7 +44,7 @@
       "A bunch of illustrated examples of how to do things"),
      ("reference/index.html", "Reference Materials",
       "A list of all bundled fields, API documentation, the Change Log..."),
-     ("faq/index.html", "FAQ", 
+     ("faq/index.html", "FAQ",
       "Frequently Asked Questions: answered for you!")
   ]),
 ]

diff -r 25b77f284f8b48cf271c48aae3ba8b34d100489e -r dfda0ce6531cadeee95b16b820c3a6203f9eaf42 doc/helper_scripts/update_recipes.py
--- a/doc/helper_scripts/update_recipes.py
+++ b/doc/helper_scripts/update_recipes.py
@@ -66,7 +66,7 @@
             written = cond_output(output, written)
             ofn = "%s/%s_%s" % (ndir, fn, os.path.basename(ifn))
             open(ofn, "wb").write(open(ifn, "rb").read())
-            output.write(".. image:: _%s/%s_%s\n" % (fn, fn, os.path.basename(ifn)) + 
+            output.write(".. image:: _%s/%s_%s\n" % (fn, fn, os.path.basename(ifn)) +
                          "   :width: 240\n" +
                          "   :target: ../_images/%s_%s\n" % (fn, os.path.basename(ifn))
                         )

diff -r 25b77f284f8b48cf271c48aae3ba8b34d100489e -r dfda0ce6531cadeee95b16b820c3a6203f9eaf42 doc/install_script.sh
--- a/doc/install_script.sh
+++ b/doc/install_script.sh
@@ -1,7 +1,7 @@
 #
 # Hi there!  Welcome to the yt installation script.
 #
-# First things first, if you experience problems, please visit the Help 
+# First things first, if you experience problems, please visit the Help
 # section at http://yt-project.org.
 #
 # This script is designed to create a fully isolated Python installation
@@ -365,7 +365,7 @@
         echo "  * gcc-{,c++,gfortran}"
         echo "  * make"
         echo "  * patch"
-        echo 
+        echo
         echo "You can accomplish this by executing:"
         echo "$ sudo yum install gcc gcc-c++ gcc-gfortran make patch zip"
         echo "$ sudo yum install ncurses-devel uuid-devel openssl-devel readline-devel"
@@ -629,7 +629,7 @@
     [ ! -e $LIB/extracted ] && tar xfz $LIB.tar.gz
     touch $LIB/extracted
     BUILD_ARGS=""
-    if [[ $LIB =~ .*mercurial.* ]] 
+    if [[ $LIB =~ .*mercurial.* ]]
     then
         PYEXE="python2.7"
     else

diff -r 25b77f284f8b48cf271c48aae3ba8b34d100489e -r dfda0ce6531cadeee95b16b820c3a6203f9eaf42 doc/source/_static/custom.css
--- a/doc/source/_static/custom.css
+++ b/doc/source/_static/custom.css
@@ -40,7 +40,7 @@
         padding-bottom: 10px;
     }
     /* since 3.1.0 */
-    .navbar-collapse.collapse.in { 
+    .navbar-collapse.collapse.in {
         display: block!important;
     }
     .collapsing {
@@ -48,7 +48,7 @@
     }
 }
 
-/* 
+/*
 
 Sphinx code literals conflict with the notebook code tag, so we special-case
 literals that are inside text.
@@ -56,7 +56,7 @@
 */
 
 p code {
-    color:  #d14;    
+    color:  #d14;
     white-space: nowrap;
     font-size: 90%;
     background-color: #f9f2f4;
@@ -93,16 +93,16 @@
 */
 
 *[id]:before :not(p) {
-  display: block; 
-  content: " "; 
-  margin-top: -45px; 
-  height: 45px; 
-  visibility: hidden; 
+  display: block;
+  content: " ";
+  margin-top: -45px;
+  height: 45px;
+  visibility: hidden;
 }
 
 /*
 
-Make tables span only half the page. 
+Make tables span only half the page.
 
 */
 

diff -r 25b77f284f8b48cf271c48aae3ba8b34d100489e -r dfda0ce6531cadeee95b16b820c3a6203f9eaf42 doc/source/about/index.rst
--- a/doc/source/about/index.rst
+++ b/doc/source/about/index.rst
@@ -12,10 +12,10 @@
 -----------
 
 yt is a toolkit for analyzing and visualizing quantitative data.  Originally
-written to analyze 3D grid-based astrophysical simulation data, 
+written to analyze 3D grid-based astrophysical simulation data,
 it has grown to handle any kind of data represented in a 2D or 3D volume.
-yt is an Python-based open source project and is open for anyone to use or 
-contribute code.  The entire source code and history is available to all 
+yt is a Python-based open source project and is open for anyone to use or
+contribute code.  The entire source code and history are available to all
 at https://bitbucket.org/yt_analysis/yt .
 
 .. _who-is-yt:
@@ -23,16 +23,16 @@
 Who is yt?
 ----------
 
-As an open-source project, yt has a large number of user-developers.  
-In September of 2014, the yt developer community collectively decided to endow 
-the title of *member* on individuals who had contributed in a significant way 
-to the project.  For a list of those members and a description of their 
-contributions to the code, see 
+As an open-source project, yt has a large number of user-developers.
+In September of 2014, the yt developer community collectively decided to confer
+the title of *member* on individuals who had contributed in a significant way
+to the project.  For a list of those members and a description of their
+contributions to the code, see
 `our members website. <http://yt-project.org/members.html>`_
 
-For an up-to-date list of everyone who has contributed to the yt codebase, 
-see the current `CREDITS <http://bitbucket.org/yt_analysis/yt/src/yt/CREDITS>`_ file.  
-For a more detailed breakup of contributions made by individual users, see out 
+For an up-to-date list of everyone who has contributed to the yt codebase,
+see the current `CREDITS <http://bitbucket.org/yt_analysis/yt/src/yt/CREDITS>`_ file.
+For a more detailed breakdown of contributions made by individual users, see our
 `Open HUB page <https://www.openhub.net/p/yt_amr/contributors?query=&sort=commits>`_.
 
 History of yt
@@ -40,17 +40,17 @@
 
 yt was originally begun by Matthew Turk in 2007 in the course of his graduate
 studies in computational astrophysics.  The code was developed
-as a simple data-reader and exporter for grid-based hydrodynamical simulation 
-data outputs from the *Enzo* code.  Over the next few years, he invited 
+as a simple data-reader and exporter for grid-based hydrodynamical simulation
+data outputs from the *Enzo* code.  Over the next few years, he invited
 collaborators and friends to contribute and use yt.  As the community grew,
-so did the capabilities of yt.  It is now a community-developed project with 
-contributions from many people, the hospitality of several institutions, and 
-benefiting from numerous grants.  With this community-driven approach 
-and contributions from a sizeable population of developers, it has evolved 
-into a fully-featured toolkit for analysis and visualization of 
-multidimensional data.  It relies on no proprietary software -- although it 
-can be and has been extended to interface with proprietary software and 
-libraries -- and has been designed from the ground up to enable users to be 
+so did the capabilities of yt.  It is now a community-developed project with
+contributions from many people, the hospitality of several institutions, and
+benefiting from numerous grants.  With this community-driven approach
+and contributions from a sizeable population of developers, it has evolved
+into a fully-featured toolkit for analysis and visualization of
+multidimensional data.  It relies on no proprietary software -- although it
+can be and has been extended to interface with proprietary software and
+libraries -- and has been designed from the ground up to enable users to be
 as immersed in the data as they desire.
 
 How do I contact yt?
@@ -58,7 +58,7 @@
 
 If you have any questions about the code, please contact the `yt users email
 list <http://lists.spacepope.org/listinfo.cgi/yt-users-spacepope.org>`_.  If
-you're having other problems, please follow the steps in 
+you're having other problems, please follow the steps in
 :ref:`asking-for-help`.
 
 How do I cite yt?
@@ -70,7 +70,7 @@
 entry: ::
 
    @ARTICLE{2011ApJS..192....9T,
-      author = {{Turk}, M.~J. and {Smith}, B.~D. and {Oishi}, J.~S. and {Skory}, S. and 
+      author = {{Turk}, M.~J. and {Smith}, B.~D. and {Oishi}, J.~S. and {Skory}, S. and
    	{Skillman}, S.~W. and {Abel}, T. and {Norman}, M.~L.},
        title = "{yt: A Multi-code Analysis Toolkit for Astrophysical Simulation Data}",
      journal = {\apjs},

diff -r 25b77f284f8b48cf271c48aae3ba8b34d100489e -r dfda0ce6531cadeee95b16b820c3a6203f9eaf42 doc/source/analyzing/analysis_modules/absorption_spectrum.rst
--- a/doc/source/analyzing/analysis_modules/absorption_spectrum.rst
+++ b/doc/source/analyzing/analysis_modules/absorption_spectrum.rst
@@ -5,23 +5,23 @@
 
 .. sectionauthor:: Britton Smith <brittonsmith at gmail.com>
 
-Absorption line spectra, such as shown below, can be made with data created 
-by the (:ref:`light-ray-generator`).  For each element of the ray, column 
-densities are calculated multiplying the number density within a grid cell 
-with the path length of the ray through the cell.  Line profiles are 
-generated using a voigt profile based on the temperature field.  The lines 
-are then shifted according to the redshift recorded by the light ray tool 
-and (optionally) the peculiar velocity of gas along the ray.  Inclusion of the 
-peculiar velocity requires setting ``use_peculiar_velocity`` to True in the call to 
+Absorption line spectra, such as the one shown below, can be made with data
+created by the :ref:`light-ray-generator`.  For each element of the ray, column
+densities are calculated by multiplying the number density within a grid cell
+with the path length of the ray through the cell.  Line profiles are
+generated using a Voigt profile based on the temperature field.  The lines
+are then shifted according to the redshift recorded by the light ray tool
+and (optionally) the peculiar velocity of gas along the ray.  Inclusion of the
+peculiar velocity requires setting ``use_peculiar_velocity`` to True in the call to
 :meth:`~yt.analysis_modules.cosmological_observation.light_ray.light_ray.LightRay.make_light_ray`.
 
-The spectrum generator will output a file containing the wavelength and 
+The spectrum generator will output a file containing the wavelength and
 normalized flux.  It will also output a text file listing all important lines.
 
 .. image:: _images/spectrum_full.png
    :width: 500
 
-An absorption spectrum for the wavelength range from 900 to 1800 Angstroms 
+An absorption spectrum for the wavelength range from 900 to 1800 Angstroms
 made with a light ray extending from z = 0 to z = 0.4.
 
 .. image:: _images/spectrum_zoom.png
@@ -32,7 +32,7 @@
 Creating an Absorption Spectrum
 -------------------------------
 
-To instantiate an AbsorptionSpectrum object, the arguments required are the 
+To instantiate an AbsorptionSpectrum object, the arguments required are the
 minimum and maximum wavelengths, and the number of wavelength bins.
 
 .. code-block:: python
@@ -44,33 +44,33 @@
 Adding Features to the Spectrum
 -------------------------------
 
-Absorption lines and continuum features can then be added to the spectrum.  
-To add a line, you must know some properties of the line: the rest wavelength, 
-f-value, gamma value, and the atomic mass in amu of the atom.  That line must 
+Absorption lines and continuum features can then be added to the spectrum.
+To add a line, you must know some properties of the line: the rest wavelength,
+f-value, gamma value, and the atomic mass in amu of the atom.  That line must
 be tied in some way to a field in the dataset you are loading, and this field
-must be added to the LightRay object when it is created.  Below, we will 
-add the H Lyman-alpha line, which is tied to the neutral hydrogen field 
+must be added to the LightRay object when it is created.  Below, we will
+add the H Lyman-alpha line, which is tied to the neutral hydrogen field
 ('H_number_density').
 
 .. code-block:: python
-  
+
   my_label = 'HI Lya'
   field = 'H_number_density'
   wavelength = 1215.6700 # Angstroms
   f_value = 4.164E-01
   gamma = 6.265e+08
   mass = 1.00794
-  
+
   sp.add_line(my_label, field, wavelength, f_value, gamma, mass, label_threshold=1.e10)
 
-In the above example, the *field* argument tells the spectrum generator which 
-field from the ray data to use to calculate the column density.  The 
-``label_threshold`` keyword tells the spectrum generator to add all lines 
-above a column density of 10 :superscript:`10` cm :superscript:`-2` to the 
-text line list.  If None is provided, as is the default, no lines of this 
+In the above example, the *field* argument tells the spectrum generator which
+field from the ray data to use to calculate the column density.  The
+``label_threshold`` keyword tells the spectrum generator to add all lines
+above a column density of 10 :superscript:`10` cm :superscript:`-2` to the
+text line list.  If None is provided, as is the default, no lines of this
 type will be added to the text list.
 
-Continuum features with optical depths that follow a power law can also be 
+Continuum features with optical depths that follow a power law can also be
 added.  Like adding lines, you must specify details like the wavelength
 and the field in the dataset and LightRay that is tied to this feature.
 Below, we will add H Lyman continuum.
@@ -82,29 +82,29 @@
   wavelength = 912.323660 # Angstroms
   normalization = 1.6e17
   index = 3.0
-  
+
   sp.add_continuum(my_label, field, wavelength, normalization, index)
 
 Making the Spectrum
 -------------------
 
-Once all the lines and continuum are added, it is time to make a spectrum out 
+Once all the lines and continuum are added, it is time to make a spectrum out
 of some light ray data.
 
 .. code-block:: python
 
-  wavelength, flux = sp.make_spectrum('lightray.h5', 
-                                      output_file='spectrum.fits', 
+  wavelength, flux = sp.make_spectrum('lightray.h5',
+                                      output_file='spectrum.fits',
                                       line_list_file='lines.txt',
                                       use_peculiar_velocity=True)
 
-A spectrum will be made using the specified ray data and the wavelength and 
-flux arrays will also be returned.  If ``use_peculiar_velocity`` is set to 
+A spectrum will be made using the specified ray data and the wavelength and
+flux arrays will also be returned.  If ``use_peculiar_velocity`` is set to
 False, the lines will only be shifted according to the redshift.
 
-Three output file formats are supported for writing out the spectrum: fits, 
-hdf5, and ascii.  The file format used is based on the extension provided 
-in the ``output_file`` keyword: ``.fits`` for a fits file, 
+Three output file formats are supported for writing out the spectrum: fits,
+hdf5, and ascii.  The file format used is based on the extension provided
+in the ``output_file`` keyword: ``.fits`` for a fits file,
 ``.h5`` for an hdf5 file, and anything else for an ascii file.
 
 .. note:: To write out a fits file, you must install the `astropy <http://www.astropy.org>`_ python library in order to access the astropy.io.fits module.  You can usually do this by simply running ``pip install astropy`` at the command line.
@@ -112,11 +112,11 @@
 Generating Spectra in Parallel
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
-The spectrum generator can be run in parallel simply by following the procedures 
-laid out in :ref:`parallel-computation` for running yt scripts in parallel.  
-Spectrum generation is parallelized using a multi-level strategy where each 
-absorption line is deposited by a different processor.  If the number of available 
-processors is greater than the number of lines, then the deposition of 
+The spectrum generator can be run in parallel simply by following the procedures
+laid out in :ref:`parallel-computation` for running yt scripts in parallel.
+Spectrum generation is parallelized using a multi-level strategy where each
+absorption line is deposited by a different processor.  If the number of available
+processors is greater than the number of lines, then the deposition of
 individual lines will be divided over multiple processors.
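+
+A minimal sketch of a spectrum script set up for parallel execution (the bin
+count is illustrative; the line parameters are taken from the examples above):
+
+.. code-block:: python
+
+  import yt
+  yt.enable_parallelism()  # must be called before the analysis begins
+
+  from yt.analysis_modules.absorption_spectrum.api import AbsorptionSpectrum
+
+  sp = AbsorptionSpectrum(900.0, 1800.0, 10000)
+  sp.add_line('HI Lya', 'H_number_density', 1215.6700,
+              4.164E-01, 6.265e+08, 1.00794)
+  wavelength, flux = sp.make_spectrum('lightray.h5',
+                                      output_file='spectrum.h5')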
 
 Fitting an Absorption Spectrum
@@ -127,14 +127,14 @@
 This tool can be used to fit absorption spectra, particularly those
 generated using the ``AbsorptionSpectrum`` tool. For more details
 on its uses and implementation please see (`Egan et al. (2013)
-<http://arxiv.org/abs/1307.2244>`_). If you find this tool useful we 
+<http://arxiv.org/abs/1307.2244>`_). If you find this tool useful we
 encourage you to cite accordingly.
 
 Loading an Absorption Spectrum
 ------------------------------
 
-To load an absorption spectrum created by 
-(:class:`~yt.analysis_modules.absorption_spectrum.absorption_spectrum.AbsorptionSpectrum``), 
+To load an absorption spectrum created by
+:class:`~yt.analysis_modules.absorption_spectrum.absorption_spectrum.AbsorptionSpectrum`,
 we specify the output file name. It is advisable to use either an .h5
 or .fits file, rather than an ascii file, to save the spectrum, as rounding
 errors produced in saving to an ascii file will negatively impact fit quality.
@@ -149,7 +149,7 @@
 Specifying Species Properties
 -----------------------------
 
-Before fitting a spectrum, you must specify the properties of all the 
+Before fitting a spectrum, you must specify the properties of all the
 species included when generating the spectrum.
 
 The physical properties needed for each species are the rest wavelength,
@@ -160,7 +160,7 @@
 
 To fine tune the fitting procedure and give results in a minimal
 number of optimizing steps, we specify expected maximum and minimum
-values for the column density, doppler parameter, and redshift. These 
+values for the column density, doppler parameter, and redshift. These
 values can be well outside the range of expected values for a typical line
 and are mostly to prevent the algorithm from fitting to negative values
 or becoming numerically unstable.
@@ -204,7 +204,7 @@
 --------------------------
 
 After loading a spectrum and specifying the properties of the species
-used to generate the spectrum, an apporpriate fit can be generated. 
+used to generate the spectrum, an appropriate fit can be generated.
 
 .. code-block:: python
 
@@ -219,20 +219,20 @@
 recommended to fit species that generate multiple lines first, as a fit
 will only be accepted if all of the lines are fit appropriately using
 a single set of parameters. At the moment no cross correlation between
-lines of different species is performed. 
+lines of different species is performed.
 
-The parameters of the lines that are needed to fit the spectrum are contained 
+The parameters of the lines that are needed to fit the spectrum are contained
 in the ``fitted_lines`` variable. Each species given in ``orderFits`` will
-be a key in the ``fitted_lines`` dictionary. The entry for each species 
-key will be another dictionary containing entries for 'N','b','z', and 
+be a key in the ``fitted_lines`` dictionary. The entry for each species
+key will be another dictionary containing entries for 'N','b','z', and
 'group#' which are the column density, doppler parameter, redshift,
-and associate line complex respectively. The i :superscript:`th` line 
-of a given species is then given by the parameters ``N[i]``, ``b[i]``, 
+and associated line complex, respectively. The i :superscript:`th` line
+of a given species is then given by the parameters ``N[i]``, ``b[i]``,
 and ``z[i]`` and is part of the same complex (and was fitted at the same time)
 as all lines with the same group number as ``group#[i]``.
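+
+A sketch of reading these parameters back (assuming ``'lya'`` was one of the
+species names given in ``orderFits``; the key is hypothetical):
+
+.. code-block:: python
+
+  lya = fitted_lines['lya']
+  for i in range(len(lya['N'])):
+      print(lya['N'][i], lya['b'][i], lya['z'][i], lya['group#'][i])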
 
-The ``fitted_flux`` is an ndarray of the same size as ``flux`` and 
-``wavelength`` that contains the cummulative absorption spectrum generated 
+The ``fitted_flux`` is an ndarray of the same size as ``flux`` and
+``wavelength`` that contains the cumulative absorption spectrum generated
 by the lines contained in ``fitted_lines``.
 
 Saving a Spectrum Fit
@@ -250,8 +250,8 @@
 
 .. sectionauthor:: Hilary Egan <hilary.egan at colorado.edu>
 
-To generate a fit for a spectrum 
-:func:`~yt.analysis_modules.absorption_spectrum.absorption_spectrum_fit.generate_total_fit` 
+To generate a fit for a spectrum
+:func:`~yt.analysis_modules.absorption_spectrum.absorption_spectrum_fit.generate_total_fit`
 is called.
 This function controls the identification of line complexes, the fit
 of a series of absorption lines for each appropriate species, checks of
@@ -260,14 +260,14 @@
 Finding Line Complexes
 ----------------------
 
-Line complexes are found using the 
+Line complexes are found using the
 :func:`~yt.analysis_modules.absorption_spectrum.absorption_spectrum_fit.find_complexes`
-function. The process by which line complexes are found involves walking 
-through the array of flux in order from minimum to maximum wavelength, and 
-finding series of spatially contiguous cells whose flux is less than some 
-limit.  These regions are then checked in terms of an additional flux limit 
-and size.  The bounds of all the passing regions are then listed and returned. 
-Those bounds that cover an exceptionally large region of wavelength space will 
+function. The process by which line complexes are found involves walking
+through the array of flux in order from minimum to maximum wavelength, and
+finding series of spatially contiguous cells whose flux is less than some
+limit.  These regions are then checked in terms of an additional flux limit
+and size.  The bounds of all the passing regions are then listed and returned.
+Those bounds that cover an exceptionally large region of wavelength space will
 be broken up if a suitable cut point is found. This method is only appropriate
 for noiseless spectra.
 
@@ -280,25 +280,25 @@
 unstable when optimizing.
 
 The ``fitLim`` parameter controls the maximum flux that the trough
-of the region can have and still be considered a line complex. This 
+of the region can have and still be considered a line complex. This
 effectively controls the sensitivity to very low column absorbers. Default
-value is ``fitLim`` = 0.99. If a region is identified where the flux of the 
+value is ``fitLim`` = 0.99. If a region is identified where the flux of the
 trough is greater than this value, the region is simply ignored.
 
-The ``minLength`` parameter controls the minimum number of array elements 
+The ``minLength`` parameter controls the minimum number of array elements
 that an identified region must have. This value must be greater than or
 equal to 3 as there are a minimum of 3 free parameters that must be fit.
 Default is ``minLength`` = 3.
 
 The ``maxLength`` parameter controls the maximum number of array elements
 that an identified region can have before it is split into separate regions.
-Default is ``maxLength`` = 1000. This should be adjusted based on the 
+Default is ``maxLength`` = 1000. This should be adjusted based on the
 resolution of the spectrum to remain appropriate. The default value corresponds
-to a wavelength of roughly 50 angstroms. 
+to a wavelength of roughly 50 angstroms.
 
 The ``splitLim`` parameter controls how exceptionally large regions are split.
 When such a region is identified by having more array elements than
-``maxLength``, the point of maximum flux (or minimum absorption) in the 
+``maxLength``, the point of maximum flux (or minimum absorption) in the
 middle two quartiles is identified. If that point has a flux greater than
 or equal to ``splitLim``, then two separate complexes are created: one from
 the lower wavelength edge to the minimum absorption point and the other from
@@ -309,7 +309,7 @@
 Fitting a Line Complex
 ----------------------
 
-After a complex is identified, it is fitted by iteratively adding and 
+After a complex is identified, it is fitted by iteratively adding and
 optimizing a set of Voigt Profiles for a particular species until the
 region is considered successfully fit. The optimizing is accomplished
 using scipy's least squares optimizer. This requires an initial estimate
@@ -326,36 +326,36 @@
 smaller initial guess is given. These values are chosen to make optimization
 faster and more stable by being closer to the actual value, but the final
 results of fitting should not depend on them as they merely provide a
-starting point. 
+starting point.
 
-After the parameters for a line are optimized for the first time, the 
-optimized parameters are then used for the initial guess on subsequent 
-iterations with more lines. 
+After the parameters for a line are optimized for the first time, the
+optimized parameters are then used for the initial guess on subsequent
+iterations with more lines.
 
-The complex is considered successfully fit when the sum of the squares of 
+The complex is considered successfully fit when the sum of the squares of
 the difference between the flux generated from the fit and the desired flux
 profile is less than ``errBound``. ``errBound`` is related to the optional
-parameter to 
+parameter to
 :meth:`~yt.analysis_modules.cosmological_observation.light_ray.light_ray.LightRay.generate_total_fit`,
-``maxAvgError`` by the number of array elements in the region such that 
+``maxAvgError`` by the number of array elements in the region such that
 ``errBound`` = number of elements * ``maxAvgError``.
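+For example, under this relation a region spanning 100 array elements with
+``maxAvgError`` = 1e-4 would use ``errBound`` = 100 * 1e-4 = 0.01.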
 
-There are several other conditions under which the cycle of adding and 
+There are several other conditions under which the cycle of adding and
 optimizing lines will halt. If the error of the optimized fit from adding
 a line is an order of magnitude worse than the error of the fit without
-that line, then it is assumed that the fitting has become unstable and 
+that line, then it is assumed that the fitting has become unstable and
 the latest line is removed. Lines are also prevented from being added if
 the total number of lines is greater than the number of elements in the flux
 array being fit divided by 3. This is because there must not be more free
-parameters in a fit than the number of points to constrain them. 
+parameters in a fit than the number of points to constrain them.
 
 Checking Fit Results
 --------------------
 
 After an acceptable fit for a region is determined, there are several steps
-the algorithm must go through to validate the fits. 
+the algorithm must go through to validate the fits.
 
-First, the parameters must be in a reasonable range. This is a check to make 
+First, the parameters must be in a reasonable range. This is a check to make
 sure that the optimization did not become unstable and generate a fit that
 diverges wildly outside the region where the fit was performed. This way, even
 if a particular complex cannot be fit, the rest of the spectrum fitting still
@@ -363,13 +363,13 @@
 in the species parameter dictionary. These are merely broad limits that will
 prevent numerical instability rather than physical limits.
 
-In cases where a single species generates multiple lines (as in the OVI 
+In cases where a single species generates multiple lines (as in the OVI
 doublet), the fits are then checked for higher wavelength lines. Originally
 the fits are generated only considering the lowest wavelength fit to a region.
 This is because we perform the fitting of complexes in order from the lowest
 wavelength to the highest, so any contribution to a complex being fit must
 come from the lower wavelength as the higher wavelength contributions would
-already have been subtracted out after fitting the lower wavelength. 
+already have been subtracted out after fitting the lower wavelength.
 
 Saturated Lyman Alpha Fitting Tools
 -----------------------------------
@@ -380,8 +380,8 @@
 The basic approach is to simply try a much wider range of initial parameter
 guesses in order to find the true optimization minimum, rather than getting
 stuck in a local minimum. A set of hard coded initial parameter guesses
-for Lyman alpha lines is given by the function 
+for Lyman alpha lines is given by the function
 :func:`~yt.analysis_modules.absorption_spectrum.absorption_spectrum_fit.get_test_lines`.
 Also included in these parameter guesses is an initial guess of a high
-column cool line overlapping a lower column warm line, indictive of a 
+column cool line overlapping a lower column warm line, indicative of a
 broad Lyman alpha (BLA) absorber.

diff -r 25b77f284f8b48cf271c48aae3ba8b34d100489e -r dfda0ce6531cadeee95b16b820c3a6203f9eaf42 doc/source/analyzing/analysis_modules/clump_finding.rst
--- a/doc/source/analyzing/analysis_modules/clump_finding.rst
+++ b/doc/source/analyzing/analysis_modules/clump_finding.rst
@@ -3,17 +3,17 @@
 Clump Finding
 =============
 
-The clump finder uses a contouring algorithm to identified topologically 
-disconnected structures within a dataset.  This works by first creating a 
-single contour over the full range of the contouring field, then continually 
-increasing the lower value of the contour until it reaches the maximum value 
-of the field.  As disconnected structures are identified as separate contoures, 
-the routine continues recursively through each object, creating a hierarchy of 
-clumps.  Individual clumps can be kept or removed from the hierarchy based on 
-the result of user-specified functions, such as checking for gravitational 
+The clump finder uses a contouring algorithm to identify topologically
+disconnected structures within a dataset.  This works by first creating a
+single contour over the full range of the contouring field, then continually
+increasing the lower value of the contour until it reaches the maximum value
+of the field.  As disconnected structures are identified as separate contours,
+the routine continues recursively through each object, creating a hierarchy of
+clumps.  Individual clumps can be kept or removed from the hierarchy based on
+the result of user-specified functions, such as checking for gravitational
 boundedness.  A sample recipe can be found in :ref:`cookbook-find_clumps`.
 
-The clump finder requires a data object (see :ref:`data-objects`) and a field 
+The clump finder requires a data object (see :ref:`data-objects`) and a field
 over which the contouring is to be performed.
 
 .. code:: python
@@ -28,11 +28,11 @@
 
    master_clump = Clump(data_source, ("gas", "density"))
 
-At this point, every isolated contour will be considered a clump, 
-whether this is physical or not.  Validator functions can be added to 
-determine if an individual contour should be considered a real clump.  
-These functions are specified with the ``Clump.add_validator`` function.  
-Current, two validators exist: a minimum number of cells and gravitational 
+At this point, every isolated contour will be considered a clump,
+whether this is physical or not.  Validator functions can be added to
+determine if an individual contour should be considered a real clump.
+These functions are specified with the ``Clump.add_validator`` function.
+Currently, two validators exist: a minimum number of cells and gravitational
 boundedness.
 
 .. code:: python
@@ -41,9 +41,9 @@
 
    master_clump.add_validator("gravitationally_bound", use_particles=False)
 
-As many validators as desired can be added, and a clump is only kept if all 
-return True.  If not, a clump is remerged into its parent.  Custom validators 
-can easily be added.  A validator function must only accept a ``Clump`` object 
+As many validators as desired can be added, and a clump is only kept if all
+return True.  If not, a clump is remerged into its parent.  Custom validators
+can easily be added.  A validator function must only accept a ``Clump`` object
 and either return True or False.
 
 .. code:: python
@@ -52,16 +52,16 @@
        return (clump["gas", "cell_mass"].sum() >= min_mass)
    add_validator("minimum_gas_mass", _minimum_gas_mass)
 
-The ``add_validator`` function adds the validator to a registry that can 
-be accessed by the clump finder.  Then, the validator can be added to the 
+The ``add_validator`` function adds the validator to a registry that can
+be accessed by the clump finder.  Then, the validator can be added to the
 clump finding just like the others.
 
 .. code:: python
 
    master_clump.add_validator("minimum_gas_mass", ds.quan(1.0, "Msun"))
 
-The clump finding algorithm accepts the ``Clump`` object, the initial minimum 
-and maximum of the contouring field, and the step size.  The lower value of the 
+The clump finding algorithm accepts the ``Clump`` object, the initial minimum
+and maximum of the contouring field, and the step size.  The lower value of the
 contour finder will be continually multiplied by the step size.
 
 .. code:: python
@@ -71,9 +71,9 @@
    step = 2.0
    find_clumps(master_clump, c_min, c_max, step)
 
-After the clump finding has finished, the master clump will represent the top 
-of a hierarchy of clumps.  The ``children`` attribute within a ``Clump`` object 
-contains a list of all sub-clumps.  Each sub-clump is also a ``Clump`` object 
+After the clump finding has finished, the master clump will represent the top
+of a hierarchy of clumps.  The ``children`` attribute within a ``Clump`` object
+contains a list of all sub-clumps.  Each sub-clump is also a ``Clump`` object
 with its own ``children`` attribute, and so on.
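+
+As a sketch of walking this hierarchy by hand (assuming ``children`` is empty
+or None for leaf clumps):
+
+.. code:: python
+
+   def count_clumps(clump):
+       # count this clump plus all of its descendants, recursively
+       children = clump.children or []
+       return 1 + sum(count_clumps(child) for child in children)
+
+   print(count_clumps(master_clump))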
 
 A number of helper routines exist for examining the clump hierarchy.
@@ -96,15 +96,15 @@
    print(leaf_clumps[0]["gas", "density"])
    print(leaf_clumps[0].quantities.total_mass())
 
-The writing functions will write out a series or properties about each 
-clump by default.  Additional properties can be appended with the 
+The writing functions will write out a series of properties about each
+clump by default.  Additional properties can be appended with the
 ``Clump.add_info_item`` function.
 
 .. code:: python
 
    master_clump.add_info_item("total_cells")
 
-Just like the validators, custom info items can be added by defining functions 
+Just like the validators, custom info items can be added by defining functions
 that minimally accept a ``Clump`` object and return a string to be printed.
 
 .. code:: python
@@ -121,16 +121,16 @@
 
    master_clump.add_info_item("mass_weighted_jeans_mass")
 
-By default, the following info items are activated: **total_cells**, 
-**cell_mass**, **mass_weighted_jeans_mass**, **volume_weighted_jeans_mass**, 
-**max_grid_level**, **min_number_density**, **max_number_density**, and 
+By default, the following info items are activated: **total_cells**,
+**cell_mass**, **mass_weighted_jeans_mass**, **volume_weighted_jeans_mass**,
+**max_grid_level**, **min_number_density**, **max_number_density**, and
 **distance_to_main_clump**.
 
 Clumps can be visualized using the ``annotate_clumps`` callback.
 
 .. code:: python
 
-   prj = yt.ProjectionPlot(ds, 2, ("gas", "density"), 
+   prj = yt.ProjectionPlot(ds, 2, ("gas", "density"),
                            center='c', width=(20,'kpc'))
    prj.annotate_clumps(leaf_clumps)
    prj.save('clumps')

diff -r 25b77f284f8b48cf271c48aae3ba8b34d100489e -r dfda0ce6531cadeee95b16b820c3a6203f9eaf42 doc/source/analyzing/analysis_modules/cosmology_calculator.rst
--- /dev/null
+++ b/doc/source/analyzing/analysis_modules/cosmology_calculator.rst
@@ -0,0 +1,75 @@
+.. _cosmology-calculator:
+
+Cosmology Calculator
+====================
+
+The cosmology calculator can be used to calculate cosmological distances and
+times given a set of cosmological parameters.  A cosmological dataset, ``ds``,
+will automatically have a cosmology calculator configured with the correct
+parameters associated with it as ``ds.cosmology``.  A standalone
+:class:`~yt.utilities.cosmology.Cosmology` calculator object can be created
+in the following way:
+
+.. code-block:: python
+
+   from yt.utilities.cosmology import Cosmology
+
+   co = Cosmology(hubble_constant=0.7, omega_matter=0.3,
+                  omega_lambda=0.7, omega_curvature=0.0)
+
+Once created, various distance calculations as well as conversions between
+redshift and time are available:
+
+.. notebook-cell::
+
+   from yt.utilities.cosmology import Cosmology
+
+   co = Cosmology(hubble_constant=0.7, omega_matter=0.3,
+                  omega_lambda=0.7, omega_curvature=0.0)
+
+   # Hubble distance (c / h)
+   print("hubble distance", co.hubble_distance())
+
+   # distance from z = 0 to 0.5
+   print("comoving radial distance", co.comoving_radial_distance(0, 0.5).in_units("Mpc/h"))
+
+   # transverse distance
+   print("transverse distance", co.comoving_transverse_distance(0, 0.5).in_units("Mpc/h"))
+
+   # comoving volume
+   print("comoving volume", co.comoving_volume(0, 0.5).in_units("Gpc**3"))
+
+   # angular diameter distance
+   print("angular diameter distance", co.angular_diameter_distance(0, 0.5).in_units("Mpc/h"))
+
+   # angular scale
+   print("angular scale", co.angular_scale(0, 0.5).in_units("Mpc/degree"))
+
+   # luminosity distance
+   print("luminosity distance", co.luminosity_distance(0, 0.5).in_units("Mpc/h"))
+
+   # time between two redshifts
+   print("lookback time", co.lookback_time(0, 0.5).in_units("Gyr"))
+
+   # age of the Universe at a given redshift
+   print("hubble time", co.hubble_time(0).in_units("Gyr"))
+
+   # critical density
+   print("critical density", co.critical_density(0))
+
+   # Hubble parameter at a given redshift
+   print("hubble parameter", co.hubble_parameter(0).in_units("km/s/Mpc"))
+
+   # convert time after Big Bang to redshift
+   my_t = co.quan(8, "Gyr")
+   print("z from t", co.z_from_t(my_t))
+
+   # convert redshift to time after Big Bang (same as Hubble time)
+   print("t from z", co.t_from_z(0.5).in_units("Gyr"))
+
+Note that all distances returned are comoving distances.  All of the above
+functions accept both scalar values and arrays.  The helper functions ``co.quan``
+and ``co.arr`` exist to create unitful ``YTQuantity`` and ``YTArray`` objects with the
+unit registry of the cosmology calculator.  For more information on the usage
+and meaning of each calculation, consult the reference documentation at
+:ref:`cosmology-calculator-ref`.
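+
+A short sketch of the helper functions (the values are illustrative):
+
+.. code-block:: python
+
+   # arrays of times tied to the calculator's unit registry
+   my_times = co.arr([2, 5, 8], "Gyr")
+
+   # the calculations above accept arrays as well as scalars
+   print("z from t array", co.z_from_t(my_times))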

diff -r 25b77f284f8b48cf271c48aae3ba8b34d100489e -r dfda0ce6531cadeee95b16b820c3a6203f9eaf42 doc/source/analyzing/analysis_modules/ellipsoid_analysis.rst
--- a/doc/source/analyzing/analysis_modules/ellipsoid_analysis.rst
+++ b/doc/source/analyzing/analysis_modules/ellipsoid_analysis.rst
@@ -91,9 +91,9 @@
 The center of mass would be the same one as returned by the halo
 finder.  The A, B, C are the largest to smallest magnitude of the
 ellipsoid's semi-principle axes. "e0" is the largest semi-principle
-axis vector direction that would have magnitude A but normalized.  
+axis vector direction that would have magnitude A but normalized.
 The "tilt" is an angle measured in radians.  It can be best described
-as after the rotation about the z-axis to allign e0 to x in the x-y
+as follows: after the rotation about the z-axis to align e0 to x in the x-y
 plane, and then rotating about the y-axis to align e0 completely to
 the x-axis, it is the angle remaining to rotate about the x-axis to align
 both e1 to the y-axis and e2 to the z-axis.
@@ -128,7 +128,7 @@
 Since this is a first attempt, there are many drawbacks and corners
 cut.  Many things listed here will be amended when I have time.
 
-* The ellipsoid 3D container like the boolean object, do not contain 
+* The ellipsoid 3D container, like the boolean object, does not contain
   particle position and velocity information.
 * This currently assumes periodic boundary conditions, so if an
   ellipsoid center is at the edge, it will return part of the opposite
@@ -136,7 +136,7 @@
   periodicity in the future.
 * This method gives a minimalistic ellipsoid centered around the
   center of mass that contains all the particles, but sometimes people
-  prefer an inertial tensor triaxial ellipsoid described in 
+  prefer an inertial tensor triaxial ellipsoid described in
   `Dubinski, Carlberg 1991
   <http://adsabs.harvard.edu/abs/1991ApJ...378..496D>`_.  I have that
   method composed but it is not fully tested yet.

diff -r 25b77f284f8b48cf271c48aae3ba8b34d100489e -r dfda0ce6531cadeee95b16b820c3a6203f9eaf42 doc/source/analyzing/analysis_modules/halo_catalogs.rst
--- a/doc/source/analyzing/analysis_modules/halo_catalogs.rst
+++ b/doc/source/analyzing/analysis_modules/halo_catalogs.rst
@@ -7,21 +7,21 @@
 ----------------------
 
 In yt 3.0, operations relating to the analysis of halos (halo finding,
-merger tree creation, and individual halo analysis) are all brought 
+merger tree creation, and individual halo analysis) are all brought
 together into a single framework. This framework is substantially
-different from the halo analysis machinery available in yt-2.x and is 
-entirely backward incompatible.  
+different from the halo analysis machinery available in yt-2.x and is
+entirely backward incompatible.
 For a direct translation of various halo analysis tasks using yt-2.x
 to yt-3.0 please see :ref:`halo-transition`.
 
-A catalog of halos can be created from any initial dataset given to halo 
+A catalog of halos can be created from any initial dataset given to the halo
 catalog through ``data_ds``. These halos can be found using friends-of-friends,
 HOP, and Rockstar. The ``finder_method`` keyword dictates which halo finder to
-use. The available arguments are :ref:`fof`, :ref:`hop`, and :ref:`rockstar`. 
-For more details on the relative differences between these halo finders see 
+use. The available arguments are :ref:`fof`, :ref:`hop`, and :ref:`rockstar`.
+For more details on the relative differences between these halo finders see
 :ref:`halo_finding`.
 
-The class which holds all of the halo information is the 
+The class which holds all of the halo information is the
 :class:`~yt.analysis_modules.halo_analysis.halo_catalog.HaloCatalog`.
 
 .. code-block:: python
@@ -32,11 +32,11 @@
    data_ds = yt.load('Enzo_64/RD0006/RedshiftOutput0006')
    hc = HaloCatalog(data_ds=data_ds, finder_method='hop')
 
-A halo catalog may also be created from already run rockstar outputs. 
-This method is not implemented for previously run friends-of-friends or 
-HOP finders. Even though rockstar creates one file per processor, 
-specifying any one file allows the full catalog to be loaded. Here we 
-only specify the file output by the processor with ID 0. Note that the 
+A halo catalog may also be created from the outputs of a previous rockstar run.
+This method is not implemented for previously run friends-of-friends or
+HOP finders. Even though rockstar creates one file per processor,
+specifying any one file allows the full catalog to be loaded. Here we
+only specify the file output by the processor with ID 0. Note that the
 argument for supplying a rockstar output is ``halos_ds``, not ``data_ds``.
 
 .. code-block:: python
@@ -44,10 +44,10 @@
    halos_ds = yt.load(path+'rockstar_halos/halos_0.0.bin')
    hc = HaloCatalog(halos_ds=halos_ds)
 
-Although supplying only the binary output of the rockstar halo finder 
-is sufficient for creating a halo catalog, it is not possible to find 
-any new information about the identified halos. To associate the halos 
-with the dataset from which they were found, supply arguments to both 
+Although supplying only the binary output of the rockstar halo finder
+is sufficient for creating a halo catalog, it is not possible to find
+any new information about the identified halos. To associate the halos
+with the dataset from which they were found, supply arguments to both
 ``halos_ds`` and ``data_ds``.
 
 .. code-block:: python
@@ -56,34 +56,35 @@
    data_ds = yt.load('Enzo_64/RD0006/RedshiftOutput0006')
    hc = HaloCatalog(data_ds=data_ds, halos_ds=halos_ds)
 
-A data object can also be supplied via the keyword ``data_source``, 
-associated with either dataset, to control the spatial region in 
+A data object can also be supplied via the keyword ``data_source``,
+associated with either dataset, to control the spatial region in
 which halo analysis will be performed.
 
 Analysis Using Halo Catalogs
 ----------------------------
 
-Analysis is done by adding actions to the 
+Analysis is done by adding actions to the
 :class:`~yt.analysis_modules.halo_analysis.halo_catalog.HaloCatalog`.
-Each action is represented by a callback function that will be run on each halo. 
-There are three types of actions:
+Each action is represented by a callback function that will be run on
+each halo.  There are four types of actions:
 
 * Filters
 * Quantities
 * Callbacks
+* Recipes
 
-A list of all available filters, quantities, and callbacks can be found in 
-:ref:`halo_analysis_ref`.  
-All interaction with this analysis can be performed by importing from 
+A list of all available filters, quantities, and callbacks can be found in
+:ref:`halo_analysis_ref`.
+All interaction with this analysis can be performed by importing from
 halo_analysis.
 
 Filters
 ^^^^^^^
 
-A filter is a function that returns True or False. If the return value 
-is True, any further queued analysis will proceed and the halo in 
-question will be added to the final catalog. If the return value False, 
-further analysis will not be performed and the halo will not be included 
+A filter is a function that returns True or False. If the return value
+is True, any further queued analysis will proceed and the halo in
+question will be added to the final catalog. If the return value is False,
+further analysis will not be performed and the halo will not be included
 in the final catalog.
 
 An example of adding a filter:
@@ -92,11 +93,11 @@
 
    hc.add_filter('quantity_value', 'particle_mass', '>', 1E13, 'Msun')
 
-Currently quantity_value is the only available filter, but more can be 
-added by the user by defining a function that accepts a halo object as 
-the first argument and then adding it as an available filter. If you 
-think that your filter may be of use to the general community, you can 
-add it to ``yt/analysis_modules/halo_analysis/halo_filters.py`` and issue a 
+Currently quantity_value is the only available filter, but more can be
+added by the user by defining a function that accepts a halo object as
+the first argument and then adding it as an available filter. If you
+think that your filter may be of use to the general community, you can
+add it to ``yt/analysis_modules/halo_analysis/halo_filters.py`` and issue a
 pull request.
 
 An example of defining your own filter:
@@ -104,11 +105,11 @@
 .. code-block:: python
 
    def my_filter_function(halo):
-       
+
        # Define condition for filter
        filter_value = True
-       
-       # Return a boolean value 
+
+       # Return a boolean value
        return filter_value
 
    # Add your filter to the filter registry
@@ -120,17 +121,17 @@
 Quantities
 ^^^^^^^^^^
 
-A quantity is a call back that returns a value or values. The return values 
-are stored within the halo object in a dictionary called “quantities.” At 
-the end of the analysis, all of these quantities will be written to disk as 
+A quantity is a callback that returns a value or values. The return values
+are stored within the halo object in a dictionary called “quantities.” At
+the end of the analysis, all of these quantities will be written to disk as
 the final form of the generated halo catalog.
 
-Quantities may be available in the initial fields found in the halo catalog, 
-or calculated from a function after supplying a definition. An example 
-definition of center of mass is shown below. Currently available quantities 
-are center_of_mass and bulk_velocity. Their definitions are available in 
-``yt/analysis_modules/halo_analysis/halo_quantities.py``. If you think that 
-your quantity may be of use to the general community, add it to 
+Quantities may be available in the initial fields found in the halo catalog,
+or calculated from a function after supplying a definition. An example
+definition of center of mass is shown below. Currently available quantities
+are center_of_mass and bulk_velocity. Their definitions are available in
+``yt/analysis_modules/halo_analysis/halo_quantities.py``. If you think that
+your quantity may be of use to the general community, add it to
 ``halo_quantities.py`` and issue a pull request.  Default halo quantities are:
 
 * ``particle_identifier`` -- Halo ID (e.g. 0 to N)
@@ -153,7 +154,7 @@
    def my_quantity_function(halo):
        # Define quantity to return
        quantity = 5
-       
+
        return quantity
 
    # Add your quantity to the quantity registry
@@ -161,9 +162,9 @@
 
 
    # ... Later on in your script
-   hc.add_quantity("my_quantity") 
+   hc.add_quantity("my_quantity")
 
-This quantity will then be accessible for functions called later via the 
+This quantity will then be accessible for functions called later via the
 *quantities* dictionary that is associated with the halo object.
 
 .. code-block:: python
@@ -178,23 +179,23 @@
 Callbacks
 ^^^^^^^^^
 
-A callback is actually the super class for quantities and filters and 
-is a general purpose function that does something, anything, to a Halo 
-object. This can include hanging new attributes off the Halo object, 
-performing analysis and writing to disk, etc. A callback does not return 
+A callback is actually the superclass for quantities and filters and
+is a general purpose function that does something, anything, to a Halo
+object. This can include hanging new attributes off the Halo object,
+performing analysis and writing to disk, etc. A callback does not return
 anything.
 
-An example of using a pre-defined callback where we create a sphere for 
+An example of using a pre-defined callback, where we create a sphere for
 each halo with a radius that is twice the saved ``radius``.
 
 .. code-block:: python
 
    hc.add_callback("sphere", factor=2.0)
-    
-Currently available callbacks are located in 
-``yt/analysis_modules/halo_analysis/halo_callbacks.py``.  New callbacks may 
-be added by using the syntax shown below. If you think that your 
-callback may be of use to the general community, add it to 
+
+Currently available callbacks are located in
+``yt/analysis_modules/halo_analysis/halo_callbacks.py``.  New callbacks may
+be added by using the syntax shown below. If you think that your
+callback may be of use to the general community, add it to
 halo_callbacks.py and issue a pull request.
 
 An example of defining your own callback:
@@ -213,40 +214,84 @@
    # ...  Later on in your script
    hc.add_callback("my_callback")
 
+Recipes
+^^^^^^^
+
+Recipes allow you to create analysis tasks that consist of a series of
+callbacks, quantities, and filters that are run in succession.  An example
+of this is
+:func:`~yt.analysis_modules.halo_analysis.halo_recipes.calculate_virial_quantities`,
+which calculates virial quantities by first creating a sphere container,
+performing 1D radial profiles, and then interpolating to get values at a
+specified threshold overdensity.  All of these operations are separate
+callbacks, but the recipes allow you to add them to your analysis pipeline
+with one call.  For example,
+
+.. code-block:: python
+
+   hc.add_recipe("calculate_virial_quantities", ["radius", "matter_mass"])
+
+The available recipes are located in
+``yt/analysis_modules/halo_analysis/halo_recipes.py``.  New recipes can be
+created in the following manner:
+
+.. code-block:: python
+
+   def my_recipe(halo_catalog, fields, weight_field=None):
+       # create a sphere
+       halo_catalog.add_callback("sphere")
+       # make profiles
+       halo_catalog.add_callback("profile", ["radius"], fields,
+                                 weight_field=weight_field)
+       # save the profile data
+       halo_catalog.add_callback("save_profiles", output_dir="profiles")
+
+   # add recipe to the registry of recipes
+   add_recipe("profile_and_save", my_recipe)
+
+
+   # ...  Later on in your script
+   hc.add_recipe("profile_and_save", ["density", "temperature"],
+                 weight_field="cell_mass")
+
+Note that, unlike callback, filter, and quantity functions, which take a ``Halo``
+object as the first argument, recipe functions should take a ``HaloCatalog``
+object as the first argument.
+
 Running Analysis
 ----------------
 
-After all callbacks, quantities, and filters have been added, the 
+After all callbacks, quantities, and filters have been added, the
 analysis begins with a call to HaloCatalog.create.
 
 .. code-block:: python
 
    hc.create()
 
-The save_halos keyword determines whether the actual Halo objects 
-are saved after analysis on them has completed or whether just the 
-contents of their quantities dicts will be retained for creating the 
-final catalog. The looping over halos uses a call to parallel_objects 
-allowing the user to control how many processors work on each halo. 
-The final catalog is written to disk in the output directory given 
-when the 
-:class:`~yt.analysis_modules.halo_analysis.halo_catalog.HaloCatalog` 
+The save_halos keyword determines whether the actual Halo objects
+are saved after analysis on them has completed or whether just the
+contents of their quantities dicts will be retained for creating the
+final catalog. The looping over halos uses a call to parallel_objects
+allowing the user to control how many processors work on each halo.
+The final catalog is written to disk in the output directory given
+when the
+:class:`~yt.analysis_modules.halo_analysis.halo_catalog.HaloCatalog`
 object was created.
 
-All callbacks, quantities, and filters are stored in an actions list, 
-meaning that they are executed in the same order in which they were added. 
-This enables the use of simple, reusable, single action callbacks that 
-depend on each other. This also prevents unecessary computation by allowing 
-the user to add filters at multiple stages to skip remaining analysis if it 
+All callbacks, quantities, and filters are stored in an actions list,
+meaning that they are executed in the same order in which they were added.
+This enables the use of simple, reusable, single action callbacks that
+depend on each other. This also prevents unnecessary computation by allowing
+the user to add filters at multiple stages to skip remaining analysis if it
 is not warranted.
 
 Saving and Reloading Halo Catalogs
 ----------------------------------
 
-A :class:`~yt.analysis_modules.halo_analysis.halo_catalog.HaloCatalog` 
-saved to disk can be reloaded as a yt dataset with the 
-standard call to load. Any side data, such as profiles, can be reloaded 
-with a ``load_profiles`` callback and a call to 
+A :class:`~yt.analysis_modules.halo_analysis.halo_catalog.HaloCatalog`
+saved to disk can be reloaded as a yt dataset with the
+standard call to load. Any side data, such as profiles, can be reloaded
+with a ``load_profiles`` callback and a call to
 :func:`~yt.analysis_modules.halo_analysis.halo_catalog.HaloCatalog.load`.
 
 .. code-block:: python
@@ -261,5 +306,5 @@
 Worked Example of Halo Catalog in Action
 ----------------------------------------
 
-For a full example of how to use these methods together see 
+For a full example of how to use these methods together see
 :ref:`halo-analysis-example`.

diff -r 25b77f284f8b48cf271c48aae3ba8b34d100489e -r dfda0ce6531cadeee95b16b820c3a6203f9eaf42 doc/source/analyzing/analysis_modules/halo_finders.rst
--- a/doc/source/analyzing/analysis_modules/halo_finders.rst
+++ b/doc/source/analyzing/analysis_modules/halo_finders.rst
@@ -3,16 +3,16 @@
 Halo Finding
 ============
 
-There are three methods of finding particle haloes in yt. The 
-default method is called HOP, a method described 
-in `Eisenstein and Hut (1998) 
-<http://adsabs.harvard.edu/abs/1998ApJ...498..137E>`_. A basic 
-friends-of-friends (e.g. `Efstathiou et al. (1985) 
-<http://adsabs.harvard.edu/abs/1985ApJS...57..241E>`_) halo 
-finder is also implemented. Finally Rockstar (`Behroozi et a. 
-(2011) <http://adsabs.harvard.edu/abs/2011arXiv1110.4372B>`_) is 
-a 6D-phase space halo finder developed by Peter Behroozi that 
-excels in finding subhalos and substrcture, but does not allow 
+There are three methods of finding particle haloes in yt. The
+default method is called HOP, a method described
+in `Eisenstein and Hut (1998)
+<http://adsabs.harvard.edu/abs/1998ApJ...498..137E>`_. A basic
+friends-of-friends (e.g. `Efstathiou et al. (1985)
+<http://adsabs.harvard.edu/abs/1985ApJS...57..241E>`_) halo
+finder is also implemented. Finally Rockstar (`Behroozi et al.
+(2011) <http://adsabs.harvard.edu/abs/2011arXiv1110.4372B>`_) is
+a 6D-phase space halo finder developed by Peter Behroozi that
+excels in finding subhalos and substructure, but does not allow
 multiple particle masses.
 
 .. _hop:
@@ -20,32 +20,32 @@
 HOP
 ---
 
-The version of HOP used in yt is an upgraded version of the 
-`publicly available HOP code 
-<http://cmb.as.arizona.edu/~eisenste/hop/hop.html>`_. Support 
-for 64-bit floats and integers has been added, as well as 
-parallel analysis through spatial decomposition. HOP builds 
+The version of HOP used in yt is an upgraded version of the
+`publicly available HOP code
+<http://cmb.as.arizona.edu/~eisenste/hop/hop.html>`_. Support
+for 64-bit floats and integers has been added, as well as
+parallel analysis through spatial decomposition. HOP builds
 groups in this fashion:
 
-#. Estimates the local density at each particle using a 
+#. Estimates the local density at each particle using a
    smoothing kernel.
 
-#. Builds chains of linked particles by 'hopping' from one 
-   particle to its densest neighbor. A particle which is 
+#. Builds chains of linked particles by 'hopping' from one
+   particle to its densest neighbor. A particle which is
    its own densest neighbor is the end of the chain.
 
-#. All chains that share the same densest particle are 
+#. All chains that share the same densest particle are
    grouped together.
 
-#. Groups are included, linked together, or discarded 
+#. Groups are included, linked together, or discarded
   depending on the user-supplied overdensity
    threshold parameter. The default is 160.0.
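
A minimal invocation sketch, assuming the overdensity threshold is passed
to HOP through ``finder_kwargs`` (the ``threshold`` keyword name is an
assumption; the ``finder_method``/``finder_kwargs`` pattern appears later
in this document):

.. code-block:: python

   import yt
   from yt.analysis_modules.halo_analysis.api import HaloCatalog

   ds = yt.load("Enzo_64/DD0043/data0043")  # placeholder dataset path
   # run HOP with the default overdensity threshold made explicit
   hc = HaloCatalog(data_ds=ds, finder_method='hop',
                    finder_kwargs={'threshold': 160.0})
   hc.create()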
 
 Please see the `HOP method paper 
 <http://adsabs.harvard.edu/abs/1998ApJ...498..137E>`_ for 
 full details and the 
-:class:`~yt.analysis_modules.halo_finding.halo_objects.HOPHalo` and
-:class:`~yt.analysis_modules.halo_finding.halo_objects.Halo` classes.
+:class:`~yt.analysis_modules.halo_finding.halo_objects.HOPHaloFinder`
+documentation.
 
 .. _fof:
 
@@ -53,36 +53,36 @@
 ---
 
 A basic friends-of-friends halo finder is included.  See the
-:class:`~yt.analysis_modules.halo_finding.halo_objects.FOFHalo` and
-:class:`~yt.analysis_modules.halo_finding.halo_objects.Halo` classes.
+:class:`~yt.analysis_modules.halo_finding.halo_objects.FOFHaloFinder`
+documentation.
 
 .. _rockstar:
 
 Rockstar Halo Finding
 ---------------------
 
-Rockstar uses an adaptive hierarchical refinement of friends-of-friends 
-groups in six phase-space dimensions and one time dimension, which 
+Rockstar uses an adaptive hierarchical refinement of friends-of-friends
+groups in six phase-space dimensions and one time dimension, which
 allows for robust (grid-independent, shape-independent, and noise-
-resilient) tracking of substructure. The code is prepackaged with yt, 
-but also `separately available <https://bitbucket.org/gfcstanford/rockstar>`_. The lead 
+resilient) tracking of substructure. The code is prepackaged with yt,
+but also `separately available <https://bitbucket.org/gfcstanford/rockstar>`_. The lead
 developer is Peter Behroozi, and the methods are described in `Behroozi
-et al. 2011 <http://arxiv.org/abs/1110.4372>`_. 
-In order to run the Rockstar halo finder in yt, make sure you've 
+et al. 2011 <http://arxiv.org/abs/1110.4372>`_.
+In order to run the Rockstar halo finder in yt, make sure you've
 :ref:`installed it so that it can integrate with yt <rockstar-installation>`.
 
-At the moment, Rockstar does not support multiple particle masses, 
-instead using a fixed particle mass. This will not affect most dark matter 
+At the moment, Rockstar does not support multiple particle masses,
+instead using a fixed particle mass. This will not affect most dark matter
 simulations, but does make it less useful for finding halos from the stellar
-mass. In simulations where the highest-resolution particles all have the 
+mass. In simulations where the highest-resolution particles all have the
 same mass (i.e. zoom-in, grid-based simulations), one can set up a particle
 filter to select the lowest mass particles and perform the halo finding
-only on those.  See the this cookbook recipe for an example: 
+only on those.  See this cookbook recipe for an example:
 :ref:`cookbook-rockstar-nested-grid`.
 
-To run the Rockstar Halo finding, you must launch python with MPI and 
-parallelization enabled. While Rockstar itself does not require MPI to run, 
-the MPI libraries allow yt to distribute particle information across multiple 
+To run Rockstar halo finding, you must launch python with MPI and
+parallelization enabled. While Rockstar itself does not require MPI to run,
+the MPI libraries allow yt to distribute particle information across multiple
 nodes.
 
 .. warning:: At the moment, running Rockstar inside of yt on multiple compute nodes
@@ -92,23 +92,23 @@
    For example, here is how Rockstar might be called using 24 cores:
    ``mpirun -n 24 --mca btl ^openib python ./run_rockstar.py --parallel``.
 
-The script above configures the Halo finder, launches a server process which 
-disseminates run information and coordinates writer-reader processes. 
-Afterwards, it launches reader and writer tasks, filling the available MPI 
-slots, which alternately read particle information and analyze for halo 
+The script above configures the Halo finder, launches a server process which
+disseminates run information and coordinates writer-reader processes.
+Afterwards, it launches reader and writer tasks, filling the available MPI
+slots, which alternately read particle information and analyze for halo
 content.
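
The driver script itself is not reproduced in this hunk; a minimal sketch
of what such a script might contain (file contents and dataset path are
assumptions, following the ``finder_method`` pattern used elsewhere in
these docs):

.. code-block:: python

   # run_rockstar.py -- hypothetical driver, launched as e.g.
   #   mpirun -n 24 python ./run_rockstar.py --parallel
   import yt
   yt.enable_parallelism()  # turn on MPI parallelism in yt

   from yt.analysis_modules.halo_analysis.api import HaloCatalog

   ds = yt.load("Enzo_64/DD0043/data0043")  # placeholder dataset path
   hc = HaloCatalog(data_ds=ds, finder_method='rockstar')
   hc.create()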
 
-The RockstarHaloFinder class has these options that can be supplied to the 
+The RockstarHaloFinder class has these options that can be supplied to the
 halo catalog through the ``finder_kwargs`` argument:
 
-* ``dm_type``, the index of the dark matter particle. Default is 1. 
+* ``dm_type``, the index of the dark matter particle. Default is 1.
 * ``outbase``, This is where the out*list files that Rockstar makes should be
   placed. Default is 'rockstar_halos'.
-* ``num_readers``, the number of reader tasks (which are idle most of the 
+* ``num_readers``, the number of reader tasks (which are idle most of the
  time). Default is 1.
 * ``num_writers``, the number of writer tasks (which are fed particles and
-  do most of the analysis). Default is MPI_TASKS-num_readers-1. 
-  If left undefined, the above options are automatically 
+  do most of the analysis). Default is MPI_TASKS-num_readers-1.
+  If left undefined, the above options are automatically
   configured from the number of available MPI tasks.
 * ``force_res``, the resolution that Rockstar uses for various calculations
   and smoothing lengths. This is in units of Mpc/h.
@@ -130,14 +130,14 @@
   this option can save disk access time if there are no star particles
   (or other non-dark matter particles) in the simulation. Default: ``False``.
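
Putting a few of these options together (a sketch; the values are
illustrative only):

.. code-block:: python

   # with 24 MPI tasks this leaves 1 server, 1 reader, and 22 writers,
   # matching the MPI_TASKS - num_readers - 1 default described above
   hc = HaloCatalog(data_ds=ds, finder_method='rockstar',
                    finder_kwargs={'dm_type': 1,
                                   'num_readers': 1,
                                   'num_writers': 22,
                                   'outbase': 'rockstar_halos'})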
 
-Rockstar dumps halo information in a series of text (halo*list and 
-out*list) and binary (halo*bin) files inside the ``outbase`` directory. 
-We use the halo list classes to recover the information. 
+Rockstar dumps halo information in a series of text (halo*list and
+out*list) and binary (halo*bin) files inside the ``outbase`` directory.
+We use the halo list classes to recover the information.
 
 Inside the ``outbase`` directory there is a text file named ``datasets.txt``
 that records the connection between ds names and the Rockstar file names.
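
For example, one of the binary outputs can be loaded back like any other
dataset (the file name here follows the pattern used later in the halo
mass function docs):

.. code-block:: python

   import yt

   # halos_0.0.bin is one of the halo*bin files written under outbase
   halos_ds = yt.load("rockstar_halos/halos_0.0.bin")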
 
-For more information, see the 
+For more information, see the
 :class:`~yt.analysis_modules.halo_finding.halo_objects.RockstarHalo` and
 :class:`~yt.analysis_modules.halo_finding.halo_objects.Halo` classes.
 
@@ -146,9 +146,9 @@
 Parallel HOP and FOF
 --------------------
 
-Both the HOP and FoF halo finders can run in parallel using simple 
-spatial decomposition. In order to run them in parallel it is helpful 
-to understand how it works. Below in the first plot (i) is a simplified 
+Both the HOP and FoF halo finders can run in parallel using simple
+spatial decomposition. In order to run them in parallel it is helpful
+to understand how it works. Below in the first plot (i) is a simplified
 depiction of three haloes labeled 1, 2, and 3:
 
 .. image:: _images/ParallelHaloFinder.png
@@ -156,35 +156,35 @@
 
 Halo 3 is twice reflected around the periodic boundary conditions.
 
-In (ii), the volume has been sub-divided into four equal subregions, 
-A,B,C and D, shown with dotted lines. Notice that halo 2 is now in 
-two different subregions, C and D, and that halo 3 is now in three, 
+In (ii), the volume has been sub-divided into four equal subregions,
+A, B, C, and D, shown with dotted lines. Notice that halo 2 is now in
+two different subregions, C and D, and that halo 3 is now in three,
 A, B and D. If the halo finder is run on these four separate subregions,
-halo 1 is be identified as a single halo, but haloes 2 and 3 are split 
-up into multiple haloes, which is incorrect. The solution is to give 
+halo 1 will be identified as a single halo, but haloes 2 and 3 are split
+up into multiple haloes, which is incorrect. The solution is to give
 each subregion padding to oversample into neighboring regions.
 
-In (iii), subregion C has oversampled into the other three regions, 
-with the periodic boundary conditions taken into account, shown by 
+In (iii), subregion C has oversampled into the other three regions,
+with the periodic boundary conditions taken into account, shown by
 dot-dashed lines. The other subregions oversample in a similar way.
 
-The halo finder is then run on each padded subregion independently 
-and simultaneously. By oversampling like this, haloes 2 and 3 will 
-both be enclosed fully in at least one subregion and identified 
+The halo finder is then run on each padded subregion independently
+and simultaneously. By oversampling like this, haloes 2 and 3 will
+both be enclosed fully in at least one subregion and identified
 completely.
 
-Haloes identified with centers of mass inside the padded part of a 
-subregion are thrown out, eliminating the problem of halo duplication. 
+Haloes identified with centers of mass inside the padded part of a
+subregion are thrown out, eliminating the problem of halo duplication.
 The centers for the three haloes are shown with stars. Halo 1 will
 belong to subregion A, 2 to C and 3 to B.
 
-To run with parallel halo finding, you must supply a value for 
-padding in the finder_kwargs argument. The ``padding`` parameter 
-is in simulation units and defaults to 0.02. This parameter is how 
-much padding is added to each of the six sides of a subregion. 
-This value should be 2x-3x larger than the largest expected halo 
-in the simulation. It is unlikely, of course, that the largest 
-object in the simulation will be on a subregion boundary, but there 
+To run with parallel halo finding, you must supply a value for
+padding in the ``finder_kwargs`` argument. The ``padding`` parameter
+is in simulation units and defaults to 0.02. This parameter is how
+much padding is added to each of the six sides of a subregion.
+This value should be 2x-3x larger than the largest expected halo
+in the simulation. It is unlikely, of course, that the largest
+object in the simulation will be on a subregion boundary, but there
 is no way of knowing before the halo finder is run.
 
 .. code-block:: python
@@ -197,10 +197,10 @@
   # --or--
  hc = HaloCatalog(data_ds=ds, finder_method='fof', finder_kwargs={'padding': 0.02})
 
-In general, a little bit of padding goes a long way, and too much 
-just slows down the analysis and doesn't improve the answer (but 
-doesn't change it).  It may be worth your time to run the parallel 
-halo finder at a few paddings to find the right amount, especially 
+In general, a little bit of padding goes a long way, and too much
+just slows down the analysis and doesn't improve the answer (but
+doesn't change it).  It may be worth your time to run the parallel
+halo finder at a few paddings to find the right amount, especially
 if you're analyzing many similar datasets.
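
A sketch of such a scan (the padding values are illustrative only):

.. code-block:: python

   # try a few paddings; the answer should stop changing once the
   # padding safely exceeds the largest halo in the volume
   for padding in [0.01, 0.02, 0.04]:
       hc = HaloCatalog(data_ds=ds, finder_method='fof',
                        finder_kwargs={'padding': padding})
       hc.create()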
 
 .. _rockstar-installation:
@@ -209,15 +209,15 @@
 ---------------------
 
 Because of changes in the Rockstar API over time, yt only currently works with
-a slightly older version of Rockstar.  This version of Rockstar has been 
-slightly patched and modified to run as a library inside of yt. By default it 
-is not installed with yt, but installation is very easy.  The 
-:ref:`install-script` used to install yt from source has a line: 
+a slightly older version of Rockstar.  This version of Rockstar has been
+slightly patched and modified to run as a library inside of yt. By default it
+is not installed with yt, but installation is very easy.  The
+:ref:`install-script` used to install yt from source has a line:
 ``INST_ROCKSTAR=0`` that must be changed to ``INST_ROCKSTAR=1``.  You can
 rerun this installer script over the top of an existing installation, and
-it will only install components missing from the existing installation.  
+it will only install components missing from the existing installation.
 You can do this as follows.  Put your freshly modified install_script in
-the parent directory of the yt installation directory (e.g. the parent of 
+the parent directory of the yt installation directory (e.g. the parent of
 ``$YT_DEST``, ``yt-x86_64``, ``yt-i386``, etc.), and rerun the installer:
 
 .. code-block:: bash

diff -r 25b77f284f8b48cf271c48aae3ba8b34d100489e -r dfda0ce6531cadeee95b16b820c3a6203f9eaf42 doc/source/analyzing/analysis_modules/halo_mass_function.rst
--- a/doc/source/analyzing/analysis_modules/halo_mass_function.rst
+++ b/doc/source/analyzing/analysis_modules/halo_mass_function.rst
@@ -11,21 +11,21 @@
 General Overview
 ----------------
 
-A halo mass function can be created for the halos identified in a cosmological 
+A halo mass function can be created for the halos identified in a cosmological
 simulation, as well as analytic fits using any arbitrary set of cosmological
-paramters. In order to create a mass function for simulated halos, they must
-first be identified (using HOP, FOF, or Rockstar, see 
+parameters. In order to create a mass function for simulated halos, they must
+first be identified (using HOP, FOF, or Rockstar, see
 :ref:`halo_catalog`) and loaded as a halo dataset object. The distribution of
 halo masses will then be found, and can be compared to the analytic prediction
 at the same redshift and using the same cosmological parameters as were used
 in the simulation. Care should be taken in this regard, as the analytic fit
-requires the specification of cosmological parameters that are not necessarily 
+requires the specification of cosmological parameters that are not necessarily
 stored in the halo or simulation datasets, and must be specified by the user.
-Efforts have been made to set reasonable defaults for these parameters, but 
+Efforts have been made to set reasonable defaults for these parameters, but
 setting them to identically match those used in the simulation will produce a
 much better comparison.
 
-Analytic halo mass functions can also be created without a halo dataset by 
+Analytic halo mass functions can also be created without a halo dataset by
 providing either a simulation dataset or specifying cosmological parameters by
 hand. yt includes 5 analytic fits for the halo mass function, any of
 which can be selected.
@@ -65,8 +65,8 @@
 
 This will create a HaloMassFcn object off of which arrays holding the information
 about the analytic mass function hang. Creating the halo mass function for a set
-of simulated halos requires only the loaded halo dataset to be passed as an 
-argument. This also creates the analytic mass function using all parameters that 
+of simulated halos requires only the loaded halo dataset to be passed as an
+argument. This also creates the analytic mass function using all parameters that
 can be extracted from the halo dataset, at the same redshift, spanning a similar
 range of halo masses.
 
@@ -78,7 +78,7 @@
   my_halos = load("rockstar_halos/halos_0.0.bin")
   hmf = HaloMassFcn(halos_ds=my_halos)
 
-A simulation dataset can be passed along with additonal cosmological parameters 
+A simulation dataset can be passed along with additional cosmological parameters
 to create an analytic mass function.
 
 .. code-block:: python
@@ -87,10 +87,10 @@
   from yt.analysis_modules.halo_mass_function.api import *
 
   my_ds = load("RD0027/RedshiftOutput0027")
-  hmf = HaloMassFcn(simulation_ds=my_ds, omega_baryon0=0.05, primordial_index=0.96, 
+  hmf = HaloMassFcn(simulation_ds=my_ds, omega_baryon0=0.05, primordial_index=0.96,
                     sigma8 = 0.8, log_mass_min=5, log_mass_max=9)
 
-The analytic mass function can be created for a set of arbitrary cosmological 
+The analytic mass function can be created for a set of arbitrary cosmological
 parameters without any dataset being passed as an argument.
 
 .. code-block:: python
@@ -98,7 +98,7 @@
   from yt.mods import *
   from yt.analysis_modules.halo_mass_function.api import *
 
-  hmf = HaloMassFcn(omega_baryon0=0.05, omega_matter0=0.27, 
+  hmf = HaloMassFcn(omega_baryon0=0.05, omega_matter0=0.27,
                     omega_lambda0=0.73, hubble0=0.7, this_redshift=10,
                     log_mass_min=5, log_mass_max=9, fitting_function=5)
 
@@ -106,99 +106,99 @@
 -----------------
 
 * **simulation_ds** (*Simulation dataset object*)
-  The loaded simulation dataset, used to set cosmological paramters.
+  The loaded simulation dataset, used to set cosmological parameters.
   Default : None.
 
 * **halos_ds** (*Halo dataset object*)
-  The halos from a simulation to be used for creation of the 
+  The halos from a simulation to be used for creation of the
   halo mass function in the simulation.
   Default : None.
 
 * **make_analytic** (*bool*)
-  Whether or not to calculate the analytic mass function to go with 
-  the simulated halo mass function.  Automatically set to true if a 
+  Whether or not to calculate the analytic mass function to go with
+  the simulated halo mass function.  Automatically set to true if a
   simulation dataset is provided.
   Default : True.
 
 * **omega_matter0** (*float*)
-  The fraction of the universe made up of matter (dark and baryonic). 
+  The fraction of the universe made up of matter (dark and baryonic).
   Default : 0.2726.
 
 * **omega_lambda0** (*float*)
-  The fraction of the universe made up of dark energy. 
+  The fraction of the universe made up of dark energy.
   Default : 0.7274.
 
 * **omega_baryon0**  (*float*)
-  The fraction of the universe made up of baryonic matter. This is not 
-  always stored in the datset and should be checked by hand.
+  The fraction of the universe made up of baryonic matter. This is not
+  always stored in the dataset and should be checked by hand.
   Default : 0.0456.
 
 * **hubble0** (*float*)
-  The expansion rate of the universe in units of 100 km/s/Mpc. 
+  The expansion rate of the universe in units of 100 km/s/Mpc.
   Default : 0.704.
 
 * **sigma8** (*float*)
-  The amplitude of the linear power spectrum at z=0 as specified by 
-  the rms amplitude of mass-fluctuations in a top-hat sphere of radius 
-  8 Mpc/h. This is not always stored in the datset and should be 
+  The amplitude of the linear power spectrum at z=0 as specified by
+  the rms amplitude of mass-fluctuations in a top-hat sphere of radius
+  8 Mpc/h. This is not always stored in the dataset and should be
   checked by hand.
   Default : 0.86.
 
 * **primordial_index** (*float*)
-  This is the index of the mass power spectrum before modification by 
-  the transfer function. A value of 1 corresponds to the scale-free 
-  primordial spectrum. This is not always stored in the datset and 
+  This is the index of the mass power spectrum before modification by
+  the transfer function. A value of 1 corresponds to the scale-free
+  primordial spectrum. This is not always stored in the dataset and
   should be checked by hand.
   Default : 1.0.
 
 * **this_redshift** (*float*)
-  The current redshift. 
+  The current redshift.
   Default : 0.
 
 * **log_mass_min** (*float*)
   The log10 of the mass of the minimum of the halo mass range. This is
-  set automatically by the range of halo masses if a simulated halo 
+  set automatically by the range of halo masses if a simulated halo
  dataset is provided. If a halo dataset is not provided and no value
   is specified, it will be set to 5. Units: M_solar
   Default : None.
 
 * **log_mass_max** (*float*)
   The log10 of the mass of the maximum of the halo mass range. This is
-  set automatically by the range of halo masses if a simulated halo 
+  set automatically by the range of halo masses if a simulated halo
  dataset is provided. If a halo dataset is not provided and no value
   is specified, it will be set to 16. Units: M_solar
   Default : None.
 
 * **num_sigma_bins** (*float*)
-  The number of bins (points) to use for the calculation of the 
-  analytic mass function. 
+  The number of bins (points) to use for the calculation of the
+  analytic mass function.
   Default : 360.
 
 * **fitting_function** (*int*)
-  Which fitting function to use. 1 = Press-Schechter, 2 = Jenkins, 
+  Which fitting function to use. 1 = Press-Schechter, 2 = Jenkins,
   3 = Sheth-Tormen, 4 = Warren, 5 = Tinker
   Default : 4.
 
 Outputs
 -------
 
-A HaloMassFnc object has several arrays hanging off of it containing the 
+A HaloMassFcn object has several arrays hanging off of it containing the following:
 
 * **masses_sim**: Halo masses from simulated halos. Units: M_solar
 
-* **n_cumulative_sim**: Number density of halos with mass greater than the 
+* **n_cumulative_sim**: Number density of halos with mass greater than the
   corresponding mass in masses_sim. Units: comoving Mpc^-3
 
-* **masses_analytic**: Masses used for the generation of the analytic mass 
+* **masses_analytic**: Masses used for the generation of the analytic mass
   function. Units: M_solar
 
-* **n_cumulative_analytic**: Number density of halos with mass greater then 
+* **n_cumulative_analytic**: Number density of halos with mass greater than
   the corresponding mass in masses_analytic. Units: comoving Mpc^-3
 
 * **dndM_dM_analytic**: Differential number density of halos, (dn/dM)*dM.
 
 After the mass function has been created for both simulated halos and the
-corresponding analytic fits, they can be plotted though something along the 
+corresponding analytic fits, they can be plotted through something along the
 lines of
 
 .. code-block:: python
@@ -213,7 +213,7 @@
   plt.loglog(hmf.masses_sim, hmf.n_cumulative_sim)
   plt.loglog(hmf.masses_analytic, hmf.n_cumulative_analytic)
 
-Attached to ``hmf`` is the convenience function ``write_out``, which saves the 
+Attached to ``hmf`` is the convenience function ``write_out``, which saves the
 halo mass function to a text file (continued from above):

 .. code-block:: python
 

This diff is so big that we needed to truncate the remainder.

https://bitbucket.org/yt_analysis/yt/commits/2276c4a8d702/
Changeset:   2276c4a8d702
Branch:      yt
User:        ngoldbaum
Date:        2016-03-31 23:42:00+00:00
Summary:     Fixing issues pointed out by andrew
Affected #:  1 file

diff -r dfda0ce6531cadeee95b16b820c3a6203f9eaf42 -r 2276c4a8d7020a1144c31c91c1dd278d5bfa46ce doc/install_script.sh
--- a/doc/install_script.sh
+++ b/doc/install_script.sh
@@ -505,7 +505,7 @@
 echo "be installing Mercurial"
 
 printf "%-18s = %s so I " "INST_UNSTRUCTURED" "${INST_UNSTRUCTURED}"
-get_willwont ${INST_HG}
+get_willwont ${INST_UNSTRUCTURED}
 echo "be installing unstructured mesh rendering"
 
 if [ $INST_CONDA -eq 0 ]
@@ -1353,9 +1353,9 @@
         echo "Installing pyembree from source"
         ( ${GETFILE} "$PYEMBREE_URL" 2>&1 ) 1>> ${LOG_FILE} || do_exit
         log_cmd unzip ${DEST_DIR}/src/master.zip
-        log_cmd pushd ${DEST_DIR}/src/pyembree-master
+        pushd ${DEST_DIR}/src/pyembree-master &< /dev/null
         log_cmd python setup.py install build_ext -I${DEST_DIR}/include -L${DEST_DIR}/lib
-        popd
+        popd &> /dev/null
     fi
 
     if [ $INST_PY3 -eq 1 ]
@@ -1376,9 +1376,9 @@
         then
             echo $DEST_DIR > ${YT_DIR}/embree.cfg
         fi
-        log_cmd pushd ${YT_DIR}
+        pushd ${YT_DIR} &> /dev/null
         log_cmd python setup.py develop
-        log_cmd popd
+        popd &> /dev/null
     fi
 
     echo


https://bitbucket.org/yt_analysis/yt/commits/9a8c4256510f/
Changeset:   9a8c4256510f
Branch:      yt
User:        ngoldbaum
Date:        2016-04-01 00:17:00+00:00
Summary:     Fix typo
Affected #:  1 file

diff -r 2276c4a8d7020a1144c31c91c1dd278d5bfa46ce -r 9a8c4256510f1c6168acefad92b8ed841505f986 doc/install_script.sh
--- a/doc/install_script.sh
+++ b/doc/install_script.sh
@@ -1353,7 +1353,7 @@
         echo "Installing pyembree from source"
         ( ${GETFILE} "$PYEMBREE_URL" 2>&1 ) 1>> ${LOG_FILE} || do_exit
         log_cmd unzip ${DEST_DIR}/src/master.zip
-        pushd ${DEST_DIR}/src/pyembree-master &< /dev/null
+        pushd ${DEST_DIR}/src/pyembree-master &> /dev/null
         log_cmd python setup.py install build_ext -I${DEST_DIR}/include -L${DEST_DIR}/lib
         popd &> /dev/null
     fi


https://bitbucket.org/yt_analysis/yt/commits/cb3892723a38/
Changeset:   cb3892723a38
Branch:      yt
User:        ngoldbaum
Date:        2016-04-01 19:47:14+00:00
Summary:     Update dependencies for source installation
Affected #:  1 file

diff -r 9a8c4256510f1c6168acefad92b8ed841505f986 -r cb3892723a38f10b307e93984963b8ed02a8c4e3 doc/install_script.sh
--- a/doc/install_script.sh
+++ b/doc/install_script.sh
@@ -745,33 +745,35 @@
     mkdir -p ${DEST_DIR}/src
     cd ${DEST_DIR}/src
 
-    PYTHON2='Python-2.7.9'
+    PYTHON2='Python-2.7.11'
     PYTHON3='Python-3.5.1'
-    CYTHON='Cython-0.22'
+    CYTHON='Cython-0.23.5'
     PYX='PyX-0.12.1'
     BZLIB='bzip2-1.0.6'
     FREETYPE_VER='freetype-2.4.12' 
     H5PY='h5py-2.5.0'
     HDF5='hdf5-1.8.14' 
     LAPACK='lapack-3.4.2'
-    PNG=libpng-1.6.3
-    MATPLOTLIB='matplotlib-1.4.3'
-    MERCURIAL='mercurial-3.7.2'
-    NOSE='nose-1.3.6'
-    NUMPY='numpy-1.9.2'
-    PYTHON_HGLIB='python-hglib-1.6'
+    PNG='libpng-1.6.3'
+    MATPLOTLIB='matplotlib-1.5.1'
+    MERCURIAL='mercurial-3.7.3'
+    NOSE='nose-1.3.7'
+    NUMPY='numpy-1.11.0'
+    PYTHON_HGLIB='python-hglib-2.0'
     ROCKSTAR='rockstar-0.99.6'
-    SCIPY='scipy-0.15.1'
+    SCIPY='scipy-0.17.0'
     SQLITE='sqlite-autoconf-3071700'
-    SYMPY='sympy-0.7.6'
+    SYMPY='sympy-1.0'
     ZLIB='zlib-1.2.8'
-    SETUPTOOLS='setuptools-18.0.1'
+    SETUPTOOLS='setuptools-20.6.7'
+    ASTROPY='astropy-1.1.2'
     
     # Now we dump all our SHA512 files out.
-    echo '856220fa579e272ac38dcef091760f527431ff3b98df9af6e68416fcf77d9659ac5abe5c7dee41331f359614637a4ff452033085335ee499830ed126ab584267  Cython-0.22.tar.gz' > Cython-0.22.tar.gz.sha512
+    echo '9052d74bbd0c93757fd916939cc3c39eb1aba6c9692b48887ae577256bec64b39b1fd25b6c751e6c8fe723de4c0ddf9a1a207de39f75b0839500dfcdde69f925  Cython-0.23.5.tar.gz' > Cython-0.23.5.tar.gz.sha512
     echo '4941f5aa21aff3743546495fb073c10d2657ff42b2aff401903498638093d0e31e344cce778980f28a7170c6d29eab72ac074277b9d4088376e8692dc71e55c1  PyX-0.12.1.tar.gz' > PyX-0.12.1.tar.gz.sha512
-    echo 'a42f28ed8e49f04cf89e2ea7434c5ecbc264e7188dcb79ab97f745adf664dd9ab57f9a913543731635f90859536244ac37dca9adf0fc2aa1b215ba884839d160  Python-2.7.9.tgz' > Python-2.7.9.tgz.sha512
+    echo 'f21df53da87e9e3c14599a34388976e7dd09b951dff3c4b978fe224beeff07e749c0059ffd94f68ca9b75ecaef142b285d579b8dfaad4eab85aca33957114937  Python-2.7.11.tgz' > Python-2.7.11.tgz.sha512
     echo '73f1477f3d3f5bd978c4ea1d1b679467b45e9fd2f443287b88c5c107a9ced580c56e0e8f33acea84e06b11a252e2a4e733120b721a9b6e1bb3d34493a3353bfb  Python-3.5.1.tgz' > Python-3.5.1.tgz.sha512
+    echo 'b83c4a1415a3eb8c016507705d0d2f22971e4da937bb97953eec08f8f856933d8fa76ce8c536122235b19e7879b16add2e20fd2fee3e488f9b2b4bf1b9f4dbdb  astropy-1.1.2.tar.gz' > astropy.1.1.2.tar.gz
     echo '276bd9c061ec9a27d478b33078a86f93164ee2da72210e12e2c9da71dcffeb64767e4460b93f257302b09328eda8655e93c4b9ae85e74472869afbeae35ca71e  blas.tar.gz' > blas.tar.gz.sha512
     echo '00ace5438cfa0c577e5f578d8a808613187eff5217c35164ffe044fbafdfec9e98f4192c02a7d67e01e5a5ccced630583ad1003c37697219b0f147343a3fdd12  bzip2-1.0.6.tar.gz' > bzip2-1.0.6.tar.gz.sha512
     echo '609a68a3675087e0cc95268574f31e104549daa48efe15a25a33b8e269a93b4bd160f4c3e8178dca9c950ef5ca514b039d6fd1b45db6af57f25342464d0429ce  freetype-2.4.12.tar.gz' > freetype-2.4.12.tar.gz.sha512
@@ -779,16 +781,17 @@
     echo '4073fba510ccadaba41db0939f909613c9cb52ba8fb6c1062fc9118edc601394c75e102310be1af4077d07c9b327e6bbb1a6359939a7268dc140382d0c1e0199  hdf5-1.8.14.tar.gz' > hdf5-1.8.14.tar.gz.sha512
     echo '8770214491e31f0a7a3efaade90eee7b0eb20a8a6ab635c5f854d78263f59a1849133c14ef5123d01023f0110cbb9fc6f818da053c01277914ae81473430a952  lapack-3.4.2.tar.gz' > lapack-3.4.2.tar.gz.sha512
     echo '887582e5a22e4cde338aa8fec7a89f6dd31f2f02b8842735f00f970f64582333fa03401cea6d01704083403c7e8b7ebc26655468ce930165673b33efa4bcd586  libpng-1.6.3.tar.gz' > libpng-1.6.3.tar.gz.sha512
-    echo '51b0f58b2618b47b653e17e4f6b6a1215d3a3b0f1331ce3555cc7435e365d9c75693f289ce12fe3bf8f69fd57b663e545f0f1c2c94e81eaa661cac0689e125f5  matplotlib-1.4.3.tar.gz' > matplotlib-1.4.3.tar.gz.sha512
-    echo '573b35052ec50f4672cdd5afd93080edc864deb854b890c8cedba53c40d6d3a37a560e364b94b819cfc6d9fc5c07c0411a13167f0888007e5c9366bc94154306  mercurial-3.7.2.tar.gz' > mercurial-3.7.2.tar.gz.sha512
-    echo 'd0cede08dc33a8ac0af0f18063e57f31b615f06e911edb5ca264575174d8f4adb4338448968c403811d9dcc60f38ade3164662d6c7b69b499f56f0984bb6283c  nose-1.3.6.tar.gz' > nose-1.3.6.tar.gz.sha512
-    echo '70470ebb9afef5dfd0c83ceb7a9d5f1b7a072b1a9b54b04f04f5ed50fbaedd5b4906bd500472268d478f94df9e749a88698b1ff30f2d80258e7f3fec040617d9  numpy-1.9.2.tar.gz' > numpy-1.9.2.tar.gz.sha512
-    echo 'bfd10455e74e30df568c4c4827140fb6cc29893b0e062ce1764bd52852ec7487a70a0f5ea53c3fca7886f5d36365c9f4db52b8c93cad35fb67beeb44a2d56f2d  python-hglib-1.6.tar.gz' > python-hglib-1.6.tar.gz.sha512
-    echo 'fff4412d850c431a1b4e6ee3b17958ee5ab3beb81e6cb8a8e7d56d368751eaa8781d7c3e69d932dc002d718fddc66a72098acfe74cfe29ec80b24e6736317275  scipy-0.15.1.tar.gz' > scipy-0.15.1.tar.gz.sha512
+    echo 'a0e78b5027a3a49cf8e77dc0d26f5f380dcd80f7b309b6121199acd5e1d94f48482864a9eee3bd397f7ac6f07fe1d3c21bf517217df3c72e8e3d105b7c2ae58e  matplotlib-1.5.1.tar.gz' > matplotlib-1.5.1.tar.gz.sha512
+    echo '7f9f97229e40c7092c16ccf227b19a08a9839d8ce19a9d057341fff75876bff32241ee9aa10eab293f779ea3e8a1d97577597187bd96251fb499cbb1075a82cf  mercurial-3.7.3.tar.gz' > mercurial-3.7.3.tar.gz.sha512
+    echo 'e65c914f621f8da06b9ab11a0ff2763d6e29b82ce2aaed56da0e3773dc899d9deb1f20015789d44c65a5dad7214520f5b659b3f8d7695fb207ad3f78e5cf1b62  nose-1.3.7.tar.gz' > nose-1.3.7.tar.gz.sha512
+    echo '92c1889397ad013e25da3a0657fc01e787d528fc19c29cc2acd286c3f07d41b984252583457b1b9259fc303afbe9694565cdcf5752eb4ecb950cc7a99ec1ad8b  numpy-1.11.0.tar.gz' > numpy-1.11.0.tar.gz.sha512
+    echo '647cc82424783efc3d74540e34976af66acc35fc36d66afba169508946cc62027910c7e41dc9d11ec88c15d6b1e113ce22c2781711ea324de58db3b24d5079c4  python-hglib-2.0.tar.gz' > python-hglib-2.0.tar.gz.sha512
+    echo 'de6409d75a3ff3cf1e5391d3b09126f0bc7e1a40a15f9bee244195638fe2f8481fca032896d8534623e6122ff59aaf669664e27ff89cf1b094a5ce7312f220b7  scipy-0.17.0.tar.gz' > scipy-0.17.0.tar.gz.sha512
     echo '96f3e51b46741450bc6b63779c10ebb4a7066860fe544385d64d1eda52592e376a589ef282ace2e1df73df61c10eab1a0d793abbdaf770e60289494d4bf3bcb4  sqlite-autoconf-3071700.tar.gz' > sqlite-autoconf-3071700.tar.gz.sha512
-    echo 'ce0f1a17ac01eb48aec31fc0ad431d9d7ed9907f0e8584a6d79d0ffe6864fe62e203fe3f2a3c3e4e3d485809750ce07507a6488e776a388a7a9a713110882fcf  sympy-0.7.6.tar.gz' > sympy-0.7.6.tar.gz.sha512
+    echo '977db6e9bc6a5918cceb255981a57e85e7060c0922aefd2968b004d25d704e25a5cb5bbe09eb387e8695581e23e2825d9c40310068fe25ece7e9c23037a21f39  sympy-1.0.tar.gz' > sympy-1.0.tar.gz.sha512
     echo 'ece209d4c7ec0cb58ede791444dc754e0d10811cbbdebe3df61c0fd9f9f9867c1c3ccd5f1827f847c005e24eef34fb5bf87b5d3f894d75da04f1797538290e4a  zlib-1.2.8.tar.gz' > zlib-1.2.8.tar.gz.sha512
-    echo '9b318ce2ee2cf787929dcb886d76c492b433e71024fda9452d8b4927652a298d6bd1bdb7a4c73883a98e100024f89b46ea8aa14b250f896e549e6dd7e10a6b41  setuptools-18.0.1.tar.gz' > setuptools-18.0.1.tar.gz.sha512
+    echo '91a212b5007f9fdfacb4341e06dc0355c5c29897eb8ea407dd4864091f845ba1417bb0d33b5ed6897869d0233e2d0ec6548898d3dbe9eda23f751829bd51a104  setuptools-20.6.7.tar.gz' > setuptools-20.6.7.tar.gz.sha512
+    echo 'b83c4a1415a3eb8c016507705d0d2f22971e4da937bb97953eec08f8f856933d8fa76ce8c536122235b19e7879b16add2e20fd2fee3e488f9b2b4bf1b9f4dbdb  astropy-1.1.2.tar.gz' > astropy-1.1.2.tar.gz
     # Individual processes
     [ -z "$HDF5_DIR" ] && get_ytproject $HDF5.tar.gz
     [ $INST_ZLIB -eq 1 ] && get_ytproject $ZLIB.tar.gz
@@ -804,6 +807,7 @@
     [ $INST_PY3 -eq 1 ] && get_ytproject $PYTHON3.tgz
     [ $INST_H5PY -eq 1 ] && get_ytproject $H5PY.tat.gz
     [ $INST_NOSE -eq 1 ] && get_ytproject $NOSE.tar.gz
+    [ $INST_ASTROPY -eq 1] && get_ytproject $ASTROPY.tar.gz
     get_ytproject $PYTHON2.tgz
     get_ytproject $NUMPY.tar.gz
     get_ytproject $MATPLOTLIB.tar.gz
@@ -1095,8 +1099,18 @@
     ( ${DEST_DIR}/bin/pip install "jupyter<2.0.0" 2>&1 ) 1>> ${LOG_FILE}
     
     do_setup_py $CYTHON
-    do_setup_py $H5PY
-    do_setup_py $NOSE
+    if [ $INST_H5PY -eq 1 ]
+    then
+        do_setup_py $H5PY
+    fi
+    if [ $INST_NOSE -eq 1 ]
+    then
+        do_setup_py $NOSE
+    fi
+    if [ $INST_ASTROPY -eq 1]
+    then
+        do_setup_py $ASTROPY
+    fi
     do_setup_py $PYTHON_HGLIB
     do_setup_py $SYMPY
     [ $INST_PYX -eq 1 ] && do_setup_py $PYX


https://bitbucket.org/yt_analysis/yt/commits/6efc04faa058/
Changeset:   6efc04faa058
Branch:      yt
User:        ngoldbaum
Date:        2016-04-13 14:56:47+00:00
Summary:     fix bash syntax errors
Affected #:  1 file

diff -r cb3892723a38f10b307e93984963b8ed02a8c4e3 -r 6efc04faa058a4f432f706a28e846c49339c1ed0 doc/install_script.sh
--- a/doc/install_script.sh
+++ b/doc/install_script.sh
@@ -460,7 +460,7 @@
 
 if [ $INST_UNSTRUCTURED -ne 0 ]
 then
-    if [$INST_YT_SOURCE -eq 0 ]
+    if [ $INST_YT_SOURCE -eq 0 ]
     then
         echo "yt must be compiled from source to install support for"
         echo "unstructured mesh rendering. Please set INST_YT_SOURCE to 1"
@@ -807,7 +807,7 @@
     [ $INST_PY3 -eq 1 ] && get_ytproject $PYTHON3.tgz
     [ $INST_H5PY -eq 1 ] && get_ytproject $H5PY.tat.gz
     [ $INST_NOSE -eq 1 ] && get_ytproject $NOSE.tar.gz
-    [ $INST_ASTROPY -eq 1] && get_ytproject $ASTROPY.tar.gz
+    [ $INST_ASTROPY -eq 1 ] && get_ytproject $ASTROPY.tar.gz
     get_ytproject $PYTHON2.tgz
     get_ytproject $NUMPY.tar.gz
     get_ytproject $MATPLOTLIB.tar.gz
@@ -1107,7 +1107,7 @@
     then
         do_setup_py $NOSE
     fi
-    if [ $INST_ASTROPY -eq 1]
+    if [ $INST_ASTROPY -eq 1 ]
     then
         do_setup_py $ASTROPY
     fi


https://bitbucket.org/yt_analysis/yt/commits/527b865a566f/
Changeset:   527b865a566f
Branch:      yt
User:        ngoldbaum
Date:        2016-04-13 14:57:20+00:00
Summary:     Correct typo in astropy sha512 hash section
Affected #:  1 file

diff -r 6efc04faa058a4f432f706a28e846c49339c1ed0 -r 527b865a566f3d72cb4f4bfca6141a08ab1ae94d doc/install_script.sh
--- a/doc/install_script.sh
+++ b/doc/install_script.sh
@@ -773,7 +773,7 @@
     echo '4941f5aa21aff3743546495fb073c10d2657ff42b2aff401903498638093d0e31e344cce778980f28a7170c6d29eab72ac074277b9d4088376e8692dc71e55c1  PyX-0.12.1.tar.gz' > PyX-0.12.1.tar.gz.sha512
     echo 'f21df53da87e9e3c14599a34388976e7dd09b951dff3c4b978fe224beeff07e749c0059ffd94f68ca9b75ecaef142b285d579b8dfaad4eab85aca33957114937  Python-2.7.11.tgz' > Python-2.7.11.tgz.sha512
     echo '73f1477f3d3f5bd978c4ea1d1b679467b45e9fd2f443287b88c5c107a9ced580c56e0e8f33acea84e06b11a252e2a4e733120b721a9b6e1bb3d34493a3353bfb  Python-3.5.1.tgz' > Python-3.5.1.tgz.sha512
-    echo 'b83c4a1415a3eb8c016507705d0d2f22971e4da937bb97953eec08f8f856933d8fa76ce8c536122235b19e7879b16add2e20fd2fee3e488f9b2b4bf1b9f4dbdb  astropy-1.1.2.tar.gz' > astropy.1.1.2.tar.gz
+    echo 'b83c4a1415a3eb8c016507705d0d2f22971e4da937bb97953eec08f8f856933d8fa76ce8c536122235b19e7879b16add2e20fd2fee3e488f9b2b4bf1b9f4dbdb  astropy-1.1.2.tar.gz' > astropy-1.1.2.tar.gz.sha512
     echo '276bd9c061ec9a27d478b33078a86f93164ee2da72210e12e2c9da71dcffeb64767e4460b93f257302b09328eda8655e93c4b9ae85e74472869afbeae35ca71e  blas.tar.gz' > blas.tar.gz.sha512
     echo '00ace5438cfa0c577e5f578d8a808613187eff5217c35164ffe044fbafdfec9e98f4192c02a7d67e01e5a5ccced630583ad1003c37697219b0f147343a3fdd12  bzip2-1.0.6.tar.gz' > bzip2-1.0.6.tar.gz.sha512
     echo '609a68a3675087e0cc95268574f31e104549daa48efe15a25a33b8e269a93b4bd160f4c3e8178dca9c950ef5ca514b039d6fd1b45db6af57f25342464d0429ce  freetype-2.4.12.tar.gz' > freetype-2.4.12.tar.gz.sha512


https://bitbucket.org/yt_analysis/yt/commits/52ee485c280b/
Changeset:   52ee485c280b
Branch:      yt
User:        ngoldbaum
Date:        2016-04-13 14:57:52+00:00
Summary:     remove duplicate astropy hash
Affected #:  1 file

diff -r 527b865a566f3d72cb4f4bfca6141a08ab1ae94d -r 52ee485c280b95446823e0ba67fc60ae0e96d411 doc/install_script.sh
--- a/doc/install_script.sh
+++ b/doc/install_script.sh
@@ -791,7 +791,6 @@
     echo '977db6e9bc6a5918cceb255981a57e85e7060c0922aefd2968b004d25d704e25a5cb5bbe09eb387e8695581e23e2825d9c40310068fe25ece7e9c23037a21f39  sympy-1.0.tar.gz' > sympy-1.0.tar.gz.sha512
     echo 'ece209d4c7ec0cb58ede791444dc754e0d10811cbbdebe3df61c0fd9f9f9867c1c3ccd5f1827f847c005e24eef34fb5bf87b5d3f894d75da04f1797538290e4a  zlib-1.2.8.tar.gz' > zlib-1.2.8.tar.gz.sha512
     echo '91a212b5007f9fdfacb4341e06dc0355c5c29897eb8ea407dd4864091f845ba1417bb0d33b5ed6897869d0233e2d0ec6548898d3dbe9eda23f751829bd51a104  setuptools-20.6.7.tar.gz' > setuptools-20.6.7.tar.gz.sha512
-    echo 'b83c4a1415a3eb8c016507705d0d2f22971e4da937bb97953eec08f8f856933d8fa76ce8c536122235b19e7879b16add2e20fd2fee3e488f9b2b4bf1b9f4dbdb  astropy-1.1.2.tar.gz' > astropy-1.1.2.tar.gz
     # Individual processes
     [ -z "$HDF5_DIR" ] && get_ytproject $HDF5.tar.gz
     [ $INST_ZLIB -eq 1 ] && get_ytproject $ZLIB.tar.gz


https://bitbucket.org/yt_analysis/yt/commits/913bd45e7f27/
Changeset:   913bd45e7f27
Branch:      yt
User:        ngoldbaum
Date:        2016-04-13 14:58:20+00:00
Summary:     Correct typo in h5py tarball name
Affected #:  1 file

diff -r 52ee485c280b95446823e0ba67fc60ae0e96d411 -r 913bd45e7f2719f2a22c959f78d6508c712da90f doc/install_script.sh
--- a/doc/install_script.sh
+++ b/doc/install_script.sh
@@ -804,7 +804,7 @@
     [ $INST_SCIPY -eq 1 ] && get_ytproject $LAPACK.tar.gz
     [ $INST_HG -eq 1 ] && get_ytproject $MERCURIAL.tar.gz
     [ $INST_PY3 -eq 1 ] && get_ytproject $PYTHON3.tgz
-    [ $INST_H5PY -eq 1 ] && get_ytproject $H5PY.tat.gz
+    [ $INST_H5PY -eq 1 ] && get_ytproject $H5PY.tar.gz
     [ $INST_NOSE -eq 1 ] && get_ytproject $NOSE.tar.gz
     [ $INST_ASTROPY -eq 1 ] && get_ytproject $ASTROPY.tar.gz
     get_ytproject $PYTHON2.tgz


https://bitbucket.org/yt_analysis/yt/commits/f5616a3ff884/
Changeset:   f5616a3ff884
Branch:      yt
User:        ngoldbaum
Date:        2016-04-13 14:58:46+00:00
Summary:     Make it possible to install rockstar under conda
Affected #:  1 file

diff -r 913bd45e7f2719f2a22c959f78d6508c712da90f -r f5616a3ff8842e92d662fb276a19bf1fea297d29 doc/install_script.sh
--- a/doc/install_script.sh
+++ b/doc/install_script.sh
@@ -483,6 +483,17 @@
     PYEMBREE_URL="https://github.com/scopatz/pyembree/archive/master.zip"
 fi
 
+if [ $INST_ROCKSTAR -ne 0 ]
+then
+    if [ $INST_YT_SOURCE -eq 0 ]
+    then
+        echo "yt must be compiled from source to install support for"
+        echo "the rockstar halo finder. Please set INST_YT_SOURCE to 1"
+        echo "and re-run the install script"
+        exit 1
+    fi
+fi
+
 echo
 echo
 echo "========================================================================"
@@ -1319,11 +1330,6 @@
     then
         YT_DEPS+=('astropy')
     fi
-    if [ $INST_ROCKSTAR -ne 0 ]
-    then
-        echo "Installing with rockstar and conda hasn't been implemented yet"
-        exit 1
-    fi
     YT_DEPS+=('conda-build')
     if [ $INST_PY3 -eq 0 ]
     then
@@ -1371,6 +1377,43 @@
         popd &> /dev/null
     fi
 
+    if [ $INST_ROCKSTAR -eq 1 ]
+    then
+        if [ ! -d ${DEST_DIR}/src ]
+        then
+            mkdir ${DEST_DIR}/src
+        fi
+        cd ${DEST_DIR}/src
+        if [ ! -e rockstar/done ]
+        then
+            echo "Building Rockstar"
+            if [ ! -e rockstar ]
+            then
+                ( hg clone http://bitbucket.org/MatthewTurk/rockstar 2>&1 ) 1>> ${LOG_FILE}
+            fi
+            cd rockstar
+            ( hg pull 2>&1 ) 1>> ${LOG_FILE}
+            ( hg up -C tip 2>&1 ) 1>> ${LOG_FILE}
+            ( make lib 2>&1 ) 1>> ${LOG_FILE} || do_exit
+            cp librockstar.so ${DEST_DIR}/lib
+            ROCKSTAR_DIR=${DEST_DIR}/src/rockstar
+            echo $ROCKSTAR_DIR > ${YT_DIR}/rockstar.cfg
+            touch done
+            cd ..
+        fi
+    fi
+
+    # conda doesn't package pyx, so we install manually with pip
+    if [ $INST_PYX -eq 1 ]
+    then
+        if [ $INST_PY3 -eq 1 ]
+        then
+            log_cmd pip install pyx
+        else
+            log_cmd pip install pyx==0.12.1
+        fi
+    fi
+
     if [ $INST_PY3 -eq 1 ]
     then
         log_cmd conda create -y -n py27 python=2.7 mercurial


https://bitbucket.org/yt_analysis/yt/commits/9a2d5e0ad37c/
Changeset:   9a2d5e0ad37c
Branch:      yt
User:        ngoldbaum
Date:        2016-04-13 14:59:06+00:00
Summary:     Fail when trying to install unstructured mesh rendering support with INST_CONDA=0
Affected #:  1 file

diff -r f5616a3ff8842e92d662fb276a19bf1fea297d29 -r 9a2d5e0ad37cc42c2bcad3f62146de499e62eed3 doc/install_script.sh
--- a/doc/install_script.sh
+++ b/doc/install_script.sh
@@ -467,6 +467,12 @@
         echo "and re-run the install script."
         exit 1
     fi
+    if [ $INST_CONDA -eq 0 ]
+    then
+        echo "unstructured mesh rendering support has not yet been implemented"
+        echo "for INST_CONDA=0."
+        exit 1
+    fi
     if [ `uname` = "Darwin" ]
     then
         EMBREE="embree-2.8.0.x86_64.macosx"


https://bitbucket.org/yt_analysis/yt/commits/0ccd0eda96b1/
Changeset:   0ccd0eda96b1
Branch:      yt
User:        ngoldbaum
Date:        2016-04-13 14:59:30+00:00
Summary:     Add a --yes option to skip interactive part of install script
Affected #:  1 file

diff -r 9a2d5e0ad37cc42c2bcad3f62146de499e62eed3 -r 0ccd0eda96b1d01e5e4a30faef422662dcd3d79f doc/install_script.sh
--- a/doc/install_script.sh
+++ b/doc/install_script.sh
@@ -606,7 +606,10 @@
 fi
 echo "========================================================================"
 echo
-read -p "[hit enter] "
+if [[ $1 != "--yes" ]]
+then
+    read -p "[hit enter] "
+fi
 echo
 echo "Awesome!  Here we go."
 echo


https://bitbucket.org/yt_analysis/yt/commits/6fe5141e9cc1/
Changeset:   6fe5141e9cc1
Branch:      yt
User:        ngoldbaum
Date:        2016-04-13 14:59:58+00:00
Summary:     Fix issue with pyx supported version divergence between py2 and py3
Affected #:  1 file

diff -r 0ccd0eda96b1d01e5e4a30faef422662dcd3d79f -r 6fe5141e9cc12526fb6c69e0c21ba03a5f4808b7 doc/install_script.sh
--- a/doc/install_script.sh
+++ b/doc/install_script.sh
@@ -768,7 +768,12 @@
     PYTHON2='Python-2.7.11'
     PYTHON3='Python-3.5.1'
     CYTHON='Cython-0.23.5'
-    PYX='PyX-0.12.1'
+    if [ $INST_PY3 -eq 0 ]
+    then
+        PYX='PyX-0.12.1'
+    else
+        PYX='PyX-0.14.1'
+    fi
     BZLIB='bzip2-1.0.6'
     FREETYPE_VER='freetype-2.4.12' 
     H5PY='h5py-2.5.0'
@@ -790,7 +795,12 @@
     
     # Now we dump all our SHA512 files out.
     echo '9052d74bbd0c93757fd916939cc3c39eb1aba6c9692b48887ae577256bec64b39b1fd25b6c751e6c8fe723de4c0ddf9a1a207de39f75b0839500dfcdde69f925  Cython-0.23.5.tar.gz' > Cython-0.23.5.tar.gz.sha512
-    echo '4941f5aa21aff3743546495fb073c10d2657ff42b2aff401903498638093d0e31e344cce778980f28a7170c6d29eab72ac074277b9d4088376e8692dc71e55c1  PyX-0.12.1.tar.gz' > PyX-0.12.1.tar.gz.sha512
+    if [ $INST_PY3 -eq 0 ]
+    then
+        echo '4941f5aa21aff3743546495fb073c10d2657ff42b2aff401903498638093d0e31e344cce778980f28a7170c6d29eab72ac074277b9d4088376e8692dc71e55c1  PyX-0.12.1.tar.gz' > PyX-0.12.1.tar.gz.sha512
+    else
+        echo '16265bbdcaf28ce194189a2987b32952f296c850b829454bcccce0abd23838bfca0276c3e9c8e96b8cbfaf1473bf14669f9b7f2032ee039b61ae59ea3aa45a20  PyX-0.14.1.tar.gz' > PyX-0.14.1.tar.gz.sha512
+    fi
     echo 'f21df53da87e9e3c14599a34388976e7dd09b951dff3c4b978fe224beeff07e749c0059ffd94f68ca9b75ecaef142b285d579b8dfaad4eab85aca33957114937  Python-2.7.11.tgz' > Python-2.7.11.tgz.sha512
     echo '73f1477f3d3f5bd978c4ea1d1b679467b45e9fd2f443287b88c5c107a9ced580c56e0e8f33acea84e06b11a252e2a4e733120b721a9b6e1bb3d34493a3353bfb  Python-3.5.1.tgz' > Python-3.5.1.tgz.sha512
     echo 'b83c4a1415a3eb8c016507705d0d2f22971e4da937bb97953eec08f8f856933d8fa76ce8c536122235b19e7879b16add2e20fd2fee3e488f9b2b4bf1b9f4dbdb  astropy-1.1.2.tar.gz' > astropy-1.1.2.tar.gz.sha512


https://bitbucket.org/yt_analysis/yt/commits/8354519c3901/
Changeset:   8354519c3901
Branch:      yt
User:        ngoldbaum
Date:        2016-04-13 15:00:23+00:00
Summary:     install hglib in the conda install
Affected #:  1 file

diff -r 6fe5141e9cc12526fb6c69e0c21ba03a5f4808b7 -r 8354519c3901f7ec60de249c4fb8e33ef4e1fea2 doc/install_script.sh
--- a/doc/install_script.sh
+++ b/doc/install_script.sh
@@ -1369,6 +1369,8 @@
         echo "Installing $YT_DEP"
         log_cmd conda install --yes ${YT_DEP}
     done
+
+    log_cmd pip install python-hglib
     
     if [ $INST_UNSTRUCTURED -eq 1 ]
     then


https://bitbucket.org/yt_analysis/yt/commits/b87d3e929e7e/
Changeset:   b87d3e929e7e
Branch:      yt
User:        ngoldbaum
Date:        2016-04-13 15:00:37+00:00
Summary:     Only make the src directory if it doesn't exist yet
Affected #:  1 file

diff -r 8354519c3901f7ec60de249c4fb8e33ef4e1fea2 -r b87d3e929e7ee839f97212e8af3bbb55e011bfc7 doc/install_script.sh
--- a/doc/install_script.sh
+++ b/doc/install_script.sh
@@ -1376,7 +1376,10 @@
     then
         
         echo "Installing embree"
-        mkdir ${DEST_DIR}/src
+        if [ ! -d ${DEST_DIR}/src ]
+        then
+            mkdir ${DEST_DIR}/src
+        fi
         cd ${DEST_DIR}/src
         ( ${GETFILE} "$EMBREE_URL" 2>&1 ) 1>> ${LOG_FILE} || do_exit
         log_cmd tar xfz ${EMBREE}.tar.gz


https://bitbucket.org/yt_analysis/yt/commits/9c2097cfcde8/
Changeset:   9c2097cfcde8
Branch:      yt
User:        ngoldbaum
Date:        2016-04-13 15:00:53+00:00
Summary:     Add a test script for the install script
Affected #:  1 file

diff -r b87d3e929e7ee839f97212e8af3bbb55e011bfc7 -r 9c2097cfcde80b3f0b71313c140a7bf9af8a591b tests/test_install_script.py
--- /dev/null
+++ b/tests/test_install_script.py
@@ -0,0 +1,143 @@
+import contextlib
+import glob
+import os
+import subprocess
+import shutil
+import tempfile
+
+# dependencies that are always installed
+REQUIRED_DEPS = [
+    'mercurial',
+    'jupyter',
+    'numpy',
+    'matplotlib',
+    'h5py',
+    'cython',
+    'nose',
+    'sympy',
+    'setuptools',
+    'hglib'
+]
+
+# dependencies that aren't installed by default
+OPTIONAL_DEPS = [
+    'unstructured',
+    'pyx',
+    'rockstar',
+    'scipy',
+    'astropy',
+]
+
+# dependencies that are only installable when yt is built from source
+YT_SOURCE_ONLY_DEPS = [
+    'unstructured',
+    'rockstar'
+]
+
+# dependencies that are only installable when yt is built from source under conda
+YT_SOURCE_CONDA_ONLY_DEPS = [
+    'unstructured'
+]
+
+
+def call_unix_command(command):
+    print ('Running "%s" in %s' % (command, os.getcwd()))
+    output = ''
+    try:
+        output = subprocess.check_output(command, stderr=subprocess.STDOUT,
+                                         shell=True)
+    except subprocess.CalledProcessError as er:
+        raise RuntimeError(
+            'Command \'%s\' failed with return code \'%s\' and error:\n\n%s' %
+            (command, er.returncode, er.output))
+    finally:
+        if len(output.splitlines()) > 25:
+            print ('truncated output:')
+            print ('\n'.join((output.splitlines())[-25:]))
+        else:
+            print (output)
+
+
+@contextlib.contextmanager
+def working_directory(path):
+    """A context manager which changes the working directory to the given
+    path, and then changes it back to its previous value on exit.
+
+    """
+    prev_cwd = os.getcwd()
+    os.chdir(path)
+    try:
+        yield
+    finally:
+        os.chdir(prev_cwd)
+        shutil.rmtree(path)
+
+
+def run_install_script(install_script_path, inst_py3,
+                       conda=False, binary_yt=False):
+    msg = 'Testing installation with conda={}, inst_py3={}, and binary_yt={}'
+    print (msg.format(conda, inst_py3, binary_yt))
+    shutil.copy(install_script_path, os.curdir)
+    with open('install_script.sh', 'r') as source:
+        with open('install_script_edited.sh', 'w') as target:
+            data = source.read()
+            for dep in OPTIONAL_DEPS:
+                if binary_yt is True and dep in YT_SOURCE_ONLY_DEPS:
+                    continue
+                if conda is False and dep in YT_SOURCE_CONDA_ONLY_DEPS:
+                    continue
+                dname = 'INST_%s' % dep.upper()
+                data = data.replace(dname + '=0', dname + '=1')
+            if inst_py3 is True:
+                data = data.replace('INST_PY3=0', 'INST_PY3=1')
+            if conda is False:
+                data = data.replace('INST_CONDA=1', 'INST_CONDA=0')
+            if binary_yt is False:
+                data = data.replace('INST_YT_SOURCE=0', 'INST_YT_SOURCE=1')
+            target.write(data)
+    shutil.copyfile('install_script_edited.sh', 'install_script.sh')
+    call_unix_command('bash install_script.sh --yes')
+
+
+def verify_yt_installation(binary_yt):
+    yt_dir = glob.glob('yt-*')
+    ndirs = len(yt_dir)
+    if ndirs != 1:
+        raise RuntimeError(
+            'A yt installation was not properly cleaned up, exiting')
+    yt_dir = yt_dir[0]
+    python_path = os.sep.join([yt_dir, 'bin', 'python'])
+    for dep in OPTIONAL_DEPS + REQUIRED_DEPS:
+        if binary_yt is True and dep in YT_SOURCE_ONLY_DEPS:
+            continue
+        elif dep == 'mercurial':
+            hg_path = os.sep.join([yt_dir, 'bin', 'hg'])
+            call_unix_command('{} --version'.format(hg_path))
+        elif dep in ['unstructured', 'rockstar']:
+            # FIXME, how do we test these?
+            pass
+        else:
+            call_unix_command("{} -c 'import {}'".format(python_path, dep))
+    return yt_dir
+
+
+if __name__ == '__main__':
+    install_script_path = os.path.abspath(os.path.sep.join(
+        [os.getcwd(), os.pardir, 'doc', 'install_script.sh']))
+    for inst_py3 in [False, True]:
+        tmpdir = tempfile.mkdtemp()
+        with working_directory(tmpdir):
+            run_install_script(
+                install_script_path, inst_py3, conda=True, binary_yt=True)
+            conda_binary_path = verify_yt_installation(binary_yt=True)
+            shutil.rmtree(conda_binary_path)
+
+            run_install_script(
+                install_script_path, inst_py3, conda=True, binary_yt=False)
+            conda_source_path = verify_yt_installation(binary_yt=False)
+            shutil.rmtree(conda_source_path)
+
+            run_install_script(
+                install_script_path, inst_py3, conda=False, binary_yt=False)
+            source_path = verify_yt_installation(binary_yt=False)
+            shutil.rmtree(source_path)


https://bitbucket.org/yt_analysis/yt/commits/a8ea50a2a99f/
Changeset:   a8ea50a2a99f
Branch:      yt
User:        ngoldbaum
Date:        2016-04-14 03:14:08+00:00
Summary:     Add tests for rockstar and unstructured. Fix rockstar installation issues.
Affected #:  2 files

diff -r 9c2097cfcde80b3f0b71313c140a7bf9af8a591b -r a8ea50a2a99f9a1ece3587adec3a7a40388c9745 doc/install_script.sh
--- a/doc/install_script.sh
+++ b/doc/install_script.sh
@@ -1371,6 +1371,8 @@
     done
 
     log_cmd pip install python-hglib
+
+    log_cmd hg clone https://bitbucket.org/yt_analysis/yt_conda ${DEST_DIR}/src/yt_conda
     
     if [ $INST_UNSTRUCTURED -eq 1 ]
     then
@@ -1403,28 +1405,12 @@
 
     if [ $INST_ROCKSTAR -eq 1 ]
     then
-        if [ ! -d ${DEST_DIR}/src ]
-        then
-            mkdir ${DEST_DIR}/src
-        fi
-        cd ${DEST_DIR}/src
-        if [ ! -e rockstar/done ]
-        then
-            echo "Building Rockstar"
-            if [ ! -e rockstar ]
-            then
-                ( hg clone http://bitbucket.org/MatthewTurk/rockstar 2>&1 ) 1>> ${LOG_FILE}
-            fi
-            cd rockstar
-            ( hg pull 2>&1 ) 1>> ${LOG_FILE}
-            ( hg up -C tip 2>&1 ) 1>> ${LOG_FILE}
-            ( make lib 2>&1 ) 1>> ${LOG_FILE} || do_exit
-            cp librockstar.so ${DEST_DIR}/lib
-            ROCKSTAR_DIR=${DEST_DIR}/src/rockstar
-            echo $ROCKSTAR_DIR > ${YT_DIR}/rockstar.cfg
-            touch done
-            cd ..
-        fi
+        echo "Building Rockstar"
+        ( hg clone http://bitbucket.org/MatthewTurk/rockstar ${DEST_DIR}/src/rockstar/ 2>&1 ) 1>> ${LOG_FILE}
+        ROCKSTAR_PACKAGE=$(conda build ${DEST_DIR}/src/yt_conda/rockstar --output)
+        log_cmd conda build ${DEST_DIR}/src/yt_conda/rockstar
+        log_cmd conda install $ROCKSTAR_PACKAGE
+        ROCKSTAR_DIR=${DEST_DIR}/src/rockstar
     fi
 
     # conda doesn't package pyx, so we install manually with pip
@@ -1440,6 +1426,7 @@
 
     if [ $INST_PY3 -eq 1 ]
     then
+        echo "Installing mercurial"
         log_cmd conda create -y -n py27 python=2.7 mercurial
         log_cmd ln -s ${DEST_DIR}/envs/py27/bin/hg ${DEST_DIR}/bin
     fi
@@ -1449,15 +1436,20 @@
         echo "Installing yt"
         log_cmd conda install --yes yt
     else
-        echo "Installing yt from source"
+        echo "Building yt from source"
         YT_DIR="${DEST_DIR}/src/yt-hg"
         log_cmd hg clone -r ${BRANCH} https://bitbucket.org/yt_analysis/yt ${YT_DIR}
         if [ $INST_UNSTRUCTURED -eq 1 ]
         then
             echo $DEST_DIR > ${YT_DIR}/embree.cfg
         fi
+        if [ $INST_ROCKSTAR -eq 1 ]
+        then
+            echo $ROCKSTAR_DIR > ${YT_DIR}/rockstar.cfg
+            ROCKSTAR_LIBRARY_PATH=${DEST_DIR}/lib
+        fi
         pushd ${YT_DIR} &> /dev/null
-        log_cmd python setup.py develop
+        ( LIBRARY_PATH=$ROCKSTAR_LIBRARY_PATH python setup.py develop 2>&1) 1>> ${LOG_FILE}
         popd &> /dev/null
     fi
 
@@ -1496,6 +1488,21 @@
     echo "(e.g. .bashrc, .bash_profile, .cshrc, or .zshrc) to include"
     echo "the same command."
     echo
+    if [ $INST_ROCKSTAR -eq 1 ]
+    then
+        if [ $MYOS = "Darwin" ]
+        then
+            LD_NAME="DYLD_LIBRARY_PATH"
+        else
+            LD_NAME="LD_LIBRARY_PATH"
+        fi
+        echo
+        echo "For rockstar to work, you must also set $LD_NAME:"
+        echo
+        echo "    export $LD_NAME=$DEST_DIR/lib:\$$LD_NAME"
+        echo
+        echo "or whichever invocation is appropriate for your shell."
+    fi
     echo "========================================================================"
     echo
     echo "Oh, look at me, still talking when there's science to do!"

diff -r 9c2097cfcde80b3f0b71313c140a7bf9af8a591b -r a8ea50a2a99f9a1ece3587adec3a7a40388c9745 tests/test_install_script.py
--- a/tests/test_install_script.py
+++ b/tests/test_install_script.py
@@ -1,8 +1,9 @@
 import contextlib
 import glob
 import os
+import shutil
 import subprocess
-import shutil
+import sys
 import tempfile
 
 # dependencies that are always installed
@@ -39,6 +40,12 @@
     'unstructured'
 ]
 
+DEPENDENCY_IMPORT_TESTS = {
+    'unstructured': "from yt.utilities.lib import mesh_traversal",
+    'rockstar': ("from yt.analysis_modules.halo_finding.rockstar "
+                 "import rockstar_interface")
+}
+
 
 def call_unix_command(command):
     print ('Running "%s" in %s' % (command, os.getcwd()))
@@ -99,7 +106,7 @@
     call_unix_command('bash install_script.sh --yes')
 
 
-def verify_yt_installation(binary_yt):
+def verify_yt_installation(binary_yt, conda):
     yt_dir = glob.glob('yt-*')
     ndirs = len(yt_dir)
     if ndirs != 1:
@@ -110,12 +117,20 @@
     for dep in OPTIONAL_DEPS + REQUIRED_DEPS:
         if binary_yt is True and dep in YT_SOURCE_ONLY_DEPS:
             continue
+        if conda is False and dep in YT_SOURCE_CONDA_ONLY_DEPS:
+            continue
         elif dep == 'mercurial':
             hg_path = os.sep.join([yt_dir, 'bin', 'hg'])
             call_unix_command('{} --version'.format(hg_path))
-        elif dep in ['unstructured', 'rockstar']:
-            # FIXME, how do we test these?
-            pass
+        elif dep in DEPENDENCY_IMPORT_TESTS:
+            cmd = "{} -c '{}'"
+            if dep == 'rockstar':
+                cmd = 'LD_LIBRARY_PATH={} '.format(
+                    os.sep.join([os.curdir, yt_dir, 'lib'])) + cmd
+                if sys.platform == 'darwin':
+                    cmd = 'DY' + cmd
+            call_unix_command(cmd.format(
+                python_path, DEPENDENCY_IMPORT_TESTS[dep]))
         else:
             call_unix_command("{} -c 'import {}'".format(python_path, dep))
     return yt_dir
@@ -129,15 +144,18 @@
         with working_directory(tmpdir):
             run_install_script(
                 install_script_path, inst_py3, conda=True, binary_yt=True)
-            conda_binary_path = verify_yt_installation(binary_yt=True)
+            conda_binary_path = verify_yt_installation(
+                binary_yt=True, conda=True)
             shutil.rmtree(conda_binary_path)
 
             run_install_script(
                 install_script_path, inst_py3, conda=True, binary_yt=False)
-            conda_source_path = verify_yt_installation(binary_yt=False)
+            conda_source_path = verify_yt_installation(
+                binary_yt=False, conda=True)
             shutil.rmtree(conda_source_path)
 
             run_install_script(
                 install_script_path, inst_py3, conda=False, binary_yt=False)
-            source_path = verify_yt_installation(binary_yt=False)
+            source_path = verify_yt_installation(
+                binary_yt=False, conda=False)
             shutil.rmtree(source_path)
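
The rockstar import check above has to find librockstar at run time, which is why the test prefixes the command with LD_LIBRARY_PATH (DYLD_LIBRARY_PATH on OS X, hence the 'DY' + cmd line). A sketch of the invocation the test assembles, assuming a hypothetical install tree at ./yt-conda with its own python:

    # Linux: point the dynamic loader at the installation's lib directory.
    LD_LIBRARY_PATH=./yt-conda/lib ./yt-conda/bin/python -c \
        'from yt.analysis_modules.halo_finding.rockstar import rockstar_interface'
    # On OS X the same invocation uses DYLD_LIBRARY_PATH instead.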


https://bitbucket.org/yt_analysis/yt/commits/351fd47e37ad/
Changeset:   351fd47e37ad
Branch:      yt
User:        ngoldbaum
Date:        2016-04-14 15:11:12+00:00
Summary:     Fail when a conda environment is already activated and INST_CONDA=1
Affected #:  1 file

diff -r a8ea50a2a99f9a1ece3587adec3a7a40388c9745 -r 351fd47e37ad0190e9d59b918b8543fe166bd20f doc/install_script.sh
--- a/doc/install_script.sh
+++ b/doc/install_script.sh
@@ -108,6 +108,16 @@
 
 if [ $INST_CONDA -ne 0 ]
 then
+    if [ ! -z "${CONDA_DEFAULT_ENV}" ]
+    then
+        echo "Aborting the yt installation because you appear to already"
+        echo "have a conda environment activated. Either deactivate it with:"
+        echo
+        echo "    $ source deactivate"
+        echo
+        echo "or install yt into your current environment"
+        exit 1
+    fi
     DEST_SUFFIX="yt-conda"
 else
     if [ $INST_YT_SOURCE -eq 0 ]
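
The guard above keys off CONDA_DEFAULT_ENV, which conda's activate machinery exports into the shell, so a non-empty value means some environment is active. A quick way to check for and clear the condition before re-running, using the deactivation syntax conda used at the time:

    # Non-empty output here means the installer will abort.
    echo "${CONDA_DEFAULT_ENV}"
    # Deactivate the environment, then re-run the script.
    source deactivate
    bash install_script.sh --yes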


https://bitbucket.org/yt_analysis/yt/commits/635ccfda54b2/
Changeset:   635ccfda54b2
Branch:      yt
User:        ngoldbaum
Date:        2016-04-14 15:11:28+00:00
Summary:     Print out what INST_CONDA and INST_YT_SOURCE are before running
Affected #:  1 file

diff -r 351fd47e37ad0190e9d59b918b8543fe166bd20f -r 635ccfda54b26b8118ec595def7957e59ccdcced doc/install_script.sh
--- a/doc/install_script.sh
+++ b/doc/install_script.sh
@@ -523,6 +523,14 @@
 echo "the script if you aren't such a fan."
 echo
 
+printf "%-18s = %s so I " "INST_CONDA" "${INST_CONDA}"
+get_willwont ${INST_CONDA}
+echo "be installing a conda-based python environment"
+
+printf "%-18s = %s so I " "INST_YT_SOURCE" "${INST_YT_SOURCE}"
+get_willwont ${INST_YT_SOURCE}
+echo "be compiling yt from source"
+
 printf "%-18s = %s so I " "INST_PY3" "${INST_PY3}"
 get_willwont ${INST_PY3}
 echo "be installing Python 3"


https://bitbucket.org/yt_analysis/yt/commits/61532f7361ec/
Changeset:   61532f7361ec
Branch:      yt
User:        ngoldbaum
Date:        2016-04-14 19:24:52+00:00
Summary:     Use curl before wget
Affected #:  1 file

diff -r 635ccfda54b26b8118ec595def7957e59ccdcced -r 61532f7361ec919c9905e327a571efbbc8ecad71 doc/install_script.sh
--- a/doc/install_script.sh
+++ b/doc/install_script.sh
@@ -701,13 +701,13 @@
     cd ..
 }
 
-if type -P wget &>/dev/null
+if type -P curl &>/dev/null
 then
+    echo "Using curl"
+    export GETFILE="curl -sSO"
+else
     echo "Using wget"
     export GETFILE="wget -nv"
-else
-    echo "Using curl"
-    export GETFILE="curl -sSO"
 fi
 
 if type -P sha512sum &> /dev/null
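
The reordered test prefers curl, presumably because it ships by default on OS X while wget typically does not; wget remains the fallback. A slightly hardened sketch of the same selection that also aborts when neither downloader exists (the shipped script simply assumes wget is present as the fallback):

    if type -P curl &> /dev/null
    then
        export GETFILE="curl -sSO"   # -sS: silent, but still report errors; -O: keep remote filename
    elif type -P wget &> /dev/null
    then
        export GETFILE="wget -nv"    # -nv: non-verbose output
    else
        echo "Neither curl nor wget is available; please install one." >&2
        exit 1
    fi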


https://bitbucket.org/yt_analysis/yt/commits/a474e0c679f8/
Changeset:   a474e0c679f8
Branch:      yt
User:        xarthisius
Date:        2016-04-20 18:03:47+00:00
Summary:     Merged in ngoldbaum/yt (pull request #2009)

merge get_yt.sh into install_script.sh, add install script tests
Affected #:  3 files

diff -r a3f0bf738e6566959d9144adc81c2a918099615b -r a474e0c679f8d38a8431115c80c784fce575a52c doc/get_yt.sh
--- a/doc/get_yt.sh
+++ b/doc/get_yt.sh
@@ -1,394 +1,4 @@
-#
-# Hi there!  Welcome to the yt installation script.
-#
-# This script is designed to create a fully isolated Python installation
-# with the dependencies you need to run yt.
-#
-# This script is based on Conda, a distribution mechanism from Continuum
-# Analytics.  The process is as follows:
-#
-#  1. Download the appropriate Conda installation package
-#  2. Install Conda into the specified directory
-#  3. Install yt-specific dependencies
-#  4. Install yt
-#
-# There are a few options listed below, but by default, this will install
-# everything.  At the end, it will tell you what to do to use yt.
-#
-# By default this will install yt from source.
-#
-# If you experience problems, please visit the Help section at
-# http://yt-project.org.
-#
-DEST_SUFFIX="yt-conda"
-DEST_DIR="`pwd`/${DEST_SUFFIX/ /}"   # Installation location
-BRANCH="yt" # This is the branch to which we will forcibly update.
-INST_YT_SOURCE=1 # Do we do a source install of yt?
-INST_UNSTRUCTURED=1 # Do we want to build with unstructured mesh support?
-
-##################################################################
-#                                                                #
-# You will likely not have to modify anything below this region. #
-#                                                                #
-##################################################################
-
-LOG_FILE="`pwd`/yt_install.log"
-
-# Here is the idiom for redirecting to the log file:
-# ( SOMECOMMAND 2>&1 ) 1>> ${LOG_FILE} || do_exit
-
-MINICONDA_URLBASE="http://repo.continuum.io/miniconda"
-MINICONDA_VERSION="latest"
-YT_RECIPE_REPO="https://bitbucket.org/yt_analysis/yt_conda/raw/default"
-
-if [ $INST_UNSTRUCTURED -eq 1 ]
-then
-  if [ $INST_YT_SOURCE -eq 0 ]
-  then
-      echo "yt must be compiled from source to use the unstructured mesh support."
-      echo "Please set INST_YT_SOURCE to 1 and re-run."
-      exit 1
-  fi
-  if [ `uname` = "Darwin" ]
-  then
-      EMBREE="embree-2.8.0.x86_64.macosx"
-      EMBREE_URL="https://github.com/embree/embree/releases/download/v2.8.0/$EMBREE.tar.gz"
-  else
-      EMBREE="embree-2.8.0.x86_64.linux"
-      EMBREE_URL="https://github.com/embree/embree/releases/download/v2.8.0/$EMBREE.tar.gz"
-  fi
-  PYEMBREE_URL="https://github.com/scopatz/pyembree/archive/master.zip"
-fi
-
-function do_exit
-{
-    echo "********************************************"
-    echo "        FAILURE REPORT:"
-    echo "********************************************"
-    echo
-    tail -n 10 ${LOG_FILE}
-    echo
-    echo "********************************************"
-    echo "********************************************"
-    echo "Failure.  Check ${LOG_FILE}.  The last 10 lines are above."
-    exit 1
-}
-
-function log_cmd
-{
-    echo "EXECUTING:" >> ${LOG_FILE}
-    echo "  $*" >> ${LOG_FILE}
-    ( $* 2>&1 ) 1>> ${LOG_FILE} || do_exit
-}
-
-# These are needed to prevent pushd and popd from printing to stdout
-
-function pushd () {
-    command pushd "$@" > /dev/null
-}
-
-function popd () {
-    command popd "$@" > /dev/null
-}
-
-function get_ytdata
-{
-    echo "Downloading $1 from yt-project.org"
-    [ -e $1 ] && return
-    ${GETFILE} "http://yt-project.org/data/$1" || do_exit
-    ( ${SHASUM} -c $1.sha512 2>&1 ) 1>> ${LOG_FILE} || do_exit
-}
-
-function get_ytrecipe {
-    RDIR=${DEST_DIR}/src/yt-recipes/$1
-    mkdir -p ${RDIR}
-    pushd ${RDIR}
-    log_cmd ${GETFILE} ${YT_RECIPE_REPO}/$1/meta.yaml
-    log_cmd ${GETFILE} ${YT_RECIPE_REPO}/$1/build.sh
-    NEW_PKG=`conda build --output ${RDIR}`
-    log_cmd conda build --no-binstar-upload ${RDIR}
-    log_cmd conda install ${NEW_PKG}
-    popd
-}
-
-
-echo
-echo
-echo "========================================================================"
-echo
-echo "Hi there!  This is the yt installation script.  We're going to download"
-echo "some stuff and install it to create a self-contained, isolated"
-echo "environment for yt to run within."
-echo
-echo "This will install Miniconda from Continuum Analytics, the necessary"
-echo "packages to run yt, and create a self-contained environment for you to"
-echo "use yt.  Additionally, Conda itself provides the ability to install"
-echo "many other packages that can be used for other purposes using the"
-echo "'conda install' command."
-echo
-MYOS=`uname -s`       # A guess at the OS
-if [ $INST_YT_SOURCE -ne 0 ]
-then
-    if [ "${MYOS##Darwin}" != "${MYOS}" ]
-    then
-        echo "Looks like you're running on Mac OSX."
-        echo
-        echo "NOTE: you must have the Xcode command line tools installed."
-        echo
-        echo "The instructions for obtaining these tools varies according"
-        echo "to your exact OS version.  On older versions of OS X, you"
-        echo "must register for an account on the apple developer tools"
-        echo "website: https://developer.apple.com/downloads to obtain the"
-        echo "download link."
-        echo
-        echo "We have gathered some additional instructions for each"
-        echo "version of OS X below. If you have trouble installing yt"
-        echo "after following these instructions, don't hesitate to contact"
-        echo "the yt user's e-mail list."
-        echo
-        echo "You can see which version of OSX you are running by clicking"
-        echo "'About This Mac' in the apple menu on the left hand side of"
-        echo "menu bar.  We're assuming that you've installed all operating"
-        echo "system updates; if you have an older version, we suggest"
-        echo "running software update and installing all available updates."
-        echo
-        echo "OS X 10.5.8: search for and download Xcode 3.1.4 from the"
-        echo "Apple developer tools website."
-        echo
-        echo "OS X 10.6.8: search for and download Xcode 3.2 from the Apple"
-        echo "developer tools website.  You can either download the"
-        echo "Xcode 3.2.2 Developer Tools package (744 MB) and then use"
-        echo "Software Update to update to XCode 3.2.6 or"
-        echo "alternatively, you can download the Xcode 3.2.6/iOS SDK"
-        echo "bundle (4.1 GB)."
-        echo
-        echo "OS X 10.7.5: download Xcode 4.2 from the mac app store"
-        echo "(search for Xcode)."
-        echo "Alternatively, download the Xcode command line tools from"
-        echo "the Apple developer tools website."
-        echo
-        echo "OS X 10.8.4, 10.9, 10.10, and 10.11:"
-        echo "download the appropriate version of Xcode from the"
-        echo "mac app store (search for Xcode)."
-        echo
-        echo "Additionally, you will have to manually install the Xcode"
-        echo "command line tools."
-        echo
-        echo "For OS X 10.8, see:"
-        echo "http://stackoverflow.com/questions/9353444"
-        echo
-        echo "For OS X 10.9 and newer the command line tools can be installed"
-        echo "with the following command:"
-        echo "    xcode-select --install"
-    fi
-    if [ "${MYOS##Linux}" != "${MYOS}" ]
-    then
-        echo "Looks like you're on Linux."
-        echo
-        echo "Please make sure you have the developer tools for your OS "
-        echo "installed."
-        echo
-        if [ -f /etc/SuSE-release ] && [ `grep --count SUSE /etc/SuSE-release` -gt 0 ]
-        then
-            echo "Looks like you're on an OpenSUSE-compatible machine."
-            echo
-            echo "You need to have these packages installed:"
-            echo
-            echo "  * devel_C_C++"
-            echo "  * libuuid-devel"
-            echo "  * gcc-c++"
-            echo "  * chrpath"
-            echo
-            echo "You can accomplish this by executing:"
-            echo
-            echo "$ sudo zypper install -t pattern devel_C_C++"
-            echo "$ sudo zypper install gcc-c++ libuuid-devel zip"
-            echo "$ sudo zypper install chrpath"
-        fi
-        if [ -f /etc/lsb-release ] && [ `grep --count buntu /etc/lsb-release` -gt 0 ]
-        then
-            echo "Looks like you're on an Ubuntu-compatible machine."
-            echo
-            echo "You need to have these packages installed:"
-            echo
-            echo "  * libssl-dev"
-            echo "  * build-essential"
-            echo "  * libncurses5"
-            echo "  * libncurses5-dev"
-            echo "  * uuid-dev"
-            echo "  * chrpath"
-            echo
-            echo "You can accomplish this by executing:"
-            echo
-            echo "$ sudo apt-get install libssl-dev build-essential libncurses5 libncurses5-dev zip uuid-dev chrpath"
-            echo
-        fi
-        echo
-        echo "If you are running on a supercomputer or other module-enabled"
-        echo "system, please make sure that the GNU module has been loaded."
-        echo
-    fi
-fi
-if [ "${MYOS##x86_64}" != "${MYOS}" ]
-then
-    MINICONDA_OS="Linux-x86_64"
-elif [ "${MYOS##i386}" != "${MYOS}" ]
-then
-    MINICONDA_OS="Linux-x86"
-elif [ "${MYOS##Darwin}" != "${MYOS}" ]
-then
-     MINICONDA_OS="MacOSX-x86_64"
-else
-    echo "Not sure which Linux distro you are running."
-    echo "Going with x86_64 architecture."
-    MINICONDA_OS="Linux-x86_64"
-fi
-echo
-echo "If you'd rather not continue, hit Ctrl-C."
-echo
-echo "========================================================================"
-echo
-read -p "[hit enter] "
-echo
-echo "Awesome!  Here we go."
-echo
-
-MINICONDA_PKG=Miniconda-${MINICONDA_VERSION}-${MINICONDA_OS}.sh
-
-if type -P wget &>/dev/null
-then
-    echo "Using wget"
-    export GETFILE="wget -nv -nc"
-else
-    echo "Using curl"
-    export GETFILE="curl -sSO"
-fi
-
-echo
-echo "Downloading ${MINICONDA_URLBASE}/${MINICONDA_PKG}"
-echo "Downloading ${MINICONDA_URLBASE}/${MINICONDA_PKG}" >> ${LOG_FILE}
-echo
-
-${GETFILE} ${MINICONDA_URLBASE}/${MINICONDA_PKG} || do_exit
-
-echo "Installing the Miniconda python environment."
-
-log_cmd bash ./${MINICONDA_PKG} -b -p $DEST_DIR
-
-# This we *do* need.
-export PATH=${DEST_DIR}/bin:$PATH
-
-echo "Installing the necessary packages for yt."
-echo "This may take a while, but don't worry.  yt loves you."
-
-declare -a YT_DEPS
-YT_DEPS+=('python')
-YT_DEPS+=('setuptools')
-YT_DEPS+=('numpy')
-YT_DEPS+=('jupyter')
-YT_DEPS+=('ipython')
-YT_DEPS+=('sphinx')
-YT_DEPS+=('h5py')
-YT_DEPS+=('matplotlib')
-YT_DEPS+=('cython')
-YT_DEPS+=('nose')
-YT_DEPS+=('conda-build')
-YT_DEPS+=('mercurial')
-YT_DEPS+=('sympy')
-
-if [ $INST_UNSTRUCTURED -eq 1 ]
-then
-  YT_DEPS+=('netcdf4')
-fi
-
-# Here is our dependency list for yt
-log_cmd conda update --yes conda
-
-log_cmd echo "DEPENDENCIES" ${YT_DEPS[@]}
-for YT_DEP in "${YT_DEPS[@]}"; do
-    echo "Installing $YT_DEP"
-    log_cmd conda install --yes ${YT_DEP}
-done
-
-if [ $INST_UNSTRUCTURED -eq 1 ]
-then
-
-  echo "Installing embree"
-  mkdir ${DEST_DIR}/src
-  cd ${DEST_DIR}/src
-  ( ${GETFILE} "$EMBREE_URL" 2>&1 ) 1>> ${LOG_FILE} || do_exit
-  log_cmd tar xfz ${EMBREE}.tar.gz
-  log_cmd mv ${DEST_DIR}/src/${EMBREE}/include/embree2 ${DEST_DIR}/include
-  log_cmd mv ${DEST_DIR}/src/${EMBREE}/lib/lib*.* ${DEST_DIR}/lib
-  if [ `uname` = "Darwin" ]
-  then
-    ln -s ${DEST_DIR}/lib/libembree.2.dylib ${DEST_DIR}/lib/libembree.dylib
-    install_name_tool -id ${DEST_DIR}/lib/libembree.2.dylib ${DEST_DIR}/lib/libembree.2.dylib
-  else
-    ln -s ${DEST_DIR}/lib/libembree.so.2 ${DEST_DIR}/lib/libembree.so
-  fi
-
-  echo "Installing pyembree from source"
-  ( ${GETFILE} "$PYEMBREE_URL" 2>&1 ) 1>> ${LOG_FILE} || do_exit
-  log_cmd unzip ${DEST_DIR}/src/master.zip
-  pushd ${DEST_DIR}/src/pyembree-master
-  log_cmd python setup.py install build_ext -I${DEST_DIR}/include -L${DEST_DIR}/lib
-  popd
-fi
-
-if [ $INST_YT_SOURCE -eq 0 ]
-then
-  echo "Installing yt"
-  log_cmd conda install --yes yt
-else
-    # We do a source install.
-    echo "Installing yt from source"
-    YT_DIR="${DEST_DIR}/src/yt-hg"
-    log_cmd hg clone -r ${BRANCH} https://bitbucket.org/yt_analysis/yt ${YT_DIR}
-if [ $INST_UNSTRUCTURED -eq 1 ]
-then
-    echo $DEST_DIR > ${YT_DIR}/embree.cfg
-fi
-    pushd ${YT_DIR}
-    log_cmd python setup.py develop
-    popd
-fi
-
-echo
-echo
-echo "========================================================================"
-echo
-echo "yt and the Conda system are now installed in $DEST_DIR ."
-echo
-echo "You must now modify your PATH variable by prepending:"
-echo
-echo "   $DEST_DIR/bin"
-echo
-echo "On Bash-style shells you can copy/paste the following command to "
-echo "temporarily activate the yt installation:"
-echo
-echo "    export PATH=$DEST_DIR/bin:\$PATH"
-echo
-echo "and on csh-style shells:"
-echo
-echo "    setenv PATH $DEST_DIR/bin:\$PATH"
-echo
-echo "You can also update the init file appropriate for your shell to include"
-echo "the same command."
-echo
-echo "To get started with yt, check out the orientation:"
-echo
-echo "    http://yt-project.org/doc/orientation/"
-echo
-echo "For support, see the website and join the mailing list:"
-echo
-echo "    http://yt-project.org/"
-echo "    http://yt-project.org/data/      (Sample data)"
-echo "    http://yt-project.org/doc/       (Docs)"
-echo
-echo "    http://lists.spacepope.org/listinfo.cgi/yt-users-spacepope.org"
-echo
-echo "========================================================================"
-echo
-echo "Oh, look at me, still talking when there's science to do!"
-echo "Good luck, and email the user list if you run into any problems."
+echo "This script has been deprecated."
+echo "You can now create a conda-based build using install_script.sh"
+echo "Please download that script and run it"
+exit 0

This diff is so big that we needed to truncate the remainder.

Repository URL: https://bitbucket.org/yt_analysis/yt/
