[yt-svn] commit/yt: 5 new changesets

commits-noreply at bitbucket.org commits-noreply at bitbucket.org
Tue Oct 29 09:01:14 PDT 2013


5 new commits in yt:

https://bitbucket.org/yt_analysis/yt/commits/19728ac91bad/
Changeset:   19728ac91bad
Branch:      yt
User:        jzuhone
Date:        2013-10-29 14:27:42
Summary:     Prettier S-Z plots. Minor touch-ups to the FITS writing interface.
Affected #:  3 files

diff -r 72aa625e39ec63eb1c10b9dc911e782521ff62b4 -r 19728ac91bad2835149a981df4a09d73f847308f yt/analysis_modules/sunyaev_zeldovich/projection.py
--- a/yt/analysis_modules/sunyaev_zeldovich/projection.py
+++ b/yt/analysis_modules/sunyaev_zeldovich/projection.py
@@ -259,21 +259,21 @@
         self.data["TeSZ"] = ImageArray(Te)
 
     @parallel_root_only
-    def write_fits(self, filename_prefix, clobber=True):
+    def write_fits(self, filename, clobber=True):
         r""" Export images to a FITS file. Writes the SZ distortion in all
         specified frequencies as well as the mass-weighted temperature and the
         optical depth. Distance units are in kpc.  
         
         Parameters
         ----------
-        filename_prefix : string
-            The prefix of the FITS filename.
+        filename : string
+            The name of the FITS file to be written. 
         clobber : boolean, optional
             If the file already exists, do we overwrite?
                     
         Examples
         --------
-        >>> szprj.write_fits("SZbullet", clobber=False)
+        >>> szprj.write_fits("SZbullet.fits", clobber=False)
         """
         coords = {}
         coords["dx"] = self.dx*self.pf.units["kpc"]
@@ -282,11 +282,12 @@
         coords["yctr"] = 0.0
         coords["units"] = "kpc"
         other_keys = {"Time" : self.pf.current_time}
-        write_fits(self.data, filename_prefix, clobber=clobber, coords=coords,
+        write_fits(self.data, filename, clobber=clobber, coords=coords,
                    other_keys=other_keys)
 
     @parallel_root_only
-    def write_png(self, filename_prefix):
+    def write_png(self, filename_prefix, cmap_name="algae",
+                  log_fields=None):
         r""" Export images to PNG files. Writes the SZ distortion in all
         specified frequencies as well as the mass-weighted temperature and the
         optical depth. Distance units are in kpc. 
@@ -299,17 +300,65 @@
         Examples
         --------
         >>> szprj.write_png("SZsloshing")
-        """     
+        """
+        from IPython import embed
+        import matplotlib
+        import matplotlib.pyplot as plt
+        if log_fields is None: log_fields = {}
+        ticks_font = matplotlib.font_manager.FontProperties(family='serif')
         extent = tuple([bound*self.pf.units["kpc"] for bound in self.bounds])
         for field, image in self.items():
-            filename=filename_prefix+"_"+field+".png"
-            label = self.display_names[field]
+            data = image.copy()
+            vmin, vmax = image.min(), image.max()
+            negative = False
+            crossover = False
+            if vmin < 0 and vmax < 0:
+                data *= -1
+                negative = True                                        
+            if log_fields.has_key(field):
+                log_field = log_fields[field]
+            else:
+                log_field = True
+            if log_field:
+                formatter = matplotlib.ticker.LogFormatterMathtext()        
+                norm = matplotlib.colors.LogNorm()
+                if vmin < 0 and vmax > 0:
+                    crossover = True
+                    linthresh = min(vmax, -vmin)/100.
+                    norm=matplotlib.colors.SymLogNorm(linthresh,
+                                                      vmin=vmin, vmax=vmax)
+            else:
+                norm = None
+                formatter = None
+            filename = filename_prefix+"_"+field+".png"
+            cbar_label = self.display_names[field]
             if self.units[field] is not None:
-                label += " ("+self.units[field]+")"
-            write_projection(image, filename, colorbar_label=label, take_log=False,
-                             extent=extent, xlabel=r"$\mathrm{x\ (kpc)}$",
-                             ylabel=r"$\mathrm{y\ (kpc)}$")
-
+                cbar_label += " ("+self.units[field]+")"
+            fig = plt.figure(figsize=(8.0,6.0))
+            ax = fig.add_subplot(111)
+            cax = ax.imshow(data, norm=norm, extent=extent, cmap=cmap_name)
+            for label in ax.get_xticklabels():
+                label.set_fontproperties(ticks_font)
+            for label in ax.get_yticklabels():
+                label.set_fontproperties(ticks_font)                      
+            ax.set_xlabel(r"$\mathrm{x\ (kpc)}$")
+            ax.set_ylabel(r"$\mathrm{y\ (kpc)}$")
+            cbar = fig.colorbar(cax, format=formatter)
+            cbar.ax.set_ylabel(cbar_label)
+            if negative:
+                cbar.ax.set_yticklabels(["-"+label.get_text()
+                                         for label in cbar.ax.get_yticklabels()])
+            if crossover:
+                yticks = list(-10**np.arange(np.floor(np.log10(-vmin)),
+                                             np.rint(np.log10(linthresh))-1, -1)) + [0] + \
+                         list(10**np.arange(np.rint(np.log10(linthresh)),
+                                            np.ceil(np.log10(vmax))+1))
+                cbar.set_ticks(yticks)
+            for label in cbar.ax.get_yticklabels():
+                label.set_fontproperties(ticks_font)                 
+            fig.tight_layout()
+            plt.savefig(filename)
+            
     @parallel_root_only
     def write_hdf5(self, filename):
         r"""Export the set of S-Z fields to a set of HDF5 datasets.

diff -r 72aa625e39ec63eb1c10b9dc911e782521ff62b4 -r 19728ac91bad2835149a981df4a09d73f847308f yt/visualization/fixed_resolution.py
--- a/yt/visualization/fixed_resolution.py
+++ b/yt/visualization/fixed_resolution.py
@@ -264,13 +264,13 @@
             output.create_dataset(field,data=self[field])
         output.close()
 
-    def export_fits(self, filename_prefix, fields=None, clobber=False,
+    def export_fits(self, filename, fields=None, clobber=False,
                     other_keys=None, units="cm", sky_center=(0.0,0.0), D_A=None):
 
         """
         This will export a set of FITS images of either the fields specified
         or all the fields already in the object.  The output filename is
-        *filename_prefix*. If clobber is set to True, this will overwrite any
+        *filename*. If clobber is set to True, this will overwrite any
         existing FITS file.
 
         This requires the *pyfits* module, which is a standalone module
@@ -284,8 +284,8 @@
 
         Parameters
         ----------
-        filename_prefix : string
-            This prefix will be prepended to the FITS file name.
+        filename : string
+            The name of the FITS file to be written.
         fields : list of strings
             These fields will be pixelized and output.
         clobber : boolean
@@ -345,7 +345,7 @@
         hdu_keys["Time"] = self.pf.current_time
 
         data = dict([(field,self[field]) for field in fields])
-        write_fits(data, filename_prefix, clobber=clobber, coords=coords,
+        write_fits(data, filename, clobber=clobber, coords=coords,
                    other_keys=hdu_keys)
                 
     def open_in_ds9(self, field, take_log=True):

diff -r 72aa625e39ec63eb1c10b9dc911e782521ff62b4 -r 19728ac91bad2835149a981df4a09d73f847308f yt/visualization/image_writer.py
--- a/yt/visualization/image_writer.py
+++ b/yt/visualization/image_writer.py
@@ -438,7 +438,7 @@
     return filename
 
 
-def write_fits(image, filename_prefix, clobber=True, coords=None,
+def write_fits(image, filename, clobber=True, coords=None,
                other_keys=None):
     """
     This will export a FITS image of a floating point array. The output filename is
@@ -457,8 +457,8 @@
     image : array_like, or dict of array_like objects
         This is either an (unscaled) array of floating point values, or a dict of
         such arrays, shape (N,N,) to save in a FITS file. 
-    filename_prefix : string
-        This prefix will be prepended to every FITS file name.
+    filename : string
+        The name of the FITS file to be written.
     clobber : boolean
         If the file exists, this governs whether we will overwrite.
     coords : dictionary, optional
@@ -520,7 +520,7 @@
         hdulist.append(hdu)
 
     hdulist = pyfits.HDUList(hdulist)
-    hdulist.writeto("%s.fits" % (filename_prefix), clobber=clobber)                    
+    hdulist.writeto(filename, clobber=clobber)                    
 
 def display_in_notebook(image, max_val=None):
     """


https://bitbucket.org/yt_analysis/yt/commits/fc45b6d9d7ae/
Changeset:   fc45b6d9d7ae
Branch:      yt
User:        jzuhone
Date:        2013-10-29 14:28:17
Summary:     Merging
Affected #:  25 files

diff -r 19728ac91bad2835149a981df4a09d73f847308f -r fc45b6d9d7ae3ea295c722e14f00113bdb2f00d4 doc/get_yt.sh
--- /dev/null
+++ b/doc/get_yt.sh
@@ -0,0 +1,358 @@
+#
+# Hi there!  Welcome to the yt installation script.
+#
+# This script is designed to create a fully isolated Python installation
+# with the dependencies you need to run yt.
+#
+# This script is based on Conda, a distribution mechanism from Continuum
+# Analytics.  The process is as follows:
+#
+#  1. Download the appropriate Conda installation package
+#  2. Install Conda into the specified directory
+#  3. Install yt-specific dependencies
+#  4. Install yt
+#
+# There are a few options listed below, but by default, this will install
+# everything.  At the end, it will tell you what to do to use yt.
+#
+# By default this will install yt from source.
+#
+# If you experience problems, please visit the Help section at 
+# http://yt-project.org.
+#
+DEST_SUFFIX="yt-conda"
+DEST_DIR="`pwd`/${DEST_SUFFIX/ /}"   # Installation location
+BRANCH="yt" # This is the branch to which we will forcibly update.
+INST_YT_SOURCE=1 # Do we do a source install of yt?
+
+##################################################################
+#                                                                #
+# You will likely not have to modify anything below this region. #
+#                                                                #
+##################################################################
+
+LOG_FILE="`pwd`/yt_install.log"
+
+# Here is the idiom for redirecting to the log file:
+# ( SOMECOMMAND 2>&1 ) 1>> ${LOG_FILE} || do_exit
+
+MINICONDA_URLBASE="http://repo.continuum.io/miniconda"
+MINICONDA_VERSION="1.9.1"
+YT_RECIPE_REPO="https://bitbucket.org/yt_analysis/yt_conda/raw/default"
+
+function do_exit
+{
+    echo "********************************************"
+    echo "        FAILURE REPORT:"
+    echo "********************************************"
+    echo
+    tail -n 10 ${LOG_FILE}
+    echo
+    echo "********************************************"
+    echo "********************************************"
+    echo "Failure.  Check ${LOG_FILE}.  The last 10 lines are above."
+    exit 1
+}
+
+function log_cmd
+{
+    echo "EXECUTING:" >> ${LOG_FILE}
+    echo "  $*" >> ${LOG_FILE}
+    ( $* 2>&1 ) 1>> ${LOG_FILE} || do_exit
+}
+
+function get_ytproject
+{
+    [ -e $1 ] && return
+    echo "Downloading $1 from yt-project.org"
+    ${GETFILE} "http://yt-project.org/dependencies/$1" || do_exit
+    ( ${SHASUM} -c $1.sha512 2>&1 ) 1>> ${LOG_FILE} || do_exit
+}
+
+function get_ytdata
+{
+    echo "Downloading $1 from yt-project.org"
+    [ -e $1 ] && return
+    ${GETFILE} "http://yt-project.org/data/$1" || do_exit
+    ( ${SHASUM} -c $1.sha512 2>&1 ) 1>> ${LOG_FILE} || do_exit
+}
+
+function get_ytrecipe {
+    RDIR=${DEST_DIR}/src/yt-recipes/$1
+    mkdir -p ${RDIR}
+    pushd ${RDIR}
+    log_cmd ${GETFILE} ${YT_RECIPE_REPO}/$1/meta.yaml
+    log_cmd ${GETFILE} ${YT_RECIPE_REPO}/$1/build.sh
+    NEW_PKG=`conda build --output ${RDIR}`
+    log_cmd conda build --no-binstar-upload ${RDIR}
+    log_cmd conda install ${NEW_PKG}
+    popd
+}
+
+
+echo
+echo
+echo "========================================================================"
+echo
+echo "Hi there!  This is the yt installation script.  We're going to download"
+echo "some stuff and install it to create a self-contained, isolated"
+echo "environment for yt to run within."
+echo
+echo "This will install Miniconda from Continuum Analytics, the necessary"
+echo "packages to run yt, and create a self-contained environment for you to"
+echo "use yt.  Additionally, Conda itself provides the ability to install"
+echo "many other packages that can be used for other purposes."
+echo
+MYOS=`uname -s`       # A guess at the OS
+if [ "${MYOS##Darwin}" != "${MYOS}" ]
+then
+  echo "Looks like you're running on Mac OSX."
+  echo
+  echo "NOTE: you must have the Xcode command line tools installed."
+  echo
+  echo "The instructions for obtaining these tools varies according"
+  echo "to your exact OS version.  On older versions of OS X, you"
+  echo "must register for an account on the apple developer tools"
+  echo "website: https://developer.apple.com/downloads to obtain the"
+  echo "download link."
+  echo
+  echo "We have gathered some additional instructions for each"
+  echo "version of OS X below. If you have trouble installing yt"
+  echo "after following these instructions, don't hesitate to contact"
+  echo "the yt user's e-mail list."
+  echo
+  echo "You can see which version of OSX you are running by clicking"
+  echo "'About This Mac' in the apple menu on the left hand side of"
+  echo "menu bar.  We're assuming that you've installed all operating"
+  echo "system updates; if you have an older version, we suggest"
+  echo "running software update and installing all available updates."
+  echo
+  echo "OS X 10.5.8: search for and download Xcode 3.1.4 from the"
+  echo "Apple developer tools website."
+  echo
+  echo "OS X 10.6.8: search for and download Xcode 3.2 from the Apple"
+  echo "developer tools website.  You can either download the"
+  echo "Xcode 3.2.2 Developer Tools package (744 MB) and then use"
+  echo "Software Update to update to XCode 3.2.6 or"
+  echo "alternatively, you can download the Xcode 3.2.6/iOS SDK"
+  echo "bundle (4.1 GB)."
+  echo
+  echo "OS X 10.7.5: download Xcode 4.2 from the mac app store"
+  echo "(search for Xcode)."
+  echo "Alternatively, download the Xcode command line tools from"
+  echo "the Apple developer tools website."
+  echo
+  echo "OS X 10.8.2: download Xcode 4.6.1 from the mac app store."
+  echo "(search for Xcode)."
+  echo "Additionally, you will have to manually install the Xcode"
+  echo "command line tools, see:"
+  echo "http://stackoverflow.com/questions/9353444"
+  echo "Alternatively, download the Xcode command line tools from"
+  echo "the Apple developer tools website."
+  echo
+  echo "NOTE: It's possible that the installation will fail, if so,"
+  echo "please set the following environment variables, remove any"
+  echo "broken installation tree, and re-run this script verbatim."
+  echo
+  echo "$ export CC=gcc"
+  echo "$ export CXX=g++"
+  echo
+  MINICONDA_OS="MacOSX-x86_64"
+fi
+if [ "${MYOS##Linux}" != "${MYOS}" ]
+then
+  echo "Looks like you're on Linux."
+  echo
+  echo "Please make sure you have the developer tools for your OS installed."
+  echo
+  if [ -f /etc/SuSE-release ] && [ `grep --count SUSE /etc/SuSE-release` -gt 0 ]
+  then
+    echo "Looks like you're on an OpenSUSE-compatible machine."
+    echo
+    echo "You need to have these packages installed:"
+    echo
+    echo "  * devel_C_C++"
+    echo "  * libopenssl-devel"
+    echo "  * libuuid-devel"
+    echo "  * zip"
+    echo "  * gcc-c++"
+    echo "  * chrpath"
+    echo
+    echo "You can accomplish this by executing:"
+    echo
+    echo "$ sudo zypper install -t pattern devel_C_C++"
+    echo "$ sudo zypper install gcc-c++ libopenssl-devel libuuid-devel zip"
+    echo "$ sudo zypper install chrpath"
+  fi
+  if [ -f /etc/lsb-release ] && [ `grep --count buntu /etc/lsb-release` -gt 0 ]
+  then
+    echo "Looks like you're on an Ubuntu-compatible machine."
+    echo
+    echo "You need to have these packages installed:"
+    echo
+    echo "  * libssl-dev"
+    echo "  * build-essential"
+    echo "  * libncurses5"
+    echo "  * libncurses5-dev"
+    echo "  * zip"
+    echo "  * uuid-dev"
+    echo "  * chrpath"
+    echo
+    echo "You can accomplish this by executing:"
+    echo
+    echo "$ sudo apt-get install libssl-dev build-essential libncurses5 libncurses5-dev zip uuid-dev chrpath"
+    echo
+  fi
+  echo
+  echo "If you are running on a supercomputer or other module-enabled"
+  echo "system, please make sure that the GNU module has been loaded."
+  echo
+  if [ "${MYOS##x86_64}" != "${MYOS}" ]
+  then
+    MINICONDA_OS="Linux-x86_64"
+  elif [ "${MYOS##i386}" != "${MYOS}" ]
+  then
+    MINICONDA_OS="Linux-x86"
+  else
+    echo "Not sure which type of Linux you're on.  Going with x86_64."
+    MINICONDA_OS="Linux-x86_64"
+  fi
+fi
+echo
+echo "If you'd rather not continue, hit Ctrl-C."
+echo
+echo "========================================================================"
+echo
+read -p "[hit enter] "
+echo
+echo "Awesome!  Here we go."
+echo
+
+MINICONDA_PKG=Miniconda-${MINICONDA_VERSION}-${MINICONDA_OS}.sh
+
+if type -P wget &>/dev/null
+then
+    echo "Using wget"
+    export GETFILE="wget -nv"
+else
+    echo "Using curl"
+    export GETFILE="curl -sSO"
+fi
+
+echo
+echo "Downloading ${MINICONDA_URLBASE}/${MINICONDA_PKG}"
+echo "Downloading ${MINICONDA_URLBASE}/${MINICONDA_PKG}" >> ${LOG_FILE}
+echo
+
+${GETFILE} ${MINICONDA_URLBASE}/${MINICONDA_PKG} || do_exit
+
+echo "Installing the Miniconda python environment."
+
+log_cmd bash ./${MINICONDA_PKG} -b -p $DEST_DIR
+
+# I don't think we need OR want this anymore:
+#export LD_LIBRARY_PATH=${DEST_DIR}/lib:$LD_LIBRARY_PATH
+
+# This we *do* need.
+export PATH=${DEST_DIR}/bin:$PATH
+
+echo "Installing the necessary packages for yt."
+echo "This may take a while, but don't worry.  yt loves you."
+
+declare -a YT_DEPS
+YT_DEPS+=('python')
+YT_DEPS+=('distribute')
+YT_DEPS+=('libpng')
+YT_DEPS+=('freetype')
+YT_DEPS+=('hdf5')
+YT_DEPS+=('numpy')
+YT_DEPS+=('pygments')
+YT_DEPS+=('jinja2')
+YT_DEPS+=('tornado')
+YT_DEPS+=('pyzmq')
+YT_DEPS+=('ipython')
+YT_DEPS+=('sphinx')
+YT_DEPS+=('h5py')
+YT_DEPS+=('matplotlib')
+YT_DEPS+=('cython')
+
+# Here is our dependency list for yt
+log_cmd conda config --system --add channels http://repo.continuum.io/pkgs/free
+log_cmd conda config --system --add channels http://repo.continuum.io/pkgs/dev
+log_cmd conda config --system --add channels http://repo.continuum.io/pkgs/gpl
+log_cmd conda update --yes conda
+
+echo "Current dependencies: ${YT_DEPS[@]}"
+log_cmd echo "DEPENDENCIES" ${YT_DEPS[@]}
+log_cmd conda install --yes ${YT_DEPS[@]}
+
+echo "Installing mercurial."
+get_ytrecipe mercurial
+
+if [ $INST_YT_SOURCE -eq 0 ]
+then
+  echo "Installing yt as a package."
+  get_ytrecipe yt
+else
+  # We do a source install.
+  YT_DIR="${DEST_DIR}/src/yt-hg"
+  export PNG_DIR=${DEST_DIR}
+  export FTYPE_DIR=${DEST_DIR}
+  export HDF5_DIR=${DEST_DIR}
+  log_cmd hg clone -r ${BRANCH} https://bitbucket.org/yt_analysis/yt ${YT_DIR}
+  pushd ${YT_DIR}
+  echo $DEST_DIR > hdf5.cfg
+  log_cmd python setup.py develop
+  popd
+  log_cmd cp ${YT_DIR}/doc/activate ${DEST_DIR}/bin/activate 
+  log_cmd sed -i.bak -e "s,__YT_DIR__,${DEST_DIR}," ${DEST_DIR}/bin/activate
+  log_cmd cp ${YT_DIR}/doc/activate.csh ${DEST_DIR}/bin/activate.csh
+  log_cmd sed -i.bak -e "s,__YT_DIR__,${DEST_DIR}," ${DEST_DIR}/bin/activate.csh
+fi
+
+echo
+echo
+echo "========================================================================"
+echo
+echo "yt and the Conda system are now installed in $DEST_DIR ."
+echo
+if [ $INST_YT_SOURCE -eq 0 ]
+then
+  echo "You must now modify your PATH variable by prepending:"
+  echo 
+  echo "   $DEST_DIR/bin"
+  echo
+  echo "For example, if you use bash, place something like this at the end"
+  echo "of your ~/.bashrc :"
+  echo
+  echo "   export PATH=$DEST_DIR/bin:$PATH"
+else
+  echo "To run from this new installation, use the activate script for this "
+  echo "environment."
+  echo
+  echo "    $ source $DEST_DIR/bin/activate"
+  echo
+  echo "This modifies the environment variables YT_DEST, PATH, PYTHONPATH, and"
+  echo "LD_LIBRARY_PATH to match your new yt install.  If you use csh, just"
+  echo "append .csh to the above."
+fi
+echo
+echo "To get started with yt, check out the orientation:"
+echo
+echo "    http://yt-project.org/doc/orientation/"
+echo
+echo "or just activate your environment and run 'yt serve' to bring up the"
+echo "yt GUI."
+echo
+echo "For support, see the website and join the mailing list:"
+echo
+echo "    http://yt-project.org/"
+echo "    http://yt-project.org/data/      (Sample data)"
+echo "    http://yt-project.org/doc/       (Docs)"
+echo
+echo "    http://lists.spacepope.org/listinfo.cgi/yt-users-spacepope.org"
+echo
+echo "========================================================================"
+echo
+echo "Oh, look at me, still talking when there's science to do!"
+echo "Good luck, and email the user list if you run into any problems."

diff -r 19728ac91bad2835149a981df4a09d73f847308f -r fc45b6d9d7ae3ea295c722e14f00113bdb2f00d4 doc/install_script.sh
--- a/doc/install_script.sh
+++ b/doc/install_script.sh
@@ -918,6 +918,8 @@
 do_setup_py $SYMPY
 [ $INST_PYX -eq 1 ] && do_setup_py $PYX
 
+( ${DEST_DIR}/bin/pip install jinja2 2>&1 ) 1>> ${LOG_FILE}
+
 # Now we build Rockstar and set its environment variable.
 if [ $INST_ROCKSTAR -eq 1 ]
 then

diff -r 19728ac91bad2835149a981df4a09d73f847308f -r fc45b6d9d7ae3ea295c722e14f00113bdb2f00d4 yt/analysis_modules/sunyaev_zeldovich/tests/test_projection.py
--- a/yt/analysis_modules/sunyaev_zeldovich/tests/test_projection.py
+++ b/yt/analysis_modules/sunyaev_zeldovich/tests/test_projection.py
@@ -29,11 +29,11 @@
 
 mue = 1./0.88
 freqs = np.array([30., 90., 240.])
-    
+
 def setup():
     """Test specific setup."""
     from yt.config import ytcfg
-    ytcfg["yt", "__withintesting"] = "True"        
+    ytcfg["yt", "__withintesting"] = "True"
 
 def full_szpack3d(pf, xo):
     data = pf.h.grids[0]
@@ -43,7 +43,7 @@
     Dtau = sigma_thompson*data["Density"]/(mh*mue)*dz
     Te = data["Temperature"]/K_per_keV
     betac = data["z-velocity"]/clight
-    pbar = get_pbar("Computing 3-D cell-by-cell S-Z signal for comparison.", nx) 
+    pbar = get_pbar("Computing 3-D cell-by-cell S-Z signal for comparison.", nx)
     for i in xrange(nx):
         pbar.update(i)
         for j in xrange(ny):
@@ -67,7 +67,7 @@
     a = 200.
     v0 = 300.*cm_per_km
     ddims = (nx,ny,nz)
-    
+
     x, y, z = np.mgrid[-R:R:nx*1j,
                        -R:R:ny*1j,
                        -R:R:nz*1j]
@@ -87,7 +87,7 @@
     data["z-velocity"] = velz
 
     bbox = np.array([[-0.5,0.5],[-0.5,0.5],[-0.5,0.5]])
-    
+
     L = 2*R*cm_per_kpc
     dl = L/nz
 
@@ -118,9 +118,11 @@
         return szprj.data
     def onaxis_image_func(filename_prefix):
         szprj.write_png(filename_prefix)
-    yield GenericArrayTest(pf, onaxis_array_func)
-    yield GenericImageTest(pf, onaxis_image_func, 3)
-       
+    for test in [GenericArrayTest(pf, onaxis_array_func),
+                 GenericImageTest(pf, onaxis_image_func, 3)]:
+        test_M7_onaxis.__name__ = test.description
+        yield test
+
 @requires_module("SZpack")
 @requires_pf(M7)
 def test_M7_offaxis():
@@ -131,5 +133,7 @@
         return szprj.data
     def offaxis_image_func(filename_prefix):
         szprj.write_png(filename_prefix)
-    yield GenericArrayTest(pf, offaxis_array_func)
-    yield GenericImageTest(pf, offaxis_image_func, 3)
+    for test in [GenericArrayTest(pf, offaxis_array_func),
+                 GenericImageTest(pf, offaxis_image_func, 3)]:
+        test_M7_offaxis.__name__ = test.description
+        yield test

diff -r 19728ac91bad2835149a981df4a09d73f847308f -r fc45b6d9d7ae3ea295c722e14f00113bdb2f00d4 yt/data_objects/hierarchy.py
--- a/yt/data_objects/hierarchy.py
+++ b/yt/data_objects/hierarchy.py
@@ -395,6 +395,23 @@
           [self.grid_left_edge[:,0], self.grid_right_edge[:,1], self.grid_right_edge[:,2]],
         ], dtype='float64')
 
+    def lock_grids_to_parents(self):
+        r"""This function locks grid edges to their parents.
+
+        This is useful in cases where the grid structure may be somewhat
+        irregular, or where setting the left and right edges is a lossy
+        process.  It is designed to correct situations where left/right edges
+        may be set slightly incorrectly, resulting in discontinuities in images
+        and the like.
+        """
+        mylog.info("Locking grids to parents.")
+        for i, g in enumerate(self.grids):
+            si = g.get_global_startindex()
+            g.LeftEdge = self.pf.domain_left_edge + g.dds * si
+            g.RightEdge = g.LeftEdge + g.ActiveDimensions * g.dds
+            self.grid_left_edge[i,:] = g.LeftEdge
+            self.grid_right_edge[i,:] = g.RightEdge
+
     def print_stats(self):
         """
         Prints out (stdout) relevant information about the simulation

diff -r 19728ac91bad2835149a981df4a09d73f847308f -r fc45b6d9d7ae3ea295c722e14f00113bdb2f00d4 yt/frontends/chombo/tests/test_outputs.py
--- a/yt/frontends/chombo/tests/test_outputs.py
+++ b/yt/frontends/chombo/tests/test_outputs.py
@@ -29,6 +29,7 @@
     pf = data_dir_load(gc)
     yield assert_equal, str(pf), "data.0077.3d.hdf5"
     for test in small_patch_amr(gc, _fields):
+        test_gc.__name__ = test.description
         yield test
 
 tb = "TurbBoxLowRes/data.0005.3d.hdf5"
@@ -37,4 +38,5 @@
     pf = data_dir_load(tb)
     yield assert_equal, str(pf), "data.0005.3d.hdf5"
     for test in small_patch_amr(tb, _fields):
+        test_tb.__name__ = test.description
         yield test

diff -r 19728ac91bad2835149a981df4a09d73f847308f -r fc45b6d9d7ae3ea295c722e14f00113bdb2f00d4 yt/frontends/enzo/data_structures.py
--- a/yt/frontends/enzo/data_structures.py
+++ b/yt/frontends/enzo/data_structures.py
@@ -40,6 +40,7 @@
 from yt.utilities.definitions import \
     mpc_conversion, sec_conversion
 from yt.utilities import hdf5_light_reader
+from yt.utilities.io_handler import io_registry
 from yt.utilities.logger import ytLogger as mylog
 
 from .definitions import parameterDict
@@ -131,10 +132,11 @@
 
     def retrieve_ghost_zones(self, n_zones, fields, all_levels=False,
                              smoothed=False):
-        # We ignore smoothed in this case.
-        if n_zones > 3:
+        NGZ = self.pf.parameters.get("NumberOfGhostZones", 3)
+        if n_zones > NGZ:
             return EnzoGrid.retrieve_ghost_zones(
                 self, n_zones, fields, all_levels, smoothed)
+
         # ----- Below is mostly the original code, except we remove the field
         # ----- access section
         # We will attempt this by creating a datacube that is exactly bigger
@@ -162,7 +164,12 @@
                 level, new_left_edge, **kwargs)
         # ----- This is EnzoGrid.get_data, duplicated here mostly for
         # ----  efficiency's sake.
-        sl = [slice(3 - n_zones, -(3 - n_zones)) for i in range(3)]
+        start_zone = NGZ - n_zones
+        if start_zone == 0:
+            end_zone = None
+        else:
+            end_zone = -(NGZ - n_zones)
+        sl = [slice(start_zone, end_zone) for i in range(3)]
         if fields is None: return cube
         for field in ensure_list(fields):
             if field in self.hierarchy.field_list:
@@ -543,6 +550,9 @@
                     result[p] = result[p][0:max_num]
         return result
 
+    def _setup_data_io(self):
+            self.io = io_registry[self.data_style](self.parameter_file)
+
 
 class EnzoHierarchyInMemory(EnzoHierarchy):
 

diff -r 19728ac91bad2835149a981df4a09d73f847308f -r fc45b6d9d7ae3ea295c722e14f00113bdb2f00d4 yt/frontends/enzo/io.py
--- a/yt/frontends/enzo/io.py
+++ b/yt/frontends/enzo/io.py
@@ -32,6 +32,10 @@
 
     _data_style = "enzo_hdf4"
 
+    def __init__(self, pf, *args, **kwargs):
+        BaseIOHandler.__init__(self, *args, **kwargs)
+        self.pf = pf
+
     def modify(self, field):
         return field.swapaxes(0,2)
 
@@ -61,6 +65,10 @@
     _data_style = "enzo_hdf5"
     _particle_reader = True
 
+    def __init__(self, pf, *args, **kwargs):
+        BaseIOHandler.__init__(self, *args, **kwargs)
+        self.pf = pf
+
     def _read_field_names(self, grid):
         """
         Returns a list of fields associated with the filename
@@ -90,6 +98,10 @@
     _data_style = "enzo_packed_3d"
     _particle_reader = True
 
+    def __init__(self, pf, *args, **kwargs):
+        BaseIOHandler.__init__(self, *args, **kwargs)
+        self.pf = pf
+
     def _read_particles(self, fields, rtype, args, grid_list, enclosed,
                         conv_factors):
         filenames = [g.filename for g in grid_list]
@@ -144,10 +156,18 @@
 class IOHandlerPackedHDF5GhostZones(IOHandlerPackedHDF5):
     _data_style = "enzo_packed_3d_gz"
 
+    def __init__(self, pf, *args, **kwargs):
+        BaseIOHandler.__init__(self, *args, **kwargs)
+        self.pf = pf
+
     def modify(self, field):
+        NGZ = self.pf.parameters.get("NumberOfGhostZones", 3)
+        sl =  (slice(NGZ,-NGZ),
+               slice(NGZ,-NGZ),
+               slice(NGZ,-NGZ))
         if len(field.shape) < 3:
             return field
-        tr = field[3:-3,3:-3,3:-3].swapaxes(0,2)
+        tr = field[sl].swapaxes(0,2)
         return tr.copy() # To ensure contiguous
 
     def _read_raw_data_set(self, grid, field):
@@ -158,7 +178,7 @@
 
     _data_style = "enzo_inline"
 
-    def __init__(self, ghost_zones=3):
+    def __init__(self, pf, ghost_zones=3):
         import enzo
         self.enzo = enzo
         self.grids_in_memory = enzo.grid_data
@@ -166,6 +186,7 @@
         self.my_slice = (slice(ghost_zones,-ghost_zones),
                       slice(ghost_zones,-ghost_zones),
                       slice(ghost_zones,-ghost_zones))
+        self.pf = pf
         BaseIOHandler.__init__(self)
 
     def _read_data(self, grid, field):
@@ -210,6 +231,10 @@
     _data_style = "enzo_packed_2d"
     _particle_reader = False
 
+    def __init__(self, pf, *args, **kwargs):
+        BaseIOHandler.__init__(self, *args, **kwargs)
+        self.pf = pf
+
     def _read_data(self, grid, field):
         return hdf5_light_reader.ReadData(grid.filename,
             "/Grid%08i/%s" % (grid.id, field)).transpose()[:,:,None]
@@ -228,6 +253,10 @@
     _data_style = "enzo_packed_1d"
     _particle_reader = False
 
+    def __init__(self, pf, *args, **kwargs):
+        BaseIOHandler.__init__(self, *args, **kwargs)
+        self.pf = pf
+
     def _read_data(self, grid, field):
         return hdf5_light_reader.ReadData(grid.filename,
             "/Grid%08i/%s" % (grid.id, field)).transpose()[:,None,None]

diff -r 19728ac91bad2835149a981df4a09d73f847308f -r fc45b6d9d7ae3ea295c722e14f00113bdb2f00d4 yt/frontends/enzo/tests/test_outputs.py
--- a/yt/frontends/enzo/tests/test_outputs.py
+++ b/yt/frontends/enzo/tests/test_outputs.py
@@ -30,6 +30,7 @@
     pf = data_dir_load(m7)
     yield assert_equal, str(pf), "moving7_0010"
     for test in small_patch_amr(m7, _fields):
+        test_moving7.__name__ = test.description
         yield test
 
 g30 = "IsolatedGalaxy/galaxy0030/galaxy0030"
@@ -38,4 +39,5 @@
     pf = data_dir_load(g30)
     yield assert_equal, str(pf), "galaxy0030"
     for test in big_patch_amr(g30, _fields):
+        test_galaxy0030.__name__ = test.description
         yield test

diff -r 19728ac91bad2835149a981df4a09d73f847308f -r fc45b6d9d7ae3ea295c722e14f00113bdb2f00d4 yt/frontends/flash/tests/test_outputs.py
--- a/yt/frontends/flash/tests/test_outputs.py
+++ b/yt/frontends/flash/tests/test_outputs.py
@@ -29,6 +29,7 @@
     pf = data_dir_load(sloshing)
     yield assert_equal, str(pf), "sloshing_low_res_hdf5_plt_cnt_0300"
     for test in small_patch_amr(sloshing, _fields):
+        test_sloshing.__name__ = test.description
         yield test
 
 _fields_2d = ("Temperature", "Density")
@@ -39,4 +40,5 @@
     pf = data_dir_load(wt)
     yield assert_equal, str(pf), "windtunnel_4lev_hdf5_plt_cnt_0030"
     for test in small_patch_amr(wt, _fields_2d):
+        test_wind_tunnel.__name__ = test.description
         yield test

diff -r 19728ac91bad2835149a981df4a09d73f847308f -r fc45b6d9d7ae3ea295c722e14f00113bdb2f00d4 yt/frontends/orion/tests/test_outputs.py
--- a/yt/frontends/orion/tests/test_outputs.py
+++ b/yt/frontends/orion/tests/test_outputs.py
@@ -29,6 +29,7 @@
     pf = data_dir_load(radadvect)
     yield assert_equal, str(pf), "plt00000"
     for test in small_patch_amr(radadvect, _fields):
+        test_radadvect.__name__ = test.description
         yield test
 
 rt = "RadTube/plt00500"
@@ -37,4 +38,5 @@
     pf = data_dir_load(rt)
     yield assert_equal, str(pf), "plt00500"
     for test in small_patch_amr(rt, _fields):
+        test_radtube.__name__ = test.description
         yield test

diff -r 19728ac91bad2835149a981df4a09d73f847308f -r fc45b6d9d7ae3ea295c722e14f00113bdb2f00d4 yt/funcs.py
--- a/yt/funcs.py
+++ b/yt/funcs.py
@@ -625,3 +625,38 @@
         return
     if not os.path.exists(my_dir):
         only_on_root(os.makedirs, my_dir)
+
+ at contextlib.contextmanager
+def memory_checker(interval = 15):
+    r"""This is a context manager that monitors memory usage.
+
+    Parameters
+    ----------
+    interval : int
+        The number of seconds between printing the current memory usage in
+        gigabytes of the current Python interpreter.
+
+    Examples
+    --------
+
+    >>> with memory_checker(10):
+    ...     arr = np.zeros(1024*1024*1024, dtype="float64")
+    ...     time.sleep(15)
+    ...     del arr
+    """
+    import threading
+    class MemoryChecker(threading.Thread):
+        def __init__(self, event, interval):
+            self.event = event
+            self.interval = interval
+            threading.Thread.__init__(self)
+
+        def run(self):
+            while not self.event.wait(self.interval):
+                print "MEMORY: %0.3e gb" % (get_memory_usage()/1024.)
+
+    e = threading.Event()
+    mem_check = MemoryChecker(e, interval)
+    mem_check.start()
+    yield
+    e.set()

diff -r 19728ac91bad2835149a981df4a09d73f847308f -r fc45b6d9d7ae3ea295c722e14f00113bdb2f00d4 yt/gui/reason/extdirect_router.py
--- a/yt/gui/reason/extdirect_router.py
+++ b/yt/gui/reason/extdirect_router.py
@@ -9,6 +9,13 @@
 This code was released under the BSD License.
 """
 
+#-----------------------------------------------------------------------------
+# Copyright (c) 2013, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
 import inspect
 
 class DirectException(Exception):
@@ -186,12 +193,4 @@
 
 
 
-"""
 
-#-----------------------------------------------------------------------------
-# Copyright (c) 2013, yt Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------

diff -r 19728ac91bad2835149a981df4a09d73f847308f -r fc45b6d9d7ae3ea295c722e14f00113bdb2f00d4 yt/visualization/api.py
--- a/yt/visualization/api.py
+++ b/yt/visualization/api.py
@@ -20,8 +20,7 @@
 from plot_collection import \
     PlotCollection, \
     PlotCollectionInteractive, \
-    concatenate_pdfs, \
-    get_multi_plot
+    concatenate_pdfs
 
 from fixed_resolution import \
     FixedResolutionBuffer, \
@@ -54,5 +53,7 @@
     OffAxisSlicePlot, \
     ProjectionPlot, \
     OffAxisProjectionPlot
-    
 
+from base_plot_types import \
+    get_multi_plot
+

diff -r 19728ac91bad2835149a981df4a09d73f847308f -r fc45b6d9d7ae3ea295c722e14f00113bdb2f00d4 yt/visualization/base_plot_types.py
--- a/yt/visualization/base_plot_types.py
+++ b/yt/visualization/base_plot_types.py
@@ -94,3 +94,86 @@
         canvas.print_figure(f)
         f.seek(0)
         return f.read()
+
+def get_multi_plot(nx, ny, colorbar = 'vertical', bw = 4, dpi=300,
+                   cbar_padding = 0.4):
+    r"""Construct a multiple axes plot object, with or without a colorbar, into
+    which multiple plots may be inserted.
+
+    This will create a set of :class:`matplotlib.axes.Axes`, all lined up into
+    a grid, which are then returned to the user and which can be used to plot
+    multiple plots on a single figure.
+
+    Parameters
+    ----------
+    nx : int
+        Number of axes to create along the x-direction
+    ny : int
+        Number of axes to create along the y-direction
+    colorbar : {'vertical', 'horizontal', None}, optional
+        Should Axes objects for colorbars be allocated, and if so, should they
+        correspond to the horizontal or vertical set of axes?
+    bw : number
+        The base height/width of an axes object inside the figure, in inches
+    dpi : number
+        The dots per inch fed into the Figure instantiation
+
+    Returns
+    -------
+    fig : :class:`matplotlib.figure.Figure`
+        The figure created inside which the axes reside
+    tr : list of list of :class:`matplotlib.axes.Axes` objects
+        This is a list, where the inner list is along the x-axis and the outer
+        is along the y-axis
+    cbars : list of :class:`matplotlib.axes.Axes` objects
+        Each of these is an axes onto which a colorbar can be placed.
+
+    Notes
+    -----
+    This is a simple implementation for a common use case.  Viewing the source
+    can be instructure, and is encouraged to see how to generate more
+    complicated or more specific sets of multiplots for your own purposes.
+    """
+    hf, wf = 1.0/ny, 1.0/nx
+    fudge_x = fudge_y = 1.0
+    if colorbar is None:
+        fudge_x = fudge_y = 1.0
+    elif colorbar.lower() == 'vertical':
+        fudge_x = nx/(cbar_padding+nx)
+        fudge_y = 1.0
+    elif colorbar.lower() == 'horizontal':
+        fudge_x = 1.0
+        fudge_y = ny/(cbar_padding+ny)
+    fig = matplotlib.figure.Figure((bw*nx/fudge_x, bw*ny/fudge_y), dpi=dpi)
+    from _mpl_imports import FigureCanvasAgg
+    fig.set_canvas(FigureCanvasAgg(fig))
+    fig.subplots_adjust(wspace=0.0, hspace=0.0,
+                        top=1.0, bottom=0.0,
+                        left=0.0, right=1.0)
+    tr = []
+    for j in range(ny):
+        tr.append([])
+        for i in range(nx):
+            left = i*wf*fudge_x
+            bottom = fudge_y*(1.0-(j+1)*hf) + (1.0-fudge_y)
+            ax = fig.add_axes([left, bottom, wf*fudge_x, hf*fudge_y])
+            tr[-1].append(ax)
+    cbars = []
+    if colorbar is None:
+        pass
+    elif colorbar.lower() == 'horizontal':
+        for i in range(nx):
+            # left, bottom, width, height
+            # Here we want 0.10 on each side of the colorbar
+            # We want it to be 0.05 tall
+            # And we want a buffer of 0.15
+            ax = fig.add_axes([wf*(i+0.10)*fudge_x, hf*fudge_y*0.20,
+                               wf*(1-0.20)*fudge_x, hf*fudge_y*0.05])
+            cbars.append(ax)
+    elif colorbar.lower() == 'vertical':
+        for j in range(ny):
+            ax = fig.add_axes([wf*(nx+0.05)*fudge_x, hf*fudge_y*(ny-(j+0.95)),
+                               wf*fudge_x*0.05, hf*fudge_y*0.90])
+            ax.clear()
+            cbars.append(ax)
+    return fig, tr, cbars

diff -r 19728ac91bad2835149a981df4a09d73f847308f -r fc45b6d9d7ae3ea295c722e14f00113bdb2f00d4 yt/visualization/plot_collection.py
--- a/yt/visualization/plot_collection.py
+++ b/yt/visualization/plot_collection.py
@@ -1728,90 +1728,6 @@
             canvas = FigureCanvasAgg(plot._figure)
             send_figure(plot._figure)
 
-def get_multi_plot(nx, ny, colorbar = 'vertical', bw = 4, dpi=300,
-                   cbar_padding = 0.4):
-    r"""Construct a multiple axes plot object, with or without a colorbar, into
-    which multiple plots may be inserted.
-
-    This will create a set of :class:`matplotlib.axes.Axes`, all lined up into
-    a grid, which are then returned to the user and which can be used to plot
-    multiple plots on a single figure.
-
-    Parameters
-    ----------
-    nx : int
-        Number of axes to create along the x-direction
-    ny : int
-        Number of axes to create along the y-direction
-    colorbar : {'vertical', 'horizontal', None}, optional
-        Should Axes objects for colorbars be allocated, and if so, should they
-        correspond to the horizontal or vertical set of axes?
-    bw : number
-        The base height/width of an axes object inside the figure, in inches
-    dpi : number
-        The dots per inch fed into the Figure instantiation
-
-    Returns
-    -------
-    fig : :class:`matplotlib.figure.Figure`
-        The figure created inside which the axes reside
-    tr : list of list of :class:`matplotlib.axes.Axes` objects
-        This is a list, where the inner list is along the x-axis and the outer
-        is along the y-axis
-    cbars : list of :class:`matplotlib.axes.Axes` objects
-        Each of these is an axes onto which a colorbar can be placed.
-
-    Notes
-    -----
-    This is a simple implementation for a common use case.  Viewing the source
-    can be instructure, and is encouraged to see how to generate more
-    complicated or more specific sets of multiplots for your own purposes.
-    """
-    hf, wf = 1.0/ny, 1.0/nx
-    fudge_x = fudge_y = 1.0
-    if colorbar is None:
-        fudge_x = fudge_y = 1.0
-    elif colorbar.lower() == 'vertical':
-        fudge_x = nx/(cbar_padding+nx)
-        fudge_y = 1.0
-    elif colorbar.lower() == 'horizontal':
-        fudge_x = 1.0
-        fudge_y = ny/(cbar_padding+ny)
-    fig = figure.Figure((bw*nx/fudge_x, bw*ny/fudge_y), dpi=dpi)
-    from _mpl_imports import FigureCanvasAgg
-    fig.set_canvas(FigureCanvasAgg(fig))
-    fig.subplots_adjust(wspace=0.0, hspace=0.0,
-                        top=1.0, bottom=0.0,
-                        left=0.0, right=1.0)
-    tr = []
-    print fudge_x, fudge_y
-    for j in range(ny):
-        tr.append([])
-        for i in range(nx):
-            left = i*wf*fudge_x
-            bottom = fudge_y*(1.0-(j+1)*hf) + (1.0-fudge_y)
-            ax = fig.add_axes([left, bottom, wf*fudge_x, hf*fudge_y])
-            tr[-1].append(ax)
-    cbars = []
-    if colorbar is None:
-        pass
-    elif colorbar.lower() == 'horizontal':
-        for i in range(nx):
-            # left, bottom, width, height
-            # Here we want 0.10 on each side of the colorbar
-            # We want it to be 0.05 tall
-            # And we want a buffer of 0.15
-            ax = fig.add_axes([wf*(i+0.10)*fudge_x, hf*fudge_y*0.20,
-                               wf*(1-0.20)*fudge_x, hf*fudge_y*0.05])
-            cbars.append(ax)
-    elif colorbar.lower() == 'vertical':
-        for j in range(ny):
-            ax = fig.add_axes([wf*(nx+0.05)*fudge_x, hf*fudge_y*(ny-(j+0.95)),
-                               wf*fudge_x*0.05, hf*fudge_y*0.90])
-            ax.clear()
-            cbars.append(ax)
-    return fig, tr, cbars
-
 def _MPLFixImage(data_source, image_obj, field, cbar, cls):
     nx, ny = image_obj.get_size()
     def f(axes):

diff -r 19728ac91bad2835149a981df4a09d73f847308f -r fc45b6d9d7ae3ea295c722e14f00113bdb2f00d4 yt/visualization/plot_window.py
--- a/yt/visualization/plot_window.py
+++ b/yt/visualization/plot_window.py
@@ -290,7 +290,8 @@
     _vector_info = None
     _frb = None
     def __init__(self, data_source, bounds, buff_size=(800,800), antialias=True,
-                 periodic=True, origin='center-window', oblique=False, window_size=10.0):
+                 periodic=True, origin='center-window', oblique=False,
+                 window_size=10.0, fields=None):
         if not hasattr(self, "pf"):
             self.pf = data_source.pf
             ts = self._initialize_dataset(self.pf)
@@ -304,6 +305,12 @@
         self.buff_size = buff_size
         self.window_size = window_size
         self.antialias = antialias
+        skip = list(FixedResolutionBuffer._exclude_fields) + data_source._key_fields
+        if fields is None:
+            fields = []
+        else:
+            fields = ensure_list(fields)
+        self.override_fields = list(np.intersect1d(fields, skip))
         self.set_window(bounds) # this automatically updates the data and plot
         self.origin = origin
         if self.data_source.center is not None and oblique == False:
@@ -359,6 +366,8 @@
             self._frb._get_data_source_fields()
         else:
             for key in old_fields: self._frb[key]
+        for key in self.override_fields:
+            self._frb[key]
         self._data_valid = True
 
     def _setup_plots(self):
@@ -366,7 +375,7 @@
 
     @property
     def fields(self):
-        return self._frb.data.keys()
+        return self._frb.data.keys() + self.override_fields
 
     @property
     def width(self):
@@ -1274,7 +1283,8 @@
             axes_unit = units
         if field_parameters is None: field_parameters = {}
         slc = pf.h.slice(axis, center[axis], center=center, fields=fields, **field_parameters)
-        PWViewerMPL.__init__(self, slc, bounds, origin=origin, fontsize=fontsize)
+        PWViewerMPL.__init__(self, slc, bounds, origin=origin,
+                             fontsize=fontsize, fields=fields)
         self.set_axes_unit(axes_unit)
 
 class ProjectionPlot(PWViewerMPL):
@@ -1391,7 +1401,8 @@
         if field_parameters is None: field_parameters = {}
         proj = pf.h.proj(axis, fields, weight_field=weight_field, max_level=max_level,
                          center=center, source=data_source, **field_parameters)
-        PWViewerMPL.__init__(self, proj, bounds, origin=origin, fontsize=fontsize)
+        PWViewerMPL.__init__(self, proj, bounds, origin=origin,
+                             fontsize=fontsize, fields=fields)
         self.set_axes_unit(axes_unit)
 
 class OffAxisSlicePlot(PWViewerMPL):
@@ -1450,8 +1461,9 @@
         cutting = pf.h.cutting(normal, center, fields=fields, north_vector=north_vector, **field_parameters)
         # Hard-coding the origin keyword since the other two options
         # aren't well-defined for off-axis data objects
-        PWViewerMPL.__init__(self, cutting, bounds, origin='center-window', periodic=False,
-                             oblique=True, fontsize=fontsize)
+        PWViewerMPL.__init__(self, cutting, bounds, origin='center-window',
+                             periodic=False, oblique=True, fontsize=fontsize,
+                             fields=fields)
         self.set_axes_unit(axes_unit)
 
 class OffAxisProjectionDummyDataSource(object):

diff -r 19728ac91bad2835149a981df4a09d73f847308f -r fc45b6d9d7ae3ea295c722e14f00113bdb2f00d4 yt/visualization/tests/test_plotwindow.py
--- a/yt/visualization/tests/test_plotwindow.py
+++ b/yt/visualization/tests/test_plotwindow.py
@@ -94,8 +94,10 @@
     for ax in 'xyz':
         for attr_name in ATTR_ARGS.keys():
             for args in ATTR_ARGS[attr_name]:
-                yield PlotWindowAttributeTest(pf, plot_field, ax, attr_name,
-                                              args, decimals)
+                test = PlotWindowAttributeTest(pf, plot_field, ax, attr_name,
+                                               args, decimals)
+                test_attributes.__name__ = test.description
+                yield test
 
 
 @requires_pf(WT)

diff -r 19728ac91bad2835149a981df4a09d73f847308f -r fc45b6d9d7ae3ea295c722e14f00113bdb2f00d4 yt/visualization/volume_rendering/camera.py
--- a/yt/visualization/volume_rendering/camera.py
+++ b/yt/visualization/volume_rendering/camera.py
@@ -72,6 +72,9 @@
         cubical, but if not, it is left/right, top/bottom, front/back.
     resolution : int or list of ints
         The number of pixels in each direction.
+    transfer_function : `yt.visualization.volume_rendering.TransferFunction`
+        The transfer function used to map values to colors in an image.  If
+        not specified, defaults to a ProjectionTransferFunction.
     north_vector : array_like, optional
         The 'up' direction for the plane of rays.  If not specific, calculated
         automatically.
@@ -184,7 +187,7 @@
     _tf_figure = None
     _render_figure = None
     def __init__(self, center, normal_vector, width,
-                 resolution, transfer_function,
+                 resolution, transfer_function = None,
                  north_vector = None, steady_north=False,
                  volume = None, fields = None,
                  log_fields = None,
@@ -1465,7 +1468,7 @@
 
 class MosaicCamera(Camera):
     def __init__(self, center, normal_vector, width,
-                 resolution, transfer_function,
+                 resolution, transfer_function = None,
                  north_vector = None, steady_north=False,
                  volume = None, fields = None,
                  log_fields = None,

diff -r 19728ac91bad2835149a981df4a09d73f847308f -r fc45b6d9d7ae3ea295c722e14f00113bdb2f00d4 yt/visualization/volume_rendering/multi_texture.py
--- a/yt/visualization/volume_rendering/multi_texture.py
+++ b/yt/visualization/volume_rendering/multi_texture.py
@@ -35,6 +35,14 @@
 I hope this helps,
   Almar
 """
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2013, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
 from yt.mods import *
 from yt.funcs import *
 
@@ -300,14 +308,3 @@
     ax.Draw()
 
     return mtex, ax
-
-
-"""
-
-#-----------------------------------------------------------------------------
-# Copyright (c) 2013, yt Development Team.
-#
-# Distributed under the terms of the Modified BSD License.
-#
-# The full license is in the file COPYING.txt, distributed with this software.
-#-----------------------------------------------------------------------------

diff -r 19728ac91bad2835149a981df4a09d73f847308f -r fc45b6d9d7ae3ea295c722e14f00113bdb2f00d4 yt/visualization/volume_rendering/transfer_function_helper.py
--- /dev/null
+++ b/yt/visualization/volume_rendering/transfer_function_helper.py
@@ -0,0 +1,211 @@
+"""
+A helper class to build, display, and modify transfer functions for volume
+rendering.
+
+
+
+"""
+
+#-----------------------------------------------------------------------------
+# Copyright (c) 2013, yt Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+from yt.funcs import mylog
+from yt.data_objects.profiles import BinnedProfile1D
+from yt.visualization.volume_rendering.api import ColorTransferFunction
+from yt.visualization._mpl_imports import FigureCanvasAgg
+from matplotlib.figure import Figure
+from IPython.core.display import Image
+import cStringIO
+import numpy as np
+
+
+class TransferFunctionHelper(object):
+
+    profiles = None
+
+    def __init__(self, pf):
+        r"""A transfer function helper.
+
+        This attempts to help set up a good transfer function by finding
+        bounds, handling linear/log options, and displaying the transfer
+        function combined with 1D profiles of rendering quantity.
+
+        Parameters
+        ----------
+        pf: A StaticOutput instance
+            A static output that is currently being rendered. This is used to
+            help set up data bounds.
+
+        Notes
+        -----
+        """
+        self.pf = pf
+        self.field = None
+        self.log = False
+        self.tf = None
+        self.bounds = None
+        self.grey_opacity = True
+        self.profiles = {}
+
+    def set_bounds(self, bounds=None):
+        """
+        Set the bounds of the transfer function.
+
+        Parameters
+        ----------
+        bounds: array-like, length 2, optional
+            A length 2 list/array in the form [min, max]. These should be the
+            raw values and not the logarithm of the min and max. If bounds is
+            None, the bounds of the data are calculated from all of the data
+            in the dataset.  This can be slow for very large datasets.
+        """
+        if bounds is None:
+            bounds = self.pf.h.all_data().quantities['Extrema'](self.field)[0]
+        self.bounds = bounds
+
+        # Do some error checking.
+        assert(len(self.bounds) == 2)
+        if self.log:
+            assert(self.bounds[0] > 0.0)
+            assert(self.bounds[1] > 0.0)
+        return
+
+    def set_field(self, field):
+        """
+        Set the field to be rendered
+
+        Parameters
+        ----------
+        field: string
+            The field to be rendered.
+        """
+        self.field = field
+
+    def set_log(self, log):
+        """
+        Set whether or not the transfer function should be in log or linear
+        space. Also modifies the pf.field_info[field].take_log attribute to
+        stay in sync with this setting.
+
+        Parameters
+        ----------
+        log: boolean
+            Sets whether the transfer function should use log or linear space.
+        """
+        self.log = log
+        self.pf.h
+        self.pf.field_info[self.field].take_log = log
+
+    def build_transfer_function(self):
+        """
+        Builds the transfer function according to the current state of the
+        TransferFunctionHelper.
+
+        Parameters
+        ----------
+        None
+
+        Returns
+        -------
+
+        A ColorTransferFunction object.
+
+        """
+        if self.bounds is None:
+            mylog.info('Calculating data bounds. This may take a while.' +
+                       '  Set the .bounds to avoid this.')
+            self.set_bounds()
+
+        if self.log:
+            mi, ma = np.log10(self.bounds[0]), np.log10(self.bounds[1])
+        else:
+            mi, ma = self.bounds
+        self.tf = ColorTransferFunction((mi, ma),
+                                        grey_opacity=self.grey_opacity,
+                                        nbins=512)
+        return self.tf
+
+    def plot(self, fn=None, profile_field=None, profile_weight=None):
+        """
+        Save the current transfer function to a bitmap, or display
+        it inline.
+
+        Parameters
+        ----------
+        fn: string, optional
+            Filename to save the image to. If None, the returns an image
+            to an IPython session.
+
+        Returns
+        -------
+
+        If fn is None, will return an image to an IPython notebook.
+
+        """
+        if self.tf is None:
+            self.build_transfer_function()
+        tf = self.tf
+        if self.log:
+            xfunc = np.logspace
+            xmi, xma = np.log10(self.bounds[0]), np.log10(self.bounds[1])
+        else:
+            xfunc = np.linspace
+            xmi, xma = self.bounds
+
+        x = xfunc(xmi, xma, tf.nbins)
+        y = tf.funcs[3].y
+        w = np.append(x[1:]-x[:-1], x[-1]-x[-2])
+        colors = np.array([tf.funcs[0].y, tf.funcs[1].y, tf.funcs[2].y,
+                           np.ones_like(x)]).T
+
+        fig = Figure(figsize=[6, 3])
+        canvas = FigureCanvasAgg(fig)
+        ax = fig.add_axes([0.2, 0.2, 0.75, 0.75])
+        ax.bar(x, tf.funcs[3].y, w, edgecolor=[0.0, 0.0, 0.0, 0.0],
+               log=True, color=colors, bottom=[0])
+
+        if profile_field is not None:
+            try:
+                prof = self.profiles[self.field]
+            except KeyError:
+                self.setup_profile(profile_field, profile_weight)
+                prof = self.profiles[self.field]
+            if profile_field not in prof.keys():
+                prof.add_fields([profile_field], fractional=False,
+                                weight=profile_weight)
+            ax.plot(prof[self.field], prof[profile_field]*tf.funcs[3].y.max() /
+                    prof[profile_field].max(), color='w', linewidth=3)
+            ax.plot(prof[self.field], prof[profile_field]*tf.funcs[3].y.max() /
+                    prof[profile_field].max(), color='k')
+
+        ax.set_xscale({True: 'log', False: 'linear'}[self.log])
+        ax.set_xlim(x.min(), x.max())
+        ax.set_xlabel(self.pf.field_info[self.field].get_label())
+        ax.set_ylabel(r'$\mathrm{alpha}$')
+        ax.set_ylim(y.max()*1.0e-3, y.max()*2)
+
+        if fn is None:
+            f = cStringIO.StringIO()
+            canvas.print_figure(f)
+            f.seek(0)
+            img = f.read()
+            return Image(img)
+        else:
+            fig.savefig(fn)
+
+    def setup_profile(self, profile_field=None, profile_weight=None):
+        if profile_field is None:
+            profile_field = 'CellVolume'
+        prof = BinnedProfile1D(self.pf.h.all_data(), 128, self.field,
+                               self.bounds[0], self.bounds[1],
+                               log_space=self.log,
+                               lazy_reader=False, end_collect=False)
+        prof.add_fields([profile_field], fractional=False,
+                        weight=profile_weight)
+        self.profiles[self.field] = prof
+        return


https://bitbucket.org/yt_analysis/yt/commits/4e2e9cfe70ce/
Changeset:   4e2e9cfe70ce
Branch:      yt
User:        jzuhone
Date:        2013-10-29 14:30:40
Summary:     Taking this import out
Affected #:  1 file

diff -r fc45b6d9d7ae3ea295c722e14f00113bdb2f00d4 -r 4e2e9cfe70ced27182fa6d9724d43c36553ed321 yt/analysis_modules/sunyaev_zeldovich/projection.py
--- a/yt/analysis_modules/sunyaev_zeldovich/projection.py
+++ b/yt/analysis_modules/sunyaev_zeldovich/projection.py
@@ -301,7 +301,6 @@
         --------
         >>> szprj.write_png("SZsloshing")
         """
-        from IPython import embed
         import matplotlib
         import matplotlib.pyplot as plt
         if log_fields is None: log_fields = {}


https://bitbucket.org/yt_analysis/yt/commits/a9ba5d41376d/
Changeset:   a9ba5d41376d
Branch:      yt
User:        jzuhone
Date:        2013-10-29 15:30:36
Summary:     Found a few bugs in the way the center was being handled.
Affected #:  1 file

diff -r 4e2e9cfe70ced27182fa6d9724d43c36553ed321 -r a9ba5d41376d6f08777b5e18c1c26d72c89ac9d4 yt/analysis_modules/sunyaev_zeldovich/projection.py
--- a/yt/analysis_modules/sunyaev_zeldovich/projection.py
+++ b/yt/analysis_modules/sunyaev_zeldovich/projection.py
@@ -27,6 +27,7 @@
 from yt.visualization.volume_rendering.camera import off_axis_projection
 from yt.utilities.parallel_tools.parallel_analysis_interface import \
      communication_system, parallel_root_only
+from yt.visualization.plot_window import StandardCenter
 import numpy as np
 
 I0 = 2*(kboltz*Tcmb)**3/((hcgs*clight)**2)*1.0e17
@@ -122,13 +123,20 @@
         """
         axis = fix_axis(axis)
 
+        if center == "c":
+            ctr = self.pf.domain_center
+        elif center == "max":
+            v, ctr = self.pf.h.find_max("Density")
+        else:
+            ctr = center
+
         def _beta_par(field, data):
             axis = data.get_field_parameter("axis")
             vpar = data["Density"]*data["%s-velocity" % (vlist[axis])]
             return vpar/clight
         add_field("BetaPar", function=_beta_par)    
 
-        proj = self.pf.h.proj(axis, "Density", source=source)
+        proj = self.pf.h.proj(axis, "Density", center=ctr, source=source)
         proj.set_field_parameter("axis", axis)
         frb = proj.to_frb(width, nx)
         dens = frb["Density"]
@@ -181,7 +189,7 @@
         if center == "c":
             ctr = self.pf.domain_center
         elif center == "max":
-            ctr = self.pf.h.find_max("Density")
+            v, ctr = self.pf.h.find_max("Density")
         else:
             ctr = center
 
@@ -304,7 +312,7 @@
         import matplotlib
         import matplotlib.pyplot as plt
         if log_fields is None: log_fields = {}
-        ticks_font = matplotlib.font_manager.FontProperties(family='serif')
+        ticks_font = matplotlib.font_manager.FontProperties(family='serif',size=16)
         extent = tuple([bound*self.pf.units["kpc"] for bound in self.bounds])
         for field, image in self.items():
             data = image.copy()
@@ -333,17 +341,17 @@
             cbar_label = self.display_names[field]
             if self.units[field] is not None:
                 cbar_label += " ("+self.units[field]+")"
-            fig = plt.figure(figsize=(8.0,6.0))
+            fig = plt.figure(figsize=(10.0,8.0))
             ax = fig.add_subplot(111)
-            cax = ax.imshow(data, norm=norm, extent=extent, cmap=cmap_name)
+            cax = ax.imshow(data, norm=norm, extent=extent, cmap=cmap_name, origin="lower")
             for label in ax.get_xticklabels():
                 label.set_fontproperties(ticks_font)
             for label in ax.get_yticklabels():
                 label.set_fontproperties(ticks_font)                      
-            ax.set_xlabel(r"$\mathrm{x\ (kpc)}$")
-            ax.set_ylabel(r"$\mathrm{y\ (kpc)}$")
+            ax.set_xlabel(r"$\mathrm{x\ (kpc)}$", fontsize=16)
+            ax.set_ylabel(r"$\mathrm{y\ (kpc)}$", fontsize=16)
             cbar = fig.colorbar(cax, format=formatter)
-            cbar.ax.set_ylabel(cbar_label)
+            cbar.ax.set_ylabel(cbar_label, fontsize=16)
             if negative:
                 cbar.ax.set_yticklabels(["-"+label.get_text()
                                          for label in cbar.ax.get_yticklabels()])


https://bitbucket.org/yt_analysis/yt/commits/c144fe080f8c/
Changeset:   c144fe080f8c
Branch:      yt
User:        jzuhone
Date:        2013-10-29 16:02:04
Summary:     Merged yt_analysis/yt into yt
Affected #:  10 files

diff -r a9ba5d41376d6f08777b5e18c1c26d72c89ac9d4 -r c144fe080f8c3011e7e49d8e2d82557467bd459e MANIFEST.in
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -1,4 +1,4 @@
-include distribute_setup.py README* CREDITS FUNDING LICENSE.txt
+include distribute_setup.py README* CREDITS COPYING.txt CITATION
 recursive-include yt/gui/reason/html *.html *.png *.ico *.js
-recursive-include yt *.pyx *.pxd *.hh *.h README*
+recursive-include yt *.pyx *.pxd *.h README*
 recursive-include yt/utilities/kdtree *.f90 *.v Makefile LICENSE
\ No newline at end of file

diff -r a9ba5d41376d6f08777b5e18c1c26d72c89ac9d4 -r c144fe080f8c3011e7e49d8e2d82557467bd459e yt/analysis_modules/absorption_spectrum/absorption_spectrum_fit.py
--- a/yt/analysis_modules/absorption_spectrum/absorption_spectrum_fit.py
+++ b/yt/analysis_modules/absorption_spectrum/absorption_spectrum_fit.py
@@ -86,6 +86,10 @@
     #Empty fit without any lines
     yFit = na.ones(len(fluxData))
 
+    #Force the first and last flux pixel to be 1 to prevent OOB
+    fluxData[0]=1
+    fluxData[-1]=1
+
     #Find all regions where lines/groups of lines are present
     cBounds = _find_complexes(x, fluxData, fitLim=fitLim,
             complexLim=complexLim, minLength=minLength,
@@ -120,9 +124,10 @@
                     z,fitLim,minError*(b[2]-b[1]),speciesDict)
 
             #Check existence of partner lines if applicable
-            newLinesP = _remove_unaccepted_partners(newLinesP, x, fluxData, 
-                    b, minError*(b[2]-b[1]),
-                    x0, xRes, speciesDict)
+            if len(speciesDict['wavelength']) != 1:
+                newLinesP = _remove_unaccepted_partners(newLinesP, x, fluxData, 
+                        b, minError*(b[2]-b[1]),
+                        x0, xRes, speciesDict)
 
             #If flagged as a bad fit, species is lyman alpha,
             #   and it may be a saturated line, use special tools
@@ -548,6 +553,10 @@
         #Index of the redshifted wavelength
         indexRedWl = (redWl-x0)/xRes
 
+        #Check to see if even in flux range
+        if indexRedWl > len(y):
+            return False
+
         #Check if surpasses minimum absorption bound
         if y[int(indexRedWl)]>fluxMin:
             return False

diff -r a9ba5d41376d6f08777b5e18c1c26d72c89ac9d4 -r c144fe080f8c3011e7e49d8e2d82557467bd459e yt/data_objects/data_containers.py
--- a/yt/data_objects/data_containers.py
+++ b/yt/data_objects/data_containers.py
@@ -642,7 +642,7 @@
     --------
 
     >>> pf = load("RedshiftOutput0005")
-    >>> ray = pf.h._ray((0.2, 0.74, 0.11), (0.4, 0.91, 0.31))
+    >>> ray = pf.h.ray((0.2, 0.74, 0.11), (0.4, 0.91, 0.31))
     >>> print ray["Density"], ray["t"], ray["dts"]
     """
     _type_name = "ray"
@@ -1864,7 +1864,7 @@
         new_buf.append(self.comm.mpi_allreduce(buf.pop(0), op=op))
         tree = self._get_tree(len(fields))
         tree.frombuffer(new_buf[0], new_buf[1], new_buf[2], merge_style)
-        coord_data, field_data, weight_data, dxs = [], [], [], []
+        coord_data, field_data, weight_data, dxs, dys = [], [], [], [], []
         for level in range(0, self._max_level + 1):
             npos, nvals, nwvals = tree.get_all_from_level(level, False)
             coord_data.append(npos)
@@ -1873,10 +1873,12 @@
             weight_data.append(nwvals)
             gs = self.source.select_grids(level)
             if len(gs) > 0:
-                ds = gs[0].dds[0]
+                dx = gs[0].dds[x_dict[self.axis]]
+                dy = gs[0].dds[y_dict[self.axis]]
             else:
-                ds = 0.0
-            dxs.append(np.ones(nvals.shape[0], dtype='float64') * ds)
+                dx = dy = 0.0
+            dxs.append(np.ones(nvals.shape[0], dtype='float64') * dx)
+            dys.append(np.ones(nvals.shape[0], dtype='float64') * dy)
         coord_data = np.concatenate(coord_data, axis=0).transpose()
         field_data = np.concatenate(field_data, axis=0).transpose()
         if self._weight is None:
@@ -1884,17 +1886,19 @@
             field_data *= convs[:,None]
         weight_data = np.concatenate(weight_data, axis=0).transpose()
         dxs = np.concatenate(dxs, axis=0).transpose()
+        dys = np.concatenate(dys, axis=0).transpose()
         # We now convert to half-widths and center-points
         data = {}
         data['pdx'] = dxs
+        data['pdy'] = dys
         ox = self.pf.domain_left_edge[x_dict[self.axis]]
         oy = self.pf.domain_left_edge[y_dict[self.axis]]
         data['px'] = (coord_data[0,:]+0.5) * data['pdx'] + ox
-        data['py'] = (coord_data[1,:]+0.5) * data['pdx'] + oy
+        data['py'] = (coord_data[1,:]+0.5) * data['pdy'] + oy
         data['weight_field'] = weight_data
         del coord_data
         data['pdx'] *= 0.5
-        data['pdy'] = data['pdx'] # generalization is out the window!
+        data['pdy'] *= 0.5
         data['fields'] = field_data
         # Now we run the finalizer, which is ignored if we don't need it
         field_data = np.vsplit(data.pop('fields'), len(fields))
@@ -3687,6 +3691,7 @@
                            fields=fields, pf=pf, **kwargs)
         self.left_edge = np.array(left_edge)
         self.level = level
+        dims = np.array(dims)
         rdx = self.pf.domain_dimensions*self.pf.refine_by**level
         rdx[np.where(dims - 2 * num_ghost_zones <= 1)] = 1   # issue 602
         self.dds = self.pf.domain_width / rdx.astype("float64")

diff -r a9ba5d41376d6f08777b5e18c1c26d72c89ac9d4 -r c144fe080f8c3011e7e49d8e2d82557467bd459e yt/data_objects/field_info_container.py
--- a/yt/data_objects/field_info_container.py
+++ b/yt/data_objects/field_info_container.py
@@ -190,7 +190,7 @@
         return "(%s)" % (self.missing_parameters)
 
 class FieldDetector(defaultdict):
-    Level = 1
+    Level = level = 1
     NumberOfParticles = 1
     _read_exception = None
     _id_offset = 0

diff -r a9ba5d41376d6f08777b5e18c1c26d72c89ac9d4 -r c144fe080f8c3011e7e49d8e2d82557467bd459e yt/data_objects/time_series.py
--- a/yt/data_objects/time_series.py
+++ b/yt/data_objects/time_series.py
@@ -13,10 +13,11 @@
 # The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
 
-import inspect, functools, weakref, glob, types
+import inspect, functools, weakref, glob, types, os
 
 from yt.funcs import *
 from yt.convenience import load
+from yt.config import ytcfg
 from .data_containers import data_object_registry
 from .analyzer_objects import create_quantity_proxy, \
     analysis_task_registry, AnalysisTask
@@ -250,10 +251,17 @@
         """
         
         if isinstance(filenames, types.StringTypes):
-            filenames = glob.glob(filenames)
+            if len(glob.glob(filenames)) == 0:
+                data_dir = ytcfg.get("yt", "test_data_dir")
+                pattern = os.path.join(data_dir, filenames)
+                td_filenames = glob.glob(pattern)
+                if len(td_filenames) > 0:
+                    filenames = td_filenames
+                else:
+                    raise YTOutputNotIdentified(filenames, {})
+            else:
+                filenames = glob.glob(filenames)
             filenames.sort()
-        if len(filenames) == 0:
-            raise YTOutputNotIdentified(filenames, {})
         obj = cls(filenames[:], parallel = parallel, **kwargs)
         return obj
 

diff -r a9ba5d41376d6f08777b5e18c1c26d72c89ac9d4 -r c144fe080f8c3011e7e49d8e2d82557467bd459e yt/data_objects/universal_fields.py
--- a/yt/data_objects/universal_fields.py
+++ b/yt/data_objects/universal_fields.py
@@ -1124,6 +1124,182 @@
           units=r"\rm{s}^{-2}",
           convert_function=_convertVorticitySquared)
 
+def _Shear(field, data):
+    """
+    Shear is defined as [(dvx/dy + dvy/dx)^2 + (dvz/dy + dvy/dz)^2 +
+                         (dvx/dz + dvz/dx)^2 ]^(0.5)
+    where dvx/dy = [vx(j-1) - vx(j+1)]/[2dy]
+    and is in units of s^(-1)
+    (it's just like vorticity except add the derivative pairs instead
+     of subtracting them)
+    """
+    # We need to set up stencils
+    if data.pf["HydroMethod"] == 2:
+        sl_left = slice(None,-2,None)
+        sl_right = slice(1,-1,None)
+        div_fac = 1.0
+    else:
+        sl_left = slice(None,-2,None)
+        sl_right = slice(2,None,None)
+        div_fac = 2.0
+    new_field = np.zeros(data["x-velocity"].shape)
+    if data.pf.dimensionality > 1:
+        dvydx = (data["y-velocity"][sl_right,1:-1,1:-1] -
+                data["y-velocity"][sl_left,1:-1,1:-1]) \
+                / (div_fac*data["dx"].flat[0])
+        dvxdy = (data["x-velocity"][1:-1,sl_right,1:-1] -
+                data["x-velocity"][1:-1,sl_left,1:-1]) \
+                / (div_fac*data["dy"].flat[0])
+        new_field[1:-1,1:-1,1:-1] += (dvydx + dvxdy)**2.0
+        del dvydx, dvxdy
+    if data.pf.dimensionality > 2:
+        dvzdy = (data["z-velocity"][1:-1,sl_right,1:-1] -
+                data["z-velocity"][1:-1,sl_left,1:-1]) \
+                / (div_fac*data["dy"].flat[0])
+        dvydz = (data["y-velocity"][1:-1,1:-1,sl_right] -
+                data["y-velocity"][1:-1,1:-1,sl_left]) \
+                / (div_fac*data["dz"].flat[0])
+        new_field[1:-1,1:-1,1:-1] += (dvzdy + dvydz)**2.0
+        del dvzdy, dvydz
+        dvxdz = (data["x-velocity"][1:-1,1:-1,sl_right] -
+                data["x-velocity"][1:-1,1:-1,sl_left]) \
+                / (div_fac*data["dz"].flat[0])
+        dvzdx = (data["z-velocity"][sl_right,1:-1,1:-1] -
+                data["z-velocity"][sl_left,1:-1,1:-1]) \
+                / (div_fac*data["dx"].flat[0])
+        new_field[1:-1,1:-1,1:-1] += (dvxdz + dvzdx)**2.0
+        del dvxdz, dvzdx
+    new_field = new_field**0.5
+    new_field = np.abs(new_field)
+    return new_field
+def _convertShear(data):
+    return data.convert("cm")**-1.0
+add_field("Shear", function=_Shear,
+          validators=[ValidateSpatial(1,
+              ["x-velocity","y-velocity","z-velocity"])],
+          units=r"\rm{s}^{-1}",
+          convert_function=_convertShear, take_log=False)
+
+def _ShearCriterion(field, data):
+    """
+    Shear is defined as [(dvx/dy + dvy/dx)^2 + (dvz/dy + dvy/dz)^2 +
+                         (dvx/dz + dvz/dx)^2 ]^(0.5)
+    where dvx/dy = [vx(j-1) - vx(j+1)]/[2dy]
+    and is in units of s^(-1)
+    (it's just like vorticity except add the derivative pairs instead
+     of subtracting them)
+
+    Divide by c_s to leave Shear in units of cm**-1, which 
+    can be compared against the inverse of the local cell size (1/dx) 
+    to determine if refinement should occur.
+    """
+    # We need to set up stencils
+    if data.pf["HydroMethod"] == 2:
+        sl_left = slice(None,-2,None)
+        sl_right = slice(1,-1,None)
+        div_fac = 1.0
+    else:
+        sl_left = slice(None,-2,None)
+        sl_right = slice(2,None,None)
+        div_fac = 2.0
+    new_field = np.zeros(data["x-velocity"].shape)
+    if data.pf.dimensionality > 1:
+        dvydx = (data["y-velocity"][sl_right,1:-1,1:-1] -
+                data["y-velocity"][sl_left,1:-1,1:-1]) \
+                / (div_fac*data["dx"].flat[0])
+        dvxdy = (data["x-velocity"][1:-1,sl_right,1:-1] -
+                data["x-velocity"][1:-1,sl_left,1:-1]) \
+                / (div_fac*data["dy"].flat[0])
+        new_field[1:-1,1:-1,1:-1] += (dvydx + dvxdy)**2.0
+        del dvydx, dvxdy
+    if data.pf.dimensionality > 2:
+        dvzdy = (data["z-velocity"][1:-1,sl_right,1:-1] -
+                data["z-velocity"][1:-1,sl_left,1:-1]) \
+                / (div_fac*data["dy"].flat[0])
+        dvydz = (data["y-velocity"][1:-1,1:-1,sl_right] -
+                data["y-velocity"][1:-1,1:-1,sl_left]) \
+                / (div_fac*data["dz"].flat[0])
+        new_field[1:-1,1:-1,1:-1] += (dvzdy + dvydz)**2.0
+        del dvzdy, dvydz
+        dvxdz = (data["x-velocity"][1:-1,1:-1,sl_right] -
+                data["x-velocity"][1:-1,1:-1,sl_left]) \
+                / (div_fac*data["dz"].flat[0])
+        dvzdx = (data["z-velocity"][sl_right,1:-1,1:-1] -
+                data["z-velocity"][sl_left,1:-1,1:-1]) \
+                / (div_fac*data["dx"].flat[0])
+        new_field[1:-1,1:-1,1:-1] += (dvxdz + dvzdx)**2.0
+        del dvxdz, dvzdx
+    new_field /= data["SoundSpeed"]**2.0
+    new_field = new_field**(0.5)
+    new_field = np.abs(new_field)
+    return new_field
+
+def _convertShearCriterion(data):
+    return data.convert("cm")**-1.0
+add_field("ShearCriterion", function=_ShearCriterion,
+          validators=[ValidateSpatial(1,
+              ["x-velocity","y-velocity","z-velocity", "SoundSpeed"])],
+          units=r"\rm{cm}^{-1}",
+          convert_function=_convertShearCriterion, take_log=False)
+
+def _ShearMach(field, data):
+    """
+    Dimensionless Shear (ShearMach) is defined nearly the same as shear, 
+    except that it is scaled by the local dx/dy/dz and the local sound speed.
+    So it results in a unitless quantity that is effectively measuring 
+    shear in mach number.  
+
+    In order to avoid discontinuities created by multiplying by dx/dy/dz at
+    grid refinement boundaries, we also multiply by 2**GridLevel.
+
+    Shear (Mach) = [(dvx + dvy)^2 + (dvz + dvy)^2 +
+                    (dvx + dvz)^2  ]^(0.5) / c_sound
+    """
+    # We need to set up stencils
+    if data.pf["HydroMethod"] == 2:
+        sl_left = slice(None,-2,None)
+        sl_right = slice(1,-1,None)
+        div_fac = 1.0
+    else:
+        sl_left = slice(None,-2,None)
+        sl_right = slice(2,None,None)
+        div_fac = 2.0
+    new_field = np.zeros(data["x-velocity"].shape)
+    if data.pf.dimensionality > 1:
+        dvydx = (data["y-velocity"][sl_right,1:-1,1:-1] -
+                data["y-velocity"][sl_left,1:-1,1:-1]) \
+                / (div_fac)
+        dvxdy = (data["x-velocity"][1:-1,sl_right,1:-1] -
+                data["x-velocity"][1:-1,sl_left,1:-1]) \
+                / (div_fac)
+        new_field[1:-1,1:-1,1:-1] += (dvydx + dvxdy)**2.0
+        del dvydx, dvxdy
+    if data.pf.dimensionality > 2:
+        dvzdy = (data["z-velocity"][1:-1,sl_right,1:-1] -
+                data["z-velocity"][1:-1,sl_left,1:-1]) \
+                / (div_fac)
+        dvydz = (data["y-velocity"][1:-1,1:-1,sl_right] -
+                data["y-velocity"][1:-1,1:-1,sl_left]) \
+                / (div_fac)
+        new_field[1:-1,1:-1,1:-1] += (dvzdy + dvydz)**2.0
+        del dvzdy, dvydz
+        dvxdz = (data["x-velocity"][1:-1,1:-1,sl_right] -
+                data["x-velocity"][1:-1,1:-1,sl_left]) \
+                / (div_fac)
+        dvzdx = (data["z-velocity"][sl_right,1:-1,1:-1] -
+                data["z-velocity"][sl_left,1:-1,1:-1]) \
+                / (div_fac)
+        new_field[1:-1,1:-1,1:-1] += (dvxdz + dvzdx)**2.0
+        del dvxdz, dvzdx
+    new_field *= ((2.0**data.level)/data["SoundSpeed"])**2.0
+    new_field = new_field**0.5
+    new_field = np.abs(new_field)
+    return new_field
+add_field("ShearMach", function=_ShearMach,
+          validators=[ValidateSpatial(1,
+              ["x-velocity","y-velocity","z-velocity","SoundSpeed"])],
+          units=r"\rm{Mach}",take_log=False)
+
 def _gradPressureX(field, data):
     # We need to set up stencils
     if data.pf["HydroMethod"] == 2:

diff -r a9ba5d41376d6f08777b5e18c1c26d72c89ac9d4 -r c144fe080f8c3011e7e49d8e2d82557467bd459e yt/utilities/command_line.py
--- a/yt/utilities/command_line.py
+++ b/yt/utilities/command_line.py
@@ -1452,6 +1452,7 @@
         except ImportError:
             # pre-IPython v1.0
             from IPython.frontend.html.notebook.notebookapp import NotebookApp
+        print "You must choose a password so that others cannot connect to your notebook."
         pw = ytcfg.get("yt", "notebook_password")
         if len(pw) == 0 and not args.no_password:
             import IPython.lib

diff -r a9ba5d41376d6f08777b5e18c1c26d72c89ac9d4 -r c144fe080f8c3011e7e49d8e2d82557467bd459e yt/utilities/setup.py
--- a/yt/utilities/setup.py
+++ b/yt/utilities/setup.py
@@ -36,7 +36,7 @@
     _archs = ['lib64', 'lib']
     if platform.system() == 'Linux':
         distname, version, did = platform.linux_distribution()
-        if distname in ('Ubuntu', 'Debian'):
+        if distname.lower() in ('ubuntu', 'debian'):
             _archs.extend(
                 ['lib/x86_64-linux-gnu',
                  'lib/i686-linux-gnu',
@@ -73,11 +73,12 @@
 def check_prefix(inc_dir, lib_dir):
     if platform.system() == 'Linux':
         distname, version, did = platform.linux_distribution()
-        if distname in ('Ubuntu', 'Debian'):
+        if distname.lower() in ('ubuntu', 'debian'):
             print("Since you are using multiarch distro it's hard to detect")
             print("whether library matches the header file. We will assume")
             print("it does. If you encounter any build failures please use")
             print("proper cfg files to provide path to the dependencies")
+            print("")
             return (inc_dir, lib_dir)
     prefix = os.path.commonprefix([inc_dir, lib_dir]).rstrip('/\\')
     if prefix is not '' and prefix == os.path.dirname(inc_dir):

diff -r a9ba5d41376d6f08777b5e18c1c26d72c89ac9d4 -r c144fe080f8c3011e7e49d8e2d82557467bd459e yt/visualization/plot_modifications.py
--- a/yt/visualization/plot_modifications.py
+++ b/yt/visualization/plot_modifications.py
@@ -186,20 +186,30 @@
         plot._axes.hold(True)
         nx = plot.image._A.shape[0] / self.factor
         ny = plot.image._A.shape[1] / self.factor
+        # periodicity
+        ax = plot.data.axis
+        pf = plot.data.pf
+        period_x = pf.domain_width[x_dict[ax]]
+        period_y = pf.domain_width[y_dict[ax]]
+        periodic = int(any(pf.periodicity))
         pixX = _MPL.Pixelize(plot.data['px'],
                              plot.data['py'],
                              plot.data['pdx'],
                              plot.data['pdy'],
                              plot.data[self.field_x] - self.bv_x,
                              int(nx), int(ny),
-                           (x0, x1, y0, y1),).transpose()
+                             (x0, x1, y0, y1), 0, # bounds, antialias
+                             (period_x, period_y), periodic,
+                           ).transpose()
         pixY = _MPL.Pixelize(plot.data['px'],
                              plot.data['py'],
                              plot.data['pdx'],
                              plot.data['pdy'],
                              plot.data[self.field_y] - self.bv_y,
                              int(nx), int(ny),
-                           (x0, x1, y0, y1),).transpose()
+                             (x0, x1, y0, y1), 0, # bounds, antialias
+                             (period_x, period_y), periodic,
+                           ).transpose()
         X,Y = np.meshgrid(np.linspace(xx0,xx1,nx,endpoint=True),
                           np.linspace(yy0,yy1,ny,endpoint=True))
         if self.normalize:

diff -r a9ba5d41376d6f08777b5e18c1c26d72c89ac9d4 -r c144fe080f8c3011e7e49d8e2d82557467bd459e yt/visualization/plot_window.py
--- a/yt/visualization/plot_window.py
+++ b/yt/visualization/plot_window.py
@@ -533,19 +533,34 @@
         parameters
         ----------
         new_center : two element sequence of floats
-            The coordinates of the new center of the image.
-            If the unit keyword is not specified, the
-            coordinates are assumed to be in code units
+            The coordinates of the new center of the image in the
+            coordinate system defined by the plot axes. If the unit
+            keyword is not specified, the coordinates are assumed to
+            be in code units.
 
         unit : string
             The name of the unit new_center is given in.
 
         """
+        error = RuntimeError(
+            "\n"
+            "new_center must be a two-element list or tuple of floats \n"
+            "corresponding to a coordinate in the plot relative to \n"
+            "the plot coordinate system.\n"
+        )
         if new_center is None:
             self.center = None
-        else:
+        elif iterable(new_center):
+            try:
+                assert all(isinstance(el, Number) for el in new_center)
+            except AssertionError:
+                raise error
+            if len(new_center) != 2:
+                raise error
             new_center = [c / self.pf[unit] for c in new_center]
             self.center = new_center
+        else:
+            raise error
         self.set_window(self.bounds)
         return self
 
@@ -1049,7 +1064,7 @@
         field : string
             the field to set a transform
             if field == 'all', applies to all plots.
-        cmap_name : string
+        cmap : string
             name of the colormap
 
         """

Repository URL: https://bitbucket.org/yt_analysis/yt/

--

This is a commit notification from bitbucket.org. You are receiving
this email because you have the commit notification service enabled
for this repository.



More information about the yt-svn mailing list