[yt-svn] commit/yt: 2 new changesets

commits-noreply at bitbucket.org
Fri Jul 24 09:41:43 PDT 2015


2 new commits in yt:

https://bitbucket.org/yt_analysis/yt/commits/28733726b2a7/
Changeset:   28733726b2a7
Branch:      stable
User:        brittonsmith
Date:        2015-07-24 16:39:29+00:00
Summary:     Merging in development for 3.2 release.
Affected #:  435 files

diff -r 665e2b6cdf78aa7722c300a6505d1bad3c5d44a3 -r 28733726b2a751e774c8b7ae46121aa57fd1060f .hgignore
--- a/.hgignore
+++ b/.hgignore
@@ -11,7 +11,10 @@
 yt/analysis_modules/halo_finding/rockstar/rockstar_interface.c
 yt/analysis_modules/ppv_cube/ppv_utils.c
 yt/frontends/ramses/_ramses_reader.cpp
+yt/frontends/sph/smoothing_kernel.c
 yt/geometry/fake_octree.c
+yt/geometry/grid_container.c
+yt/geometry/grid_visitors.c
 yt/geometry/oct_container.c
 yt/geometry/oct_visitors.c
 yt/geometry/particle_deposit.c
@@ -24,6 +27,7 @@
 yt/utilities/spatial/ckdtree.c
 yt/utilities/lib/alt_ray_tracers.c
 yt/utilities/lib/amr_kdtools.c
+yt/utilities/lib/bitarray.c
 yt/utilities/lib/CICDeposit.c
 yt/utilities/lib/ContourFinding.c
 yt/utilities/lib/DepthFirstOctree.c
@@ -37,7 +41,9 @@
 yt/utilities/lib/mesh_utilities.c
 yt/utilities/lib/misc_utilities.c
 yt/utilities/lib/Octree.c
+yt/utilities/lib/GridTree.c
 yt/utilities/lib/origami.c
+yt/utilities/lib/pixelization_routines.c
 yt/utilities/lib/png_writer.c
 yt/utilities/lib/PointsInVolume.c
 yt/utilities/lib/QuadTree.c
@@ -45,7 +51,6 @@
 yt/utilities/lib/ragged_arrays.c
 yt/utilities/lib/VolumeIntegrator.c
 yt/utilities/lib/grid_traversal.c
-yt/utilities/lib/GridTree.c
 yt/utilities/lib/marching_cubes.c
 yt/utilities/lib/png_writer.h
 yt/utilities/lib/write_array.c
@@ -59,3 +64,4 @@
 doc/source/reference/api/generated/*
 doc/_temp/*
 doc/source/bootcamp/.ipynb_checkpoints/
+dist

diff -r 665e2b6cdf78aa7722c300a6505d1bad3c5d44a3 -r 28733726b2a751e774c8b7ae46121aa57fd1060f .python-version
--- /dev/null
+++ b/.python-version
@@ -0,0 +1,1 @@
+2.7.9

diff -r 665e2b6cdf78aa7722c300a6505d1bad3c5d44a3 -r 28733726b2a751e774c8b7ae46121aa57fd1060f README
--- a/README
+++ b/README
@@ -21,3 +21,4 @@
 ways to help development, please visit our website.
 
 Enjoy!
+

diff -r 665e2b6cdf78aa7722c300a6505d1bad3c5d44a3 -r 28733726b2a751e774c8b7ae46121aa57fd1060f benchmarks/asv.conf.json
--- /dev/null
+++ b/benchmarks/asv.conf.json
@@ -0,0 +1,64 @@
+{
+    // The version of the config file format.  Do not change, unless
+    // you know what you are doing.
+    "version": 1,
+
+    // The name of the project being benchmarked
+    "project": "yt",
+
+    // The project's homepage
+    "project_url": "http://yt-project.org/",
+
+    // The URL of the source code repository for the project being
+    // benchmarked
+    "repo": "hg+https://bitbucket.org/yt_analysis/yt/",
+
+    // The tool to use to create environments.  May be "conda",
+    // "virtualenv" or other value depending on the plugins in use.
+    // If missing or the empty string, the tool will be automatically
+    // determined by looking for tools on the PATH environment
+    // variable.
+    "environment_type": "virtualenv",
+
+    // the base URL to show a commit for the project.
+    "show_commit_url": "http://bitbucket.org/yt_analysis/yt/commits/",
+
+    // The Pythons you'd like to test against.  If not provided, defaults
+    // to the current version of Python used to run `asv`.
+    // "pythons": ["2.7", "3.3"],
+
+    // The matrix of dependencies to test.  Each key is the name of a
+    // package (in PyPI) and the values are version numbers.  An empty
+    // list indicates to just test against the default (latest)
+    // version.
+    "matrix": {
+        "numpy":        ["1.9.1"],
+        "cython":       ["0.21.2"],
+        //"h5py":         ["2.4.0"],
+        "ipython":      ["2.2.0"],
+        "matplotlib":   ["1.4.0"],
+        "sympy":        ["0.7.6"],
+    },
+
+    // The directory (relative to the current directory) that benchmarks are
+    // stored in.  If not provided, defaults to "benchmarks"
+    // "benchmark_dir": "benchmarks",
+
+    // The directory (relative to the current directory) to cache the Python
+    // environments in.  If not provided, defaults to "env"
+    // "env_dir": "env",
+
+
+    // The directory (relative to the current directory) that raw benchmark
+    // results are stored in.  If not provided, defaults to "results".
+    // "results_dir": "results",
+
+    // The directory (relative to the current directory) that the html tree
+    // should be written to.  If not provided, defaults to "html".
+    // "html_dir": "html",
+
+    // The number of characters to retain in the commit hashes.
+    // "hash_length": 8
+    //
+    "plugins": [".mercurial"],
+}

diff -r 665e2b6cdf78aa7722c300a6505d1bad3c5d44a3 -r 28733726b2a751e774c8b7ae46121aa57fd1060f benchmarks/benchmarks/__init__.py
--- /dev/null
+++ b/benchmarks/benchmarks/__init__.py
@@ -0,0 +1,1 @@
+

diff -r 665e2b6cdf78aa7722c300a6505d1bad3c5d44a3 -r 28733726b2a751e774c8b7ae46121aa57fd1060f benchmarks/benchmarks/benchmarks.py
--- /dev/null
+++ b/benchmarks/benchmarks/benchmarks.py
@@ -0,0 +1,31 @@
+# Write the benchmarking functions here.
+# See "Writing benchmarks" in the asv docs for more information.
+import numpy as np
+from yt import YTArray, YTQuantity
+
+def time_quantity_init_scalar1():
+    3.0 * YTQuantity(1, "m/s")
+
+
+def time_quantity_init_scalar2():
+    YTQuantity(3.0, "m/s")
+
+
+def time_quantity_init_array():
+    np.arange(100000) * YTQuantity(1, "m/s")
+
+
+def time_quantity_init_array2():
+    YTArray(np.arange(100000), "m/s")
+
+
+def time_quantity_scalar_conversion():
+    YTQuantity(3.0, "m/s").in_units("km/hr")
+
+
+def time_quantity_array_conversion():
+    YTArray(np.arange(100000), "m/s").in_units("km/hr")
+
+
+def time_quantity_ufunc_sin():
+    np.sin(YTArray(np.arange(10000), "degree"))

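A note on the convention above: asv collects functions whose names start
with "time_" and reports wall-clock timings for them.  Roughly the same
measurement can be made by hand with the standard library; this is a
sketch only, not part of the changeset:

    import timeit

    # Manual equivalent of timing time_quantity_scalar_conversion();
    # asv itself handles calibration, repeats, and result storage.
    setup = "from yt import YTQuantity; q = YTQuantity(3.0, 'm/s')"
    best = min(timeit.repeat("q.in_units('km/hr')", setup=setup,
                             repeat=3, number=1000))
    print("%.3f us per call" % (best / 1000 * 1e6))
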
diff -r 665e2b6cdf78aa7722c300a6505d1bad3c5d44a3 -r 28733726b2a751e774c8b7ae46121aa57fd1060f benchmarks/benchmarks/data_objects.py
--- /dev/null
+++ b/benchmarks/benchmarks/data_objects.py
@@ -0,0 +1,69 @@
+import numpy as np
+import yt
+
+if yt.__version__.startswith('2'):
+    from yt.mods import load, ColorTransferFunction
+# else:
+#     from yt.visualization.volume_rendering.old_camera import Camera
+
+class Suite:
+    dsname = "HiresIsolatedGalaxy/DD0044/DD0044"
+    def setup(self):
+        if yt.__version__.startswith('3'):
+            self.ds = yt.load(self.dsname)
+            self.ad = self.ds.all_data()
+            self.field_name = "density"
+        else:
+            self.ds = load(self.dsname)
+            self.ad = self.ds.h.all_data()
+            self.field_name = "Density"
+        # Warmup hdd
+        self.ad[self.field_name]
+        if yt.__version__.startswith('3'):
+            mi, ma = self.ad.quantities['Extrema'](self.field_name)
+            self.tf = yt.ColorTransferFunction((np.log10(mi)+1, np.log10(ma)))
+        else:
+            mi, ma = self.ad.quantities['Extrema'](self.field_name)[0]
+            self.tf = ColorTransferFunction((np.log10(mi)+1, np.log10(ma)))
+        self.tf.add_layers(5, w=0.02, colormap="spectral")
+        self.c = [0.5, 0.5, 0.5]
+        self.L = [0.5, 0.2, 0.7]
+        self.W = 1.0
+        self.Npixels = 512
+
+    if yt.__version__.startswith('3'):
+        def time_load_all_data(self):
+            dd = self.ds.all_data()
+            dd[self.field_name]
+    else:
+        def time_load_all_data(self):
+            dd = self.ds.h.all_data()
+            dd[self.field_name]
+
+    def time_extrema_quantities(self):
+        self.ad.quantities['Extrema'](self.field_name)
+
+    if yt.__version__.startswith('3'):
+        def time_alldata_projection(self):
+            self.ds.proj(self.field_name, 0)
+    else:
+        def time_alldata_projection(self):
+            self.ds.h.proj(0, self.field_name)
+
+    if yt.__version__.startswith('3'):
+        def time_slice(self):
+            slc = self.ds.slice(0, 0.5)
+            slc[self.field_name]
+    else:
+        def time_slice(self):
+            slc = self.ds.h.slice(0, 0.5, self.field_name)
+            slc[self.field_name]
+
+#    if yt.__version__.startswith('3'):
+#        def command(self):
+#            cam = Camera(self.c, self.L, self.W, self.Npixels, self.tf, ds=self.ds)
+#            cam.snapshot("%s_volume_rendered.png" % self.ds, clip_ratio=8.0)
+#    else:
+#        def command(self):
+#            cam = self.ds.h.camera(self.c, self.L, self.W, self.Npixels, self.tf)
+#            cam.snapshot("%s_volume_rendered.png" % self.ds, clip_ratio=8.0)

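The pattern in data_objects.py bears a brief note: the version test runs
once, while the class body executes, so each benchmark method is bound to
the right API (yt-3.x datasets versus the yt-2.x hierarchy handle) with no
per-call branching.  A minimal sketch of the same idea, assuming a setup()
has populated self.ds and self.field_name as above:

    import yt

    class VersionedSuite:
        # The branch is evaluated at class-definition time, not per call.
        if yt.__version__.startswith('3'):
            def time_all_data(self):
                dd = self.ds.all_data()       # yt-3.x API
                dd[self.field_name]
        else:
            def time_all_data(self):
                dd = self.ds.h.all_data()     # yt-2.x hierarchy API
                dd[self.field_name]
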
diff -r 665e2b6cdf78aa7722c300a6505d1bad3c5d44a3 -r 28733726b2a751e774c8b7ae46121aa57fd1060f benchmarks/benchmarks/large_tipsy.py
--- /dev/null
+++ b/benchmarks/benchmarks/large_tipsy.py
@@ -0,0 +1,52 @@
+import numpy as np
+import yt
+from yt.utilities.answer_testing.framework import data_dir_load
+
+class PKDGravTipsySuite:
+    dsname = "halo1e11_run1.00400/halo1e11_run1.00400"
+    timeout = 360.0
+    def setup(self):
+        cosmology_parameters = dict(current_redshift = 0.0,
+                                    omega_lambda = 0.728,
+                                    omega_matter = 0.272,
+                                    hubble_constant = 0.702)
+        kwargs = dict(field_dtypes = {"Coordinates": "d"},
+                      cosmology_parameters = cosmology_parameters,
+                      unit_base = {'length': (1.0/60.0, "Mpccm/h")},
+                      n_ref = 64)
+        self.ds = data_dir_load(self.dsname, yt.TipsyDataset, (), kwargs)
+
+    def time_all_particles(self):
+        dd = self.ds.all_data()
+        dd["all", "particle_velocity_x"]
+        dd["all", "particle_velocity_y"]
+        dd["all", "particle_velocity_z"]
+
+    def time_all_particles_derived(self):
+        dd = self.ds.all_data()
+        dd["all", "particle_velocity_magnitude"]
+
+    def time_project_unweight(self):
+        proj = self.ds.proj("all_density", 0)
+
+    def time_project_weight(self):
+        proj = self.ds.proj("all_density", 0, "all_density")
+
+    def time_particle_quantities(self):
+        dd = self.ds.all_data()
+        dd.quantities.extrema("particle_mass")
+        dd.quantities.extrema("particle_velocity_magnitude")
+        dd.quantities.extrema(["particle_velocity_%s" % ax for ax in 'xyz'])
+
+class GasolineTipsySuite(PKDGravTipsySuite):
+    dsname = "agora_1e11.00400/agora_1e11.00400"
+    timeout = 360.0
+    def setup(self):
+        cosmology_parameters = dict(current_redshift = 0.0,
+                                    omega_lambda = 0.728,
+                                    omega_matter = 0.272,
+                                    hubble_constant = 0.702)
+        kwargs = dict(cosmology_parameters = cosmology_parameters,
+                      unit_base = {'length': (1.0/60.0, "Mpccm/h")},
+                      n_ref = 64)
+        self.ds = data_dir_load(self.dsname, yt.TipsyDataset, (), kwargs)

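For reference, data_dir_load comes from the answer-testing framework and
resolves dsname inside the configured test-data directory.  Outside that
framework, the equivalent direct load would look roughly like this (the
local path below is a placeholder, not part of the changeset):

    import yt

    # Hypothetical path; yt.load forwards these keyword arguments to
    # the Tipsy frontend.
    ds = yt.load("/path/to/halo1e11_run1.00400/halo1e11_run1.00400",
                 field_dtypes={"Coordinates": "d"},
                 cosmology_parameters=dict(current_redshift=0.0,
                                           omega_lambda=0.728,
                                           omega_matter=0.272,
                                           hubble_constant=0.702),
                 unit_base={'length': (1.0/60.0, "Mpccm/h")},
                 n_ref=64)
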
diff -r 665e2b6cdf78aa7722c300a6505d1bad3c5d44a3 -r 28733726b2a751e774c8b7ae46121aa57fd1060f benchmarks/benchmarks/small_enzo.py
--- /dev/null
+++ b/benchmarks/benchmarks/small_enzo.py
@@ -0,0 +1,46 @@
+import numpy as np
+import yt
+
+class SmallEnzoSuite:
+    dsname = "IsolatedGalaxy/galaxy0030/galaxy0030"
+    def setup(self):
+        self.ds = yt.load(self.dsname)
+
+    def time_all_particles(self):
+        dd = self.ds.all_data()
+        dd["all", "particle_velocity_x"]
+        dd["all", "particle_velocity_y"]
+        dd["all", "particle_velocity_z"]
+
+    def time_all_particles_derived(self):
+        dd = self.ds.all_data()
+        dd["all", "particle_velocity_magnitude"]
+
+    def time_gas_read(self):
+        dd = self.ds.all_data()
+        dd["gas", "density"]
+
+    def time_gas_derived(self):
+        dd = self.ds.all_data()
+        dd["gas", "velocity_magnitude"]
+
+    def time_project_unweight(self):
+        proj = self.ds.proj("density", 0)
+
+    def time_project_weight(self):
+        proj = self.ds.proj("density", 0, "density")
+
+    def time_ghostzones(self):
+        dd = self.ds.all_data()
+        dd["velocity_divergence"]
+
+    def time_particle_quantities(self):
+        dd = self.ds.all_data()
+        dd.quantities.extrema("particle_mass")
+        dd.quantities.extrema("particle_velocity_magnitude")
+        dd.quantities.extrema(["particle_velocity_%s" % ax for ax in 'xyz'])
+
+    def time_gas_quantities(self):
+        dd = self.ds.all_data()
+        dd.quantities.extrema("density")
+        dd.quantities.extrema(["velocity_x", "velocity_y", "velocity_z"])

diff -r 665e2b6cdf78aa7722c300a6505d1bad3c5d44a3 -r 28733726b2a751e774c8b7ae46121aa57fd1060f benchmarks/benchmarks/small_flash.py
--- /dev/null
+++ b/benchmarks/benchmarks/small_flash.py
@@ -0,0 +1,31 @@
+import numpy as np
+import yt
+
+class SmallFlashSuite:
+    dsname = "GasSloshingLowRes/sloshing_low_res_hdf5_plt_cnt_0690"
+    def setup(self):
+        self.ds = yt.load(self.dsname)
+
+    def time_gas_read(self):
+        dd = self.ds.all_data()
+        dd["gas", "density"]
+
+    def time_gas_derived(self):
+        dd = self.ds.all_data()
+        dd["gas", "velocity_magnitude"]
+
+    def time_project_unweight(self):
+        proj = self.ds.proj("density", 0)
+
+    def time_project_weight(self):
+        proj = self.ds.proj("density", 0, "density")
+
+    def time_ghostzones(self):
+        dd = self.ds.sphere(self.ds.domain_center,
+                            self.ds.domain_width[0] * 0.25)
+        dd["velocity_divergence"]
+
+    def time_gas_quantities(self):
+        dd = self.ds.all_data()
+        dd.quantities.extrema("density")
+        dd.quantities.extrema(["velocity_x", "velocity_y", "velocity_z"])

diff -r 665e2b6cdf78aa7722c300a6505d1bad3c5d44a3 -r 28733726b2a751e774c8b7ae46121aa57fd1060f benchmarks/benchmarks/small_gadget.py
--- /dev/null
+++ b/benchmarks/benchmarks/small_gadget.py
@@ -0,0 +1,35 @@
+import numpy as np
+import yt
+
+class SmallGadgetSuite:
+    dsname = "snapshot_033/snap_033.0.hdf5"
+    timeout = 360.0
+
+    def setup(self):
+        self.ds = yt.load(self.dsname)
+
+    def time_all_particles(self):
+        dd = self.ds.all_data()
+        dd["all", "particle_velocity_x"]
+        dd["all", "particle_velocity_y"]
+        dd["all", "particle_velocity_z"]
+
+    def time_all_particles_derived(self):
+        dd = self.ds.all_data()
+        dd["all", "particle_velocity_magnitude"]
+
+    def time_all_sph_kernel(self):
+        dd = self.ds.all_data()
+        dd["gas", "density"]
+
+    def time_project_unweight(self):
+        proj = self.ds.proj(("deposit", "all_density"), 0)
+
+    def time_project_weight(self):
+        proj = self.ds.proj(("deposit", "all_density"), 0, ("gas", "density"))
+
+    def time_particle_quantities(self):
+        dd = self.ds.all_data()
+        dd.quantities.extrema("particle_mass")
+        dd.quantities.extrema("particle_velocity_magnitude")
+        dd.quantities.extrema(["particle_velocity_%s" % ax for ax in 'xyz'])

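One point worth spelling out in the suite above: fields are addressed by
(field_type, field_name) tuples.  For SPH data, ("deposit", "all_density")
is a mesh field built by depositing particles onto the index, while
("gas", "density") is produced by kernel smoothing, which is what
time_all_sph_kernel exercises.  A short sketch, assuming the same snapshot
is available locally:

    import yt

    ds = yt.load("snapshot_033/snap_033.0.hdf5")
    ad = ds.all_data()
    deposited = ad["deposit", "all_density"]  # particles deposited on the mesh
    smoothed = ad["gas", "density"]           # SPH kernel-smoothed density
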
diff -r 665e2b6cdf78aa7722c300a6505d1bad3c5d44a3 -r 28733726b2a751e774c8b7ae46121aa57fd1060f benchmarks/benchmarks/small_ramses.py
--- /dev/null
+++ b/benchmarks/benchmarks/small_ramses.py
@@ -0,0 +1,42 @@
+import numpy as np
+import yt
+
+class SmallRAMSESSuite:
+    dsname = "output_00080/info_00080.txt"
+    def setup(self):
+        self.ds = yt.load(self.dsname)
+
+    def time_all_particles(self):
+        dd = self.ds.all_data()
+        dd["all", "particle_velocity_x"]
+        dd["all", "particle_velocity_y"]
+        dd["all", "particle_velocity_z"]
+
+    def time_all_particles_derived(self):
+        dd = self.ds.all_data()
+        dd["all", "particle_velocity_magnitude"]
+
+    def time_gas_read(self):
+        dd = self.ds.all_data()
+        dd["gas", "density"]
+
+    def time_gas_derived(self):
+        dd = self.ds.all_data()
+        dd["gas", "velocity_magnitude"]
+
+    def time_project_unweight(self):
+        proj = self.ds.proj("density", 0)
+
+    def time_project_weight(self):
+        proj = self.ds.proj("density", 0, "density")
+
+    def time_particle_quantities(self):
+        dd = self.ds.all_data()
+        dd.quantities.extrema("particle_mass")
+        dd.quantities.extrema("particle_velocity_magnitude")
+        dd.quantities.extrema(["particle_velocity_%s" % ax for ax in 'xyz'])
+
+    def time_gas_quantities(self):
+        dd = self.ds.all_data()
+        dd.quantities.extrema("density")
+        dd.quantities.extrema(["velocity_x", "velocity_y", "velocity_z"])

diff -r 665e2b6cdf78aa7722c300a6505d1bad3c5d44a3 -r 28733726b2a751e774c8b7ae46121aa57fd1060f distribute_setup.py
--- a/distribute_setup.py
+++ /dev/null
@@ -1,541 +0,0 @@
-#!python
-"""Bootstrap distribute installation
-
-If you want to use setuptools in your package's setup.py, just include this
-file in the same directory with it, and add this to the top of your setup.py::
-
-    from distribute_setup import use_setuptools
-    use_setuptools()
-
-If you want to require a specific version of setuptools, set a download
-mirror, or use an alternate download directory, you can do so by supplying
-the appropriate options to ``use_setuptools()``.
-
-This file can also be run as a script to install or upgrade setuptools.
-"""
-import os
-import shutil
-import sys
-import time
-import fnmatch
-import tempfile
-import tarfile
-import optparse
-
-from distutils import log
-
-try:
-    from site import USER_SITE
-except ImportError:
-    USER_SITE = None
-
-try:
-    import subprocess
-
-    def _python_cmd(*args):
-        args = (sys.executable,) + args
-        return subprocess.call(args) == 0
-
-except ImportError:
-    # will be used for python 2.3
-    def _python_cmd(*args):
-        args = (sys.executable,) + args
-        # quoting arguments if windows
-        if sys.platform == 'win32':
-            def quote(arg):
-                if ' ' in arg:
-                    return '"%s"' % arg
-                return arg
-            args = [quote(arg) for arg in args]
-        return os.spawnl(os.P_WAIT, sys.executable, *args) == 0
-
-DEFAULT_VERSION = "0.6.32"
-DEFAULT_URL = "http://pypi.python.org/packages/source/d/distribute/"
-SETUPTOOLS_FAKED_VERSION = "0.6c11"
-
-SETUPTOOLS_PKG_INFO = """\
-Metadata-Version: 1.0
-Name: setuptools
-Version: %s
-Summary: xxxx
-Home-page: xxx
-Author: xxx
-Author-email: xxx
-License: xxx
-Description: xxx
-""" % SETUPTOOLS_FAKED_VERSION
-
-
-def _install(tarball, install_args=()):
-    # extracting the tarball
-    tmpdir = tempfile.mkdtemp()
-    log.warn('Extracting in %s', tmpdir)
-    old_wd = os.getcwd()
-    try:
-        os.chdir(tmpdir)
-        tar = tarfile.open(tarball)
-        _extractall(tar)
-        tar.close()
-
-        # going in the directory
-        subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0])
-        os.chdir(subdir)
-        log.warn('Now working in %s', subdir)
-
-        # installing
-        log.warn('Installing Distribute')
-        if not _python_cmd('setup.py', 'install', *install_args):
-            log.warn('Something went wrong during the installation.')
-            log.warn('See the error message above.')
-            # exitcode will be 2
-            return 2
-    finally:
-        os.chdir(old_wd)
-        shutil.rmtree(tmpdir)
-
-
-def _build_egg(egg, tarball, to_dir):
-    # extracting the tarball
-    tmpdir = tempfile.mkdtemp()
-    log.warn('Extracting in %s', tmpdir)
-    old_wd = os.getcwd()
-    try:
-        os.chdir(tmpdir)
-        tar = tarfile.open(tarball)
-        _extractall(tar)
-        tar.close()
-
-        # going in the directory
-        subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0])
-        os.chdir(subdir)
-        log.warn('Now working in %s', subdir)
-
-        # building an egg
-        log.warn('Building a Distribute egg in %s', to_dir)
-        _python_cmd('setup.py', '-q', 'bdist_egg', '--dist-dir', to_dir)
-
-    finally:
-        os.chdir(old_wd)
-        shutil.rmtree(tmpdir)
-    # returning the result
-    log.warn(egg)
-    if not os.path.exists(egg):
-        raise IOError('Could not build the egg.')
-
-
-def _do_download(version, download_base, to_dir, download_delay):
-    egg = os.path.join(to_dir, 'distribute-%s-py%d.%d.egg'
-                       % (version, sys.version_info[0], sys.version_info[1]))
-    if not os.path.exists(egg):
-        tarball = download_setuptools(version, download_base,
-                                      to_dir, download_delay)
-        _build_egg(egg, tarball, to_dir)
-    sys.path.insert(0, egg)
-    import setuptools
-    setuptools.bootstrap_install_from = egg
-
-
-def use_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
-                   to_dir=os.curdir, download_delay=15, no_fake=True):
-    # making sure we use the absolute path
-    to_dir = os.path.abspath(to_dir)
-    was_imported = 'pkg_resources' in sys.modules or \
-        'setuptools' in sys.modules
-    try:
-        try:
-            import pkg_resources
-            if not hasattr(pkg_resources, '_distribute'):
-                if not no_fake:
-                    _fake_setuptools()
-                raise ImportError
-        except ImportError:
-            return _do_download(version, download_base, to_dir, download_delay)
-        try:
-            pkg_resources.require("distribute>=" + version)
-            return
-        except pkg_resources.VersionConflict:
-            e = sys.exc_info()[1]
-            if was_imported:
-                sys.stderr.write(
-                "The required version of distribute (>=%s) is not available,\n"
-                "and can't be installed while this script is running. Please\n"
-                "install a more recent version first, using\n"
-                "'easy_install -U distribute'."
-                "\n\n(Currently using %r)\n" % (version, e.args[0]))
-                sys.exit(2)
-            else:
-                del pkg_resources, sys.modules['pkg_resources']    # reload ok
-                return _do_download(version, download_base, to_dir,
-                                    download_delay)
-        except pkg_resources.DistributionNotFound:
-            return _do_download(version, download_base, to_dir,
-                                download_delay)
-    finally:
-        if not no_fake:
-            _create_fake_setuptools_pkg_info(to_dir)
-
-
-def download_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
-                        to_dir=os.curdir, delay=15):
-    """Download distribute from a specified location and return its filename
-
-    `version` should be a valid distribute version number that is available
-    as an egg for download under the `download_base` URL (which should end
-    with a '/'). `to_dir` is the directory where the egg will be downloaded.
-    `delay` is the number of seconds to pause before an actual download
-    attempt.
-    """
-    # making sure we use the absolute path
-    to_dir = os.path.abspath(to_dir)
-    try:
-        from urllib.request import urlopen
-    except ImportError:
-        from urllib2 import urlopen
-    tgz_name = "distribute-%s.tar.gz" % version
-    url = download_base + tgz_name
-    saveto = os.path.join(to_dir, tgz_name)
-    src = dst = None
-    if not os.path.exists(saveto):  # Avoid repeated downloads
-        try:
-            log.warn("Downloading %s", url)
-            src = urlopen(url)
-            # Read/write all in one block, so we don't create a corrupt file
-            # if the download is interrupted.
-            data = src.read()
-            dst = open(saveto, "wb")
-            dst.write(data)
-        finally:
-            if src:
-                src.close()
-            if dst:
-                dst.close()
-    return os.path.realpath(saveto)
-
-
-def _no_sandbox(function):
-    def __no_sandbox(*args, **kw):
-        try:
-            from setuptools.sandbox import DirectorySandbox
-            if not hasattr(DirectorySandbox, '_old'):
-                def violation(*args):
-                    pass
-                DirectorySandbox._old = DirectorySandbox._violation
-                DirectorySandbox._violation = violation
-                patched = True
-            else:
-                patched = False
-        except ImportError:
-            patched = False
-
-        try:
-            return function(*args, **kw)
-        finally:
-            if patched:
-                DirectorySandbox._violation = DirectorySandbox._old
-                del DirectorySandbox._old
-
-    return __no_sandbox
-
-
-def _patch_file(path, content):
-    """Will backup the file then patch it"""
-    existing_content = open(path).read()
-    if existing_content == content:
-        # already patched
-        log.warn('Already patched.')
-        return False
-    log.warn('Patching...')
-    _rename_path(path)
-    f = open(path, 'w')
-    try:
-        f.write(content)
-    finally:
-        f.close()
-    return True
-
-_patch_file = _no_sandbox(_patch_file)
-
-
-def _same_content(path, content):
-    return open(path).read() == content
-
-
-def _rename_path(path):
-    new_name = path + '.OLD.%s' % time.time()
-    log.warn('Renaming %s to %s', path, new_name)
-    os.rename(path, new_name)
-    return new_name
-
-
-def _remove_flat_installation(placeholder):
-    if not os.path.isdir(placeholder):
-        log.warn('Unknown installation at %s', placeholder)
-        return False
-    found = False
-    for file in os.listdir(placeholder):
-        if fnmatch.fnmatch(file, 'setuptools*.egg-info'):
-            found = True
-            break
-    if not found:
-        log.warn('Could not locate setuptools*.egg-info')
-        return
-
-    log.warn('Moving elements out of the way...')
-    pkg_info = os.path.join(placeholder, file)
-    if os.path.isdir(pkg_info):
-        patched = _patch_egg_dir(pkg_info)
-    else:
-        patched = _patch_file(pkg_info, SETUPTOOLS_PKG_INFO)
-
-    if not patched:
-        log.warn('%s already patched.', pkg_info)
-        return False
-    # now let's move the files out of the way
-    for element in ('setuptools', 'pkg_resources.py', 'site.py'):
-        element = os.path.join(placeholder, element)
-        if os.path.exists(element):
-            _rename_path(element)
-        else:
-            log.warn('Could not find the %s element of the '
-                     'Setuptools distribution', element)
-    return True
-
-_remove_flat_installation = _no_sandbox(_remove_flat_installation)
-
-
-def _after_install(dist):
-    log.warn('After install bootstrap.')
-    placeholder = dist.get_command_obj('install').install_purelib
-    _create_fake_setuptools_pkg_info(placeholder)
-
-
-def _create_fake_setuptools_pkg_info(placeholder):
-    if not placeholder or not os.path.exists(placeholder):
-        log.warn('Could not find the install location')
-        return
-    pyver = '%s.%s' % (sys.version_info[0], sys.version_info[1])
-    setuptools_file = 'setuptools-%s-py%s.egg-info' % \
-            (SETUPTOOLS_FAKED_VERSION, pyver)
-    pkg_info = os.path.join(placeholder, setuptools_file)
-    if os.path.exists(pkg_info):
-        log.warn('%s already exists', pkg_info)
-        return
-
-    log.warn('Creating %s', pkg_info)
-    try:
-        f = open(pkg_info, 'w')
-    except EnvironmentError:
-        log.warn("Don't have permissions to write %s, skipping", pkg_info)
-        return
-    try:
-        f.write(SETUPTOOLS_PKG_INFO)
-    finally:
-        f.close()
-
-    pth_file = os.path.join(placeholder, 'setuptools.pth')
-    log.warn('Creating %s', pth_file)
-    f = open(pth_file, 'w')
-    try:
-        f.write(os.path.join(os.curdir, setuptools_file))
-    finally:
-        f.close()
-
-_create_fake_setuptools_pkg_info = _no_sandbox(
-    _create_fake_setuptools_pkg_info
-)
-
-
-def _patch_egg_dir(path):
-    # let's check if it's already patched
-    pkg_info = os.path.join(path, 'EGG-INFO', 'PKG-INFO')
-    if os.path.exists(pkg_info):
-        if _same_content(pkg_info, SETUPTOOLS_PKG_INFO):
-            log.warn('%s already patched.', pkg_info)
-            return False
-    _rename_path(path)
-    os.mkdir(path)
-    os.mkdir(os.path.join(path, 'EGG-INFO'))
-    pkg_info = os.path.join(path, 'EGG-INFO', 'PKG-INFO')
-    f = open(pkg_info, 'w')
-    try:
-        f.write(SETUPTOOLS_PKG_INFO)
-    finally:
-        f.close()
-    return True
-
-_patch_egg_dir = _no_sandbox(_patch_egg_dir)
-
-
-def _before_install():
-    log.warn('Before install bootstrap.')
-    _fake_setuptools()
-
-
-def _under_prefix(location):
-    if 'install' not in sys.argv:
-        return True
-    args = sys.argv[sys.argv.index('install') + 1:]
-    for index, arg in enumerate(args):
-        for option in ('--root', '--prefix'):
-            if arg.startswith('%s=' % option):
-                top_dir = arg.split('root=')[-1]
-                return location.startswith(top_dir)
-            elif arg == option:
-                if len(args) > index:
-                    top_dir = args[index + 1]
-                    return location.startswith(top_dir)
-        if arg == '--user' and USER_SITE is not None:
-            return location.startswith(USER_SITE)
-    return True
-
-
-def _fake_setuptools():
-    log.warn('Scanning installed packages')
-    try:
-        import pkg_resources
-    except ImportError:
-        # we're cool
-        log.warn('Setuptools or Distribute does not seem to be installed.')
-        return
-    ws = pkg_resources.working_set
-    try:
-        setuptools_dist = ws.find(
-            pkg_resources.Requirement.parse('setuptools', replacement=False)
-            )
-    except TypeError:
-        # old distribute API
-        setuptools_dist = ws.find(
-            pkg_resources.Requirement.parse('setuptools')
-        )
-
-    if setuptools_dist is None:
-        log.warn('No setuptools distribution found')
-        return
-    # detecting if it was already faked
-    setuptools_location = setuptools_dist.location
-    log.warn('Setuptools installation detected at %s', setuptools_location)
-
-    # if --root or --preix was provided, and if
-    # setuptools is not located in them, we don't patch it
-    if not _under_prefix(setuptools_location):
-        log.warn('Not patching, --root or --prefix is installing Distribute'
-                 ' in another location')
-        return
-
-    # let's see if its an egg
-    if not setuptools_location.endswith('.egg'):
-        log.warn('Non-egg installation')
-        res = _remove_flat_installation(setuptools_location)
-        if not res:
-            return
-    else:
-        log.warn('Egg installation')
-        pkg_info = os.path.join(setuptools_location, 'EGG-INFO', 'PKG-INFO')
-        if (os.path.exists(pkg_info) and
-            _same_content(pkg_info, SETUPTOOLS_PKG_INFO)):
-            log.warn('Already patched.')
-            return
-        log.warn('Patching...')
-        # let's create a fake egg replacing setuptools one
-        res = _patch_egg_dir(setuptools_location)
-        if not res:
-            return
-    log.warn('Patching complete.')
-    _relaunch()
-
-
-def _relaunch():
-    log.warn('Relaunching...')
-    # we have to relaunch the process
-    # pip marker to avoid a relaunch bug
-    _cmd1 = ['-c', 'install', '--single-version-externally-managed']
-    _cmd2 = ['-c', 'install', '--record']
-    if sys.argv[:3] == _cmd1 or sys.argv[:3] == _cmd2:
-        sys.argv[0] = 'setup.py'
-    args = [sys.executable] + sys.argv
-    sys.exit(subprocess.call(args))
-
-
-def _extractall(self, path=".", members=None):
-    """Extract all members from the archive to the current working
-       directory and set owner, modification time and permissions on
-       directories afterwards. `path' specifies a different directory
-       to extract to. `members' is optional and must be a subset of the
-       list returned by getmembers().
-    """
-    import copy
-    import operator
-    from tarfile import ExtractError
-    directories = []
-
-    if members is None:
-        members = self
-
-    for tarinfo in members:
-        if tarinfo.isdir():
-            # Extract directories with a safe mode.
-            directories.append(tarinfo)
-            tarinfo = copy.copy(tarinfo)
-            tarinfo.mode = 448  # decimal for oct 0700
-        self.extract(tarinfo, path)
-
-    # Reverse sort directories.
-    if sys.version_info < (2, 4):
-        def sorter(dir1, dir2):
-            return cmp(dir1.name, dir2.name)
-        directories.sort(sorter)
-        directories.reverse()
-    else:
-        directories.sort(key=operator.attrgetter('name'), reverse=True)
-
-    # Set correct owner, mtime and filemode on directories.
-    for tarinfo in directories:
-        dirpath = os.path.join(path, tarinfo.name)
-        try:
-            self.chown(tarinfo, dirpath)
-            self.utime(tarinfo, dirpath)
-            self.chmod(tarinfo, dirpath)
-        except ExtractError:
-            e = sys.exc_info()[1]
-            if self.errorlevel > 1:
-                raise
-            else:
-                self._dbg(1, "tarfile: %s" % e)
-
-
-def _build_install_args(options):
-    """
-    Build the arguments to 'python setup.py install' on the distribute package
-    """
-    install_args = []
-    if options.user_install:
-        if sys.version_info < (2, 6):
-            log.warn("--user requires Python 2.6 or later")
-            raise SystemExit(1)
-        install_args.append('--user')
-    return install_args
-
-def _parse_args():
-    """
-    Parse the command line for options
-    """
-    parser = optparse.OptionParser()
-    parser.add_option(
-        '--user', dest='user_install', action='store_true', default=False,
-        help='install in user site package (requires Python 2.6 or later)')
-    parser.add_option(
-        '--download-base', dest='download_base', metavar="URL",
-        default=DEFAULT_URL,
-        help='alternative URL from where to download the distribute package')
-    options, args = parser.parse_args()
-    # positional arguments are ignored
-    return options
-
-def main(version=DEFAULT_VERSION):
-    """Install or upgrade setuptools and EasyInstall"""
-    options = _parse_args()
-    tarball = download_setuptools(download_base=options.download_base)
-    return _install(tarball, _build_install_args(options))
-
-if __name__ == '__main__':
-    sys.exit(main())

diff -r 665e2b6cdf78aa7722c300a6505d1bad3c5d44a3 -r 28733726b2a751e774c8b7ae46121aa57fd1060f doc/README
--- a/doc/README
+++ b/doc/README
@@ -7,4 +7,4 @@
 Because the documentation requires a number of dependencies, we provide
 pre-built versions online, accessible here:
 
-http://yt-project.org/docs/dev-3.0/
+http://yt-project.org/docs/dev/

diff -r 665e2b6cdf78aa7722c300a6505d1bad3c5d44a3 -r 28733726b2a751e774c8b7ae46121aa57fd1060f doc/extensions/notebook_sphinxext.py
--- a/doc/extensions/notebook_sphinxext.py
+++ b/doc/extensions/notebook_sphinxext.py
@@ -170,8 +170,8 @@
     try:
         nb_runner.run_notebook(skip_exceptions=skip_exceptions)
     except NotebookError as e:
-        print ''
-        print e
+        print('')
+        print(e)
         # Return the traceback, filtering out ANSI color codes.
         # http://stackoverflow.com/questions/13506033/filtering-out-ansi-escape-sequences
         return "Notebook conversion failed with the " \

diff -r 665e2b6cdf78aa7722c300a6505d1bad3c5d44a3 -r 28733726b2a751e774c8b7ae46121aa57fd1060f doc/extensions/numpydocmod/docscrape.py
--- a/doc/extensions/numpydocmod/docscrape.py
+++ b/doc/extensions/numpydocmod/docscrape.py
@@ -451,7 +451,7 @@
 
         if self._role:
             if not roles.has_key(self._role):
-                print "Warning: invalid role %s" % self._role
+                print("Warning: invalid role %s" % self._role)
             out += '.. %s:: %s\n    \n\n' % (roles.get(self._role,''),
                                              func_name)
 

diff -r 665e2b6cdf78aa7722c300a6505d1bad3c5d44a3 -r 28733726b2a751e774c8b7ae46121aa57fd1060f doc/extensions/numpydocmod/phantom_import.py
--- a/doc/extensions/numpydocmod/phantom_import.py
+++ b/doc/extensions/numpydocmod/phantom_import.py
@@ -23,7 +23,7 @@
 def initialize(app):
     fn = app.config.phantom_import_file
     if (fn and os.path.isfile(fn)):
-        print "[numpydoc] Phantom importing modules from", fn, "..."
+        print("[numpydoc] Phantom importing modules from", fn, "...")
         import_phantom_module(fn)
 
 #------------------------------------------------------------------------------

diff -r 665e2b6cdf78aa7722c300a6505d1bad3c5d44a3 -r 28733726b2a751e774c8b7ae46121aa57fd1060f doc/extensions/pythonscript_sphinxext.py
--- a/doc/extensions/pythonscript_sphinxext.py
+++ b/doc/extensions/pythonscript_sphinxext.py
@@ -37,9 +37,9 @@
             f.write(content)
 
         # Use sphinx logger?
-        print ""
-        print content
-        print ""
+        print("")
+        print(content)
+        print("")
 
         subprocess.call(['python', 'temp.py'])
 

diff -r 665e2b6cdf78aa7722c300a6505d1bad3c5d44a3 -r 28733726b2a751e774c8b7ae46121aa57fd1060f doc/helper_scripts/code_support.py
--- a/doc/helper_scripts/code_support.py
+++ b/doc/helper_scripts/code_support.py
@@ -82,14 +82,14 @@
                          LevelOfSupport = "None")
 )
 
-print "|| . ||",
+print("|| . ||", end=' ')
 for c in code_names:
-    print "%s || " % (c),
-print 
+    print("%s || " % (c), end=' ')
+print() 
 
 for vn in vals:
-    print "|| !%s ||" % (vn),
+    print("|| !%s ||" % (vn), end=' ')
     for c in code_names:
-        print "%s || " % (codes[c].support[vn]),
-    print
+        print("%s || " % (codes[c].support[vn]), end=' ')
+    print()
 

diff -r 665e2b6cdf78aa7722c300a6505d1bad3c5d44a3 -r 28733726b2a751e774c8b7ae46121aa57fd1060f doc/helper_scripts/parse_cb_list.py
--- a/doc/helper_scripts/parse_cb_list.py
+++ b/doc/helper_scripts/parse_cb_list.py
@@ -35,6 +35,6 @@
 
 for n,c in sorted(callback_registry.items()):
     write_docstring(output, n, c)
-    print ".. autoclass:: yt.visualization.plot_modifications.%s" % n
-    print "   :members:"
-    print
+    print(".. autoclass:: yt.visualization.plot_modifications.%s" % n)
+    print("   :members:")
+    print()

diff -r 665e2b6cdf78aa7722c300a6505d1bad3c5d44a3 -r 28733726b2a751e774c8b7ae46121aa57fd1060f doc/helper_scripts/parse_dq_list.py
--- a/doc/helper_scripts/parse_dq_list.py
+++ b/doc/helper_scripts/parse_dq_list.py
@@ -31,5 +31,5 @@
 
 dd = ds.all_data()
 for n,func in sorted(dd.quantities.functions.items()):
-    print n, func
+    print(n, func)
     write_docstring(output, n, func[1])

diff -r 665e2b6cdf78aa7722c300a6505d1bad3c5d44a3 -r 28733726b2a751e774c8b7ae46121aa57fd1060f doc/helper_scripts/parse_object_list.py
--- a/doc/helper_scripts/parse_object_list.py
+++ b/doc/helper_scripts/parse_object_list.py
@@ -29,5 +29,5 @@
 
 for n,c in sorted(ds.__dict__.items()):
     if hasattr(c, '_con_args'):
-        print n
+        print(n)
         write_docstring(output, n, c)

diff -r 665e2b6cdf78aa7722c300a6505d1bad3c5d44a3 -r 28733726b2a751e774c8b7ae46121aa57fd1060f doc/helper_scripts/run_recipes.py
--- a/doc/helper_scripts/run_recipes.py
+++ b/doc/helper_scripts/run_recipes.py
@@ -5,16 +5,21 @@
 import sys
 import shutil
 import tempfile
+import traceback
+import subprocess
+import matplotlib
+matplotlib.use('Agg')
 from multiprocessing import Pool
 from yt.config import ytcfg
 
 FPATTERNS = ['*.png', '*.txt', '*.h5', '*.dat']
-DPATTERNS = ['LC*', 'LR', 'DD0046', 'halo_analysis']
+DPATTERNS = ['LC*', 'LR', 'DD0046']
+BADF = ['cloudy_emissivity.h5', 'apec_emissivity.h5',
+        'xray_emissivity.h5', 'AMRGridData_Slice_x_density.png']
 CWD = os.getcwd()
-DIR = 'source/cookbook/_static'
-DESTDIR = os.path.join(CWD, DIR)
-#NEEDS_SERIAL = ["light_cone_with_halo_mask"]
 ytcfg["yt", "serialize"] = "False"
+PARALLEL_TEST = {"rockstar_nest": "3"}
+BLACKLIST = []
 
 
 def prep_dirs():
@@ -22,38 +27,55 @@
         os.symlink(directory, os.path.basename(directory))
 
 
-def run_receipe((receipe,)):
-    module_name, ext = os.path.splitext(os.path.basename(receipe))
+def run_recipe((recipe,)):
+    module_name, ext = os.path.splitext(os.path.basename(recipe))
+    dest = os.path.join(os.path.dirname(recipe), '_static', module_name)
+    if module_name in BLACKLIST:
+        return 0
     if not os.path.exists("%s/_temp/%s.done" % (CWD, module_name)):
         sys.stderr.write('Started %s\n' % module_name)
-        cwd = os.getcwd()
         tmpdir = tempfile.mkdtemp()
         os.chdir(tmpdir)
         prep_dirs()
-        module = __import__(module_name)
+        if module_name in PARALLEL_TEST:
+            cmd = ["mpiexec", "-n", PARALLEL_TEST[module_name],
+                   "python", recipe]
+        else:
+            cmd = ["python", recipe]
+        try:
+            subprocess.check_call(cmd)
+        except:
+            trace = "".join(traceback.format_exception(*sys.exc_info()))
+            trace += " in module: %s\n" % module_name
+            trace += " recipe: %s\n" % recipe
+            raise Exception(trace)
         open("%s/_temp/%s.done" % (CWD, module_name), 'wb').close()
         for pattern in FPATTERNS:
             for fname in glob.glob(pattern):
-                dst = os.path.join(DESTDIR, module_name)
-                shutil.move(fname, "%s__%s" % (dst, fname))
+                if fname not in BADF:
+                    shutil.move(fname, "%s__%s" % (dest, fname))
         for pattern in DPATTERNS:
             for dname in glob.glob(pattern):
-                dst = os.path.join(DESTDIR, module_name)
-                shutil.move(dname, dst)
-        os.chdir(cwd)
+                shutil.move(dname, dest)
+        os.chdir(CWD)
         shutil.rmtree(tmpdir, True)
         sys.stderr.write('Finished with %s\n' % module_name)
     return 0
 
-for path in ['_temp', DESTDIR]:
-    if os.path.exists(path):
-        shutil.rmtree(path)
-    os.makedirs(path)
+for path in ['_temp', 'source/cookbook/_static',
+             'source/visualizing/colormaps/_static']:
+    fpath = os.path.join(CWD, path)
+    if os.path.exists(fpath):
+        shutil.rmtree(fpath)
+    os.makedirs(fpath)
 
 os.chdir('_temp')
-sys.path.append(os.path.join(CWD, 'source/cookbook'))
-WPOOL = Pool(processes=8)
-RES = WPOOL.map_async(run_receipe, (
-    (receipe,) for receipe in glob.glob('%s/*.py' % sys.path[-1])))
+recipes = []
+for rpath in ['source/cookbook', 'source/visualizing/colormaps']:
+    fpath = os.path.join(CWD, rpath)
+    sys.path.append(fpath)
+    recipes += glob.glob('%s/*.py' % fpath)
+WPOOL = Pool(processes=6)
+RES = WPOOL.map_async(run_recipe, ((recipe,) for recipe in recipes))
 RES.get()
 os.chdir(CWD)

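The new PARALLEL_TEST mapping in run_recipes.py routes selected recipes
through mpiexec; the dispatch reduces to choosing an argv list before
subprocess.check_call.  In isolation (a sketch, with the recipe filename
purely illustrative):

    import subprocess

    PARALLEL_TEST = {"rockstar_nest": "3"}

    def build_cmd(module_name, recipe):
        # Recipes listed in PARALLEL_TEST run under MPI with the given
        # process count; everything else runs as a plain python script.
        if module_name in PARALLEL_TEST:
            return ["mpiexec", "-n", PARALLEL_TEST[module_name],
                    "python", recipe]
        return ["python", recipe]

    # build_cmd("rockstar_nest", "rockstar_nest.py")
    # -> ["mpiexec", "-n", "3", "python", "rockstar_nest.py"]
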
diff -r 665e2b6cdf78aa7722c300a6505d1bad3c5d44a3 -r 28733726b2a751e774c8b7ae46121aa57fd1060f doc/helper_scripts/show_fields.py
--- a/doc/helper_scripts/show_fields.py
+++ b/doc/helper_scripts/show_fields.py
@@ -117,7 +117,7 @@
    :backlinks: none
 
 """
-print header
+print(header)
 
 seen = []
 
@@ -126,39 +126,39 @@
     if in_cgs:
         unit_object = unit_object.get_cgs_equivalent()
     latex = unit_object.latex_representation()
-    return latex.replace('\/', '~')
+    return latex.replace('\ ', '~')
 
 def print_all_fields(fl):
     for fn in sorted(fl):
         df = fl[fn]
         f = df._function
         s = "%s" % (df.name,)
-        print s
-        print "^" * len(s)
-        print
+        print(s)
+        print("^" * len(s))
+        print()
         if len(df.units) > 0:
             # Most universal fields are in CGS except for these special fields
             if df.name[1] in ['particle_position', 'particle_position_x', \
                          'particle_position_y', 'particle_position_z', \
                          'entropy', 'kT', 'metallicity', 'dx', 'dy', 'dz',\
                          'cell_volume', 'x', 'y', 'z']:
-                print "   * Units: :math:`%s`" % fix_units(df.units)
+                print("   * Units: :math:`%s`" % fix_units(df.units))
             else:
-                print "   * Units: :math:`%s`" % fix_units(df.units, in_cgs=True)
-        print "   * Particle Type: %s" % (df.particle_type)
-        print
-        print "**Field Source**"
-        print
+                print("   * Units: :math:`%s`" % fix_units(df.units, in_cgs=True))
+        print("   * Particle Type: %s" % (df.particle_type))
+        print()
+        print("**Field Source**")
+        print()
         if f == NullFunc:
-            print "No source available."
-            print
+            print("No source available.")
+            print()
             continue
         else:
-            print ".. code-block:: python"
-            print
+            print(".. code-block:: python")
+            print()
             for line in inspect.getsource(f).split("\n"):
-                print "  " + line
-            print
+                print("  " + line)
+            print()
 
 ds.index
 print_all_fields(ds.field_info)
@@ -225,10 +225,10 @@
         else:
             known_particle_fields = []
         if nfields > 0:
-            print ".. _%s_specific_fields:\n" % dset_name.replace("Dataset", "")
+            print(".. _%s_specific_fields:\n" % dset_name.replace("Dataset", ""))
             h = "%s-Specific Fields" % dset_name.replace("Dataset", "")
-            print h
-            print "-" * len(h) + "\n"
+            print(h)
+            print("-" * len(h) + "\n")
 
             field_stuff = []
             for field in known_other_fields:
@@ -263,18 +263,18 @@
                               al="="*len_aliases, aw=len_aliases,
                               pt="="*len_part, pw=len_part,
                               dp="="*len_disp, dw=len_disp)
-            print div
-            print header
-            print div
+            print(div)
+            print(header)
+            print(div)
 
             for f in field_stuff:
-                print fstr.format(nm=f.name, nw=len_name,
+                print(fstr.format(nm=f.name, nw=len_name,
                                   un=f.units, uw=len_units,
                                   al=f.aliases, aw=len_aliases,
                                   pt=f.ptype, pw=len_part,
-                                  dp=f.dname, dw=len_disp)
+                                  dp=f.dname, dw=len_disp))
                 
-            print div
-            print ""
+            print(div)
+            print("")
 
-print footer
+print(footer)

diff -r 665e2b6cdf78aa7722c300a6505d1bad3c5d44a3 -r 28733726b2a751e774c8b7ae46121aa57fd1060f doc/helper_scripts/split_auto.py
--- a/doc/helper_scripts/split_auto.py
+++ b/doc/helper_scripts/split_auto.py
@@ -60,7 +60,7 @@
 for key, val in file_names.items():
     title, file = val
     fn = file.rsplit("/", 1)[0] + ".rst"
-    print fn
+    print(fn)
     f = open(fn, "w")
     dn = fn.split("/")[-1][:-4]
     dd = dict(header = "=" * len(title), title=title, dn = dn)

diff -r 665e2b6cdf78aa7722c300a6505d1bad3c5d44a3 -r 28733726b2a751e774c8b7ae46121aa57fd1060f doc/helper_scripts/table.py
--- a/doc/helper_scripts/table.py
+++ b/doc/helper_scripts/table.py
@@ -75,4 +75,4 @@
     for subheading in items:
         s += subheading_template % subheading
     t += heading_template % (heading, s)
-print t
+print(t)

diff -r 665e2b6cdf78aa7722c300a6505d1bad3c5d44a3 -r 28733726b2a751e774c8b7ae46121aa57fd1060f doc/helper_scripts/update_recipes.py
--- a/doc/helper_scripts/update_recipes.py
+++ b/doc/helper_scripts/update_recipes.py
@@ -8,7 +8,7 @@
 
 .. note::
    All of these scripts are located in the mercurial repository at
-   http://hg.yt-project.org/cookbook/
+   http://bitbucket.org/yt_analysis/cookbook/
 
 """
 footer = """ """
@@ -23,7 +23,7 @@
     recipes = cStringIO.StringIO()
 recipes.write(header)
 
-url = "here: http://hg.yt-project.org/cookbook/raw/tip/%s ."
+url = "here: http://bitbucket.org/yt_analysis/cookbook/raw/tip/%s ."
 
 def cond_output(f, v):
     if not v:
@@ -31,19 +31,19 @@
     return True
 
 repo = hg.repository(uii, "../cookbook/")
-commands.pull(uii, repo, "http://hg.yt-project.org/cookbook/")
+commands.pull(uii, repo, "http://bitbucket.org/yt_analysis/cookbook/")
 ctx = repo["tip"]
 for file in ctx:
     if not file.startswith("recipes/"): continue
-    print "Parsing %s" % (file)
+    print("Parsing %s" % (file))
     lines = ctx[file].data().split("\n")
     fn = file[8:-3]
     title = fn.replace("_", " ").capitalize()
     title += "\n" + "-" * len(title) + "\n"*2
     title = ".. _cookbook-%s:\n\n%s" % (fn, title)
     if lines[0] != '"""':
-        print "    Bad docstring: breaking."
-        print file
+        print("    Bad docstring: breaking.")
+        print(file)
     di = lines[1:].index('"""')
     docstring = lines[1:di+1]
     recipe = lines[di+2:]

diff -r 665e2b6cdf78aa7722c300a6505d1bad3c5d44a3 -r 28733726b2a751e774c8b7ae46121aa57fd1060f doc/install_script.sh
--- a/doc/install_script.sh
+++ b/doc/install_script.sh
@@ -1,18 +1,14 @@
 #
 # Hi there!  Welcome to the yt installation script.
 #
+# First things first, if you experience problems, please visit the Help 
+# section at http://yt-project.org.
+#
 # This script is designed to create a fully isolated Python installation
 # with the dependencies you need to run yt.
 #
-# There are a few options, but you only need to set *one* of them.  And
-# that's the next one, DEST_DIR.  But, if you want to use an existing HDF5
-# installation you can set HDF5_DIR, or if you want to use some other
-# subversion checkout of yt, you can set YT_DIR, too.  (It'll already
-# check the current directory and one up.
-#
-# If you experience problems, please visit the Help section at 
-# http://yt-project.org.
-#
+# There are a few options, but you only need to set *one* of them, which is 
+# the next one, DEST_DIR:
 
 DEST_SUFFIX="yt-`uname -m`"
 DEST_DIR="`pwd`/${DEST_SUFFIX/ /}"   # Installation location
@@ -23,16 +19,25 @@
     DEST_DIR=${YT_DEST}
 fi
 
+# What follows are some other options that you may or may not need to change.
+
 # Here's where you put the HDF5 path if you like; otherwise it'll download it
 # and install it on its own
 #HDF5_DIR=
 
+# If you've got yt some other place, set this to point to it. The script will
+# already check the current directory and the one above it in the tree.
+YT_DIR=""
+
 # If you need to supply arguments to the NumPy or SciPy build, supply them here
 # This one turns on gfortran manually:
 #NUMPY_ARGS="--fcompiler=gnu95"
 # If you absolutely can't get the fortran to work, try this:
 #NUMPY_ARGS="--fcompiler=fake"
 
+INST_PY3=0      # Install Python 3 along with Python 2. If this is turned
+                # on, all Python packages (including yt) will be installed
+                # in Python 3 (except Mercurial, which requires Python 2).
 INST_HG=1       # Install Mercurial or not?  If hg is not already
                 # installed, yt cannot be installed.
 INST_ZLIB=1     # On some systems (Kraken) matplotlib has issues with
@@ -50,9 +55,6 @@
 INST_ROCKSTAR=0 # Install the Rockstar halo finder?
 INST_SCIPY=0    # Install scipy?
 
-# If you've got yt some other place, set this to point to it.
-YT_DIR=""
-
 # If you need to pass anything to matplotlib, do so here.
 MPL_SUPP_LDFLAGS=""
 MPL_SUPP_CFLAGS=""
@@ -111,6 +113,7 @@
     echo INST_SQLITE3=${INST_SQLITE3} >> ${CONFIG_FILE}
     echo INST_PYX=${INST_PYX} >> ${CONFIG_FILE}
     echo INST_0MQ=${INST_0MQ} >> ${CONFIG_FILE}
+    echo INST_PY3=${INST_PY3} >> ${CONFIG_FILE}
     echo INST_ROCKSTAR=${INST_ROCKSTAR} >> ${CONFIG_FILE}
     echo INST_SCIPY=${INST_SCIPY} >> ${CONFIG_FILE}
     echo YT_DIR=${YT_DIR} >> ${CONFIG_FILE}
@@ -300,7 +303,7 @@
         echo "  * patch"
         echo 
         echo "You can accomplish this by executing:"
-        echo "$ sudo yum install gcc gcc-g++ gcc-gfortran make patch zip"
+        echo "$ sudo yum install gcc gcc-c++ gcc-gfortran make patch zip"
         echo "$ sudo yum install ncurses-devel uuid-devel openssl-devel readline-devel"
     fi
     if [ -f /etc/SuSE-release ] && [ `grep --count SUSE /etc/SuSE-release` -gt 0 ]
@@ -415,6 +418,10 @@
 get_willwont ${INST_SQLITE3}
 echo "be installing SQLite3"
 
+printf "%-15s = %s so I " "INST_PY3" "${INST_PY3}"
+get_willwont ${INST_PY3}
+echo "be installing Python 3"
+
 printf "%-15s = %s so I " "INST_HG" "${INST_HG}"
 get_willwont ${INST_HG}
 echo "be installing Mercurial"
@@ -487,6 +494,13 @@
     exit 1
 }
 
+if [ $INST_PY3 -eq 1 ]
+then
+	 PYTHON_EXEC='python3.4'
+else 
+	 PYTHON_EXEC='python2.7'
+fi
+
 function do_setup_py
 {
     [ -e $1/done ] && return
@@ -501,19 +515,27 @@
     [ ! -e $LIB/extracted ] && tar xfz $LIB.tar.gz
     touch $LIB/extracted
     BUILD_ARGS=""
+    if [[ $LIB =~ .*mercurial.* ]] 
+    then
+        PYEXE="python2.7"
+    else
+        PYEXE=${PYTHON_EXEC}
+    fi
     case $LIB in
         *h5py*)
-            BUILD_ARGS="--hdf5=${HDF5_DIR}"
+            pushd $LIB &> /dev/null
+            ( ${DEST_DIR}/bin/${PYTHON_EXEC} setup.py configure --hdf5=${HDF5_DIR} 2>&1 ) 1>> ${LOG_FILE} || do_exit
+            popd &> /dev/null
             ;;
         *numpy*)
-            if [ -e ${DEST_DIR}/lib/python2.7/site-packages/numpy/__init__.py ]
+            if [ -e ${DEST_DIR}/lib/${PYTHON_EXEC}/site-packages/numpy/__init__.py ]
             then
-                VER=$(${DEST_DIR}/bin/python -c 'from distutils.version import StrictVersion as SV; \
+                VER=$(${DEST_DIR}/bin/${PYTHON_EXEC} -c 'from distutils.version import StrictVersion as SV; \
                                                  import numpy; print SV(numpy.__version__) < SV("1.8.0")')
                 if [ $VER == "True" ]
                 then
                     echo "Removing previous NumPy instance (see issue #889)"
-                    rm -rf ${DEST_DIR}/lib/python2.7/site-packages/{numpy*,*.pth}
+                    rm -rf ${DEST_DIR}/lib/${PYTHON_EXEC}/site-packages/{numpy*,*.pth}
                 fi
             fi
             ;;
@@ -521,8 +543,8 @@
             ;;
     esac
     cd $LIB
-    ( ${DEST_DIR}/bin/python2.7 setup.py build ${BUILD_ARGS} $* 2>&1 ) 1>> ${LOG_FILE} || do_exit
-    ( ${DEST_DIR}/bin/python2.7 setup.py install    2>&1 ) 1>> ${LOG_FILE} || do_exit
+    ( ${DEST_DIR}/bin/${PYEXE} setup.py build ${BUILD_ARGS} $* 2>&1 ) 1>> ${LOG_FILE} || do_exit
+    ( ${DEST_DIR}/bin/${PYEXE} setup.py install    2>&1 ) 1>> ${LOG_FILE} || do_exit
     touch done
     cd ..
 }
@@ -590,60 +612,64 @@
 # Set paths to what they should be when yt is activated.
 export PATH=${DEST_DIR}/bin:$PATH
 export LD_LIBRARY_PATH=${DEST_DIR}/lib:$LD_LIBRARY_PATH
-export PYTHONPATH=${DEST_DIR}/lib/python2.7/site-packages
+export PYTHONPATH=${DEST_DIR}/lib/${PYTHON_EXEC}/site-packages
 
 mkdir -p ${DEST_DIR}/src
 cd ${DEST_DIR}/src
 
-CYTHON='Cython-0.20.2'
+PYTHON2='Python-2.7.9'
+PYTHON3='Python-3.4.3'
+CYTHON='Cython-0.22'
 PYX='PyX-0.12.1'
-PYTHON='Python-2.7.8'
 BZLIB='bzip2-1.0.6'
-FREETYPE_VER='freetype-2.4.12'
-H5PY='h5py-2.3.1'
-HDF5='hdf5-1.8.14'
-IPYTHON='ipython-2.2.0'
+FREETYPE_VER='freetype-2.4.12' 
+H5PY='h5py-2.5.0'
+HDF5='hdf5-1.8.14' 
+IPYTHON='ipython-2.4.1'
 LAPACK='lapack-3.4.2'
 PNG=libpng-1.6.3
-MATPLOTLIB='matplotlib-1.4.0'
-MERCURIAL='mercurial-3.1'
-NOSE='nose-1.3.4'
-NUMPY='numpy-1.8.2'
-PYTHON_HGLIB='python-hglib-1.0'
-PYZMQ='pyzmq-14.3.1'
+MATPLOTLIB='matplotlib-1.4.3'
+MERCURIAL='mercurial-3.4'
+NOSE='nose-1.3.6'
+NUMPY='numpy-1.9.2'
+PYTHON_HGLIB='python-hglib-1.6'
+PYZMQ='pyzmq-14.5.0'
 ROCKSTAR='rockstar-0.99.6'
-SCIPY='scipy-0.14.0'
+SCIPY='scipy-0.15.1'
 SQLITE='sqlite-autoconf-3071700'
-SYMPY='sympy-0.7.5'
-TORNADO='tornado-4.0.1'
-ZEROMQ='zeromq-4.0.4'
+SYMPY='sympy-0.7.6'
+TORNADO='tornado-4.0.2'
+ZEROMQ='zeromq-4.0.5'
 ZLIB='zlib-1.2.8'
+SETUPTOOLS='setuptools-18.0.1'
 
 # Now we dump all our SHA512 files out.
-echo '118e3ebd76f50bda8187b76654e65caab2c2c403df9b89da525c2c963dedc7b38d898ae0b92d44b278731d969a891eb3f7b5bcc138cfe3e037f175d4c87c29ec  Cython-0.20.2.tar.gz' > Cython-0.20.2.tar.gz.sha512
+echo '856220fa579e272ac38dcef091760f527431ff3b98df9af6e68416fcf77d9659ac5abe5c7dee41331f359614637a4ff452033085335ee499830ed126ab584267  Cython-0.22.tar.gz' > Cython-0.22.tar.gz.sha512
 echo '4941f5aa21aff3743546495fb073c10d2657ff42b2aff401903498638093d0e31e344cce778980f28a7170c6d29eab72ac074277b9d4088376e8692dc71e55c1  PyX-0.12.1.tar.gz' > PyX-0.12.1.tar.gz.sha512
-echo '4b05f0a490ddee37e8fc7970403bb8b72c38e5d173703db40310e78140d9d5c5732789d69c68dbd5605a623e4582f5b9671f82b8239ecdb34ad4261019dace6a  Python-2.7.8.tgz' > Python-2.7.8.tgz.sha512
+echo 'a42f28ed8e49f04cf89e2ea7434c5ecbc264e7188dcb79ab97f745adf664dd9ab57f9a913543731635f90859536244ac37dca9adf0fc2aa1b215ba884839d160  Python-2.7.9.tgz' > Python-2.7.9.tgz.sha512
+echo '609cc82586fabecb25f25ecb410f2938e01d21cde85dd3f8824fe55c6edde9ecf3b7609195473d3fa05a16b9b121464f5414db1a0187103b78ea6edfa71684a7  Python-3.4.3.tgz' > Python-3.4.3.tgz.sha512
 echo '276bd9c061ec9a27d478b33078a86f93164ee2da72210e12e2c9da71dcffeb64767e4460b93f257302b09328eda8655e93c4b9ae85e74472869afbeae35ca71e  blas.tar.gz' > blas.tar.gz.sha512
 echo '00ace5438cfa0c577e5f578d8a808613187eff5217c35164ffe044fbafdfec9e98f4192c02a7d67e01e5a5ccced630583ad1003c37697219b0f147343a3fdd12  bzip2-1.0.6.tar.gz' > bzip2-1.0.6.tar.gz.sha512
 echo 'a296dfcaef7e853e58eed4e24b37c4fa29cfc6ac688def048480f4bb384b9e37ca447faf96eec7b378fd764ba291713f03ac464581d62275e28eb2ec99110ab6  reason-js-20120623.zip' > reason-js-20120623.zip.sha512
 echo '609a68a3675087e0cc95268574f31e104549daa48efe15a25a33b8e269a93b4bd160f4c3e8178dca9c950ef5ca514b039d6fd1b45db6af57f25342464d0429ce  freetype-2.4.12.tar.gz' > freetype-2.4.12.tar.gz.sha512
-echo 'f0da1d2ac855c02fb828444d719a1b23a580adb049335f3e732ace67558a125ac8cd3b3a68ac6bf9d10aa3ab19e4672b814eb28cc8c66910750c62efb655d744  h5py-2.3.1.tar.gz' > h5py-2.3.1.tar.gz.sha512
+echo '4a83f9ae1855a7fad90133b327d426201c8ccfd2e7fbe9f39b2d61a2eee2f3ebe2ea02cf80f3d4e1ad659f8e790c173df8cc99b87d0b7ce63d34aa88cfdc7939  h5py-2.5.0.tar.gz' > h5py-2.5.0.tar.gz.sha512
 echo '4073fba510ccadaba41db0939f909613c9cb52ba8fb6c1062fc9118edc601394c75e102310be1af4077d07c9b327e6bbb1a6359939a7268dc140382d0c1e0199  hdf5-1.8.14.tar.gz' > hdf5-1.8.14.tar.gz.sha512
-echo '4953bf5e9d6d5c6ad538d07d62b5b100fd86a37f6b861238501581c0059bd4655345ca05cf395e79709c38ce4cb9c6293f5d11ac0252a618ad8272b161140d13  ipython-2.2.0.tar.gz' > ipython-2.2.0.tar.gz.sha512
+echo 'a9cffc08ba10c47b0371b05664e55eee0562a30ef0d4bbafae79e52e5b9727906c45840c0918122c06c5672ac65e6eb381399f103e1a836aca003eda81b2acde  ipython-2.4.1.tar.gz' > ipython-2.4.1.tar.gz.sha512
 echo '8770214491e31f0a7a3efaade90eee7b0eb20a8a6ab635c5f854d78263f59a1849133c14ef5123d01023f0110cbb9fc6f818da053c01277914ae81473430a952  lapack-3.4.2.tar.gz' > lapack-3.4.2.tar.gz.sha512
 echo '887582e5a22e4cde338aa8fec7a89f6dd31f2f02b8842735f00f970f64582333fa03401cea6d01704083403c7e8b7ebc26655468ce930165673b33efa4bcd586  libpng-1.6.3.tar.gz' > libpng-1.6.3.tar.gz.sha512
-echo '60aa386639dec17b4f579955df60f2aa7c8ccd589b3490bb9afeb2929ea418d5d1a36a0b02b8d4a6734293076e9069429956c56cf8bd099b756136f2657cf9d4  matplotlib-1.4.0.tar.gz' > matplotlib-1.4.0.tar.gz.sha512
-echo '1ee2fe7a241bf81087e55d9e4ee8fa986f41bb0655d4828d244322c18f3958a1f3111506e2df15aefcf86100b4fe530fcab2d4c041b5945599ed3b3a889d50f5  mercurial-3.1.tar.gz' > mercurial-3.1.tar.gz.sha512
-echo '19499ab08018229ea5195cdac739d6c7c247c5aa5b2c91b801cbd99bad12584ed84c5cfaaa6fa8b4893a46324571a2f8a1988a1381f4ddd58390e597bd7bdc24  nose-1.3.4.tar.gz' > nose-1.3.4.tar.gz.sha512
-echo '996e6b8e2d42f223e44660f56bf73eb8ab124f400d89218f8f5e4d7c9860ada44a4d7c54526137b0695c7a10f36e8834fbf0d42b7cb20bcdb5d5c245d673385c  numpy-1.8.2.tar.gz' > numpy-1.8.2.tar.gz.sha512
-echo '9c0a61299779aff613131aaabbc255c8648f0fa7ab1806af53f19fbdcece0c8a68ddca7880d25b926d67ff1b9201954b207919fb09f6a290acb078e8bbed7b68  python-hglib-1.0.tar.gz' > python-hglib-1.0.tar.gz.sha512
-echo '3d93a8fbd94fc3f1f90df68257cda548ba1adf3d7a819e7a17edc8681894003ac7ae6abd319473054340c11443a6a3817b931366fd7dae78e3807d549c544f8b  pyzmq-14.3.1.tar.gz' > pyzmq-14.3.1.tar.gz.sha512
-echo 'ad1278740c1dc44c5e1b15335d61c4552b66c0439325ed6eeebc5872a1c0ba3fce1dd8509116b318d01e2d41da2ee49ec168da330a7fafd22511138b29f7235d  scipy-0.14.0.tar.gz' > scipy-0.14.0.tar.gz.sha512
+echo '51b0f58b2618b47b653e17e4f6b6a1215d3a3b0f1331ce3555cc7435e365d9c75693f289ce12fe3bf8f69fd57b663e545f0f1c2c94e81eaa661cac0689e125f5  matplotlib-1.4.3.tar.gz' > matplotlib-1.4.3.tar.gz.sha512
+echo 'a61b0d4cf528136991243bb23ac972c11c50ab5681d09f8b2d12cf7d37d3a9d76262f7fe6e7a1834bf6d03e8dc0ebbd9231da982e049e09830341dabefe5d064  mercurial-3.4.tar.gz' > mercurial-3.4.tar.gz.sha512
+echo 'd0cede08dc33a8ac0af0f18063e57f31b615f06e911edb5ca264575174d8f4adb4338448968c403811d9dcc60f38ade3164662d6c7b69b499f56f0984bb6283c  nose-1.3.6.tar.gz' > nose-1.3.6.tar.gz.sha512
+echo '70470ebb9afef5dfd0c83ceb7a9d5f1b7a072b1a9b54b04f04f5ed50fbaedd5b4906bd500472268d478f94df9e749a88698b1ff30f2d80258e7f3fec040617d9  numpy-1.9.2.tar.gz' > numpy-1.9.2.tar.gz.sha512
+echo 'bfd10455e74e30df568c4c4827140fb6cc29893b0e062ce1764bd52852ec7487a70a0f5ea53c3fca7886f5d36365c9f4db52b8c93cad35fb67beeb44a2d56f2d  python-hglib-1.6.tar.gz' > python-hglib-1.6.tar.gz.sha512
+echo '20164f7b05c308e0f089c07fc46b1c522094f3ac136f2e0bba84f19cb63dfd36152a2465df723dd4d93c6fbd2de4f0d94c160e2bbc353a92cfd680eb03cbdc87  pyzmq-14.5.0.tar.gz' > pyzmq-14.5.0.tar.gz.sha512
+echo 'fff4412d850c431a1b4e6ee3b17958ee5ab3beb81e6cb8a8e7d56d368751eaa8781d7c3e69d932dc002d718fddc66a72098acfe74cfe29ec80b24e6736317275  scipy-0.15.1.tar.gz' > scipy-0.15.1.tar.gz.sha512
 echo '96f3e51b46741450bc6b63779c10ebb4a7066860fe544385d64d1eda52592e376a589ef282ace2e1df73df61c10eab1a0d793abbdaf770e60289494d4bf3bcb4  sqlite-autoconf-3071700.tar.gz' > sqlite-autoconf-3071700.tar.gz.sha512
-echo '8a46e75abc3ed2388b5da9cb0e5874ae87580cf3612e2920b662d8f8eee8047efce5aa998eee96661d3565070b1a6b916c8bed74138b821f4e09115f14b6677d  sympy-0.7.5.tar.gz' > sympy-0.7.5.tar.gz.sha512
-echo 'a4e0231e77ebbc2885bab648b292b842cb15c84d66a1972de18cb00fcc611eae2794b872f070ab7d5af32dd0c6c1773527fe1332bd382c1821e1f2d5d76808fb  tornado-4.0.1.tar.gz' > tornado-4.0.1.tar.gz.sha512
-echo '7d70855d0537971841810a66b7a943a88304f6991ce445df19eea034aadc53dbce9d13be92bf44cfef1f3e19511a754eb01006a3968edc1ec3d1766ea4730cda  zeromq-4.0.4.tar.gz' > zeromq-4.0.4.tar.gz.sha512
+echo 'ce0f1a17ac01eb48aec31fc0ad431d9d7ed9907f0e8584a6d79d0ffe6864fe62e203fe3f2a3c3e4e3d485809750ce07507a6488e776a388a7a9a713110882fcf  sympy-0.7.6.tar.gz' > sympy-0.7.6.tar.gz.sha512
+echo '93591068dc63af8d50a7925d528bc0cccdd705232c529b6162619fe28dddaf115e8a460b1842877d35160bd7ed480c1bd0bdbec57d1f359085bd1814e0c1c242  tornado-4.0.2.tar.gz' > tornado-4.0.2.tar.gz.sha512
+echo '0d928ed688ed940d460fa8f8d574a9819dccc4e030d735a8c7db71b59287ee50fa741a08249e356c78356b03c2174f2f2699f05aa7dc3d380ed47d8d7bab5408  zeromq-4.0.5.tar.gz' > zeromq-4.0.5.tar.gz.sha512
 echo 'ece209d4c7ec0cb58ede791444dc754e0d10811cbbdebe3df61c0fd9f9f9867c1c3ccd5f1827f847c005e24eef34fb5bf87b5d3f894d75da04f1797538290e4a  zlib-1.2.8.tar.gz' > zlib-1.2.8.tar.gz.sha512
+echo '9b318ce2ee2cf787929dcb886d76c492b433e71024fda9452d8b4927652a298d6bd1bdb7a4c73883a98e100024f89b46ea8aa14b250f896e549e6dd7e10a6b41  setuptools-18.0.1.tar.gz' > setuptools-18.0.1.tar.gz.sha512
 # Individual processes
 [ -z "$HDF5_DIR" ] && get_ytproject $HDF5.tar.gz
 [ $INST_ZLIB -eq 1 ] && get_ytproject $ZLIB.tar.gz
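
Each echo above pins a tarball to a SHA512 digest in the two-column "<hash>  <filename>" layout that sha512sum and shasum understand; the verification step itself falls outside this hunk. A hashlib-based sketch of the equivalent check (filename illustrative):

    # Verify a downloaded tarball against the .sha512 file written above.
    import hashlib

    def verify(tarball):
        with open(tarball + ".sha512") as f:
            expected = f.read().split()[0]
        digest = hashlib.sha512()
        with open(tarball, "rb") as f:
            for chunk in iter(lambda: f.read(1 << 20), b""):
                digest.update(chunk)
        return digest.hexdigest() == expected

    print(verify("Cython-0.22.tar.gz"))
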
@@ -658,10 +684,11 @@
 [ $INST_SCIPY -eq 1 ] && get_ytproject $SCIPY.tar.gz
 [ $INST_SCIPY -eq 1 ] && get_ytproject blas.tar.gz
 [ $INST_SCIPY -eq 1 ] && get_ytproject $LAPACK.tar.gz
-get_ytproject $PYTHON.tgz
+[ $INST_HG -eq 1 ] && get_ytproject $MERCURIAL.tar.gz
+[ $INST_PY3 -eq 1 ] && get_ytproject $PYTHON3.tgz
+get_ytproject $PYTHON2.tgz
 get_ytproject $NUMPY.tar.gz
 get_ytproject $MATPLOTLIB.tar.gz
-get_ytproject $MERCURIAL.tar.gz
 get_ytproject $IPYTHON.tar.gz
 get_ytproject $H5PY.tar.gz
 get_ytproject $CYTHON.tar.gz
@@ -669,6 +696,7 @@
 get_ytproject $NOSE.tar.gz
 get_ytproject $PYTHON_HGLIB.tar.gz
 get_ytproject $SYMPY.tar.gz
+get_ytproject $SETUPTOOLS.tar.gz
 if [ $INST_BZLIB -eq 1 ]
 then
     if [ ! -e $BZLIB/done ]
@@ -785,11 +813,11 @@
     fi
 fi
 
-if [ ! -e $PYTHON/done ]
+if [ ! -e $PYTHON2/done ]
 then
-    echo "Installing Python.  This may take a while, but don't worry.  yt loves you."
-    [ ! -e $PYTHON ] && tar xfz $PYTHON.tgz
-    cd $PYTHON
+    echo "Installing Python 2. This may take a while, but don't worry. yt loves you."
+    [ ! -e $PYTHON2 ] && tar xfz $PYTHON2.tgz
+    cd $PYTHON2
     ( ./configure --prefix=${DEST_DIR}/ ${PYCONF_ARGS} 2>&1 ) 1>> ${LOG_FILE} || do_exit
 
     ( make ${MAKE_PROCS} 2>&1 ) 1>> ${LOG_FILE} || do_exit
@@ -800,7 +828,30 @@
     cd ..
 fi
 
-export PYTHONPATH=${DEST_DIR}/lib/python2.7/site-packages/
+if [ $INST_PY3 -eq 1 ]
+then
+    if [ ! -e $PYTHON3/done ]
+    then
+        echo "Installing Python 3. Because two Pythons are better than one."
+        [ ! -e $PYTHON3 ] && tar xfz $PYTHON3.tgz
+        cd $PYTHON3
+        ( ./configure --prefix=${DEST_DIR}/ ${PYCONF_ARGS} 2>&1 ) 1>> ${LOG_FILE} || do_exit
+
+        ( make ${MAKE_PROCS} 2>&1 ) 1>> ${LOG_FILE} || do_exit
+        ( make install 2>&1 ) 1>> ${LOG_FILE} || do_exit
+        ( ln -sf ${DEST_DIR}/bin/python3.4 ${DEST_DIR}/bin/pyyt 2>&1 ) 1>> ${LOG_FILE}
+        ( ln -sf ${DEST_DIR}/bin/python3.4 ${DEST_DIR}/bin/python 2>&1 ) 1>> ${LOG_FILE}
+        ( ln -sf ${DEST_DIR}/bin/python3-config ${DEST_DIR}/bin/python-config 2>&1 ) 1>> ${LOG_FILE}
+        ( make clean 2>&1) 1>> ${LOG_FILE} || do_exit
+        touch done
+        cd ..
+    fi
+fi
+
+export PYTHONPATH=${DEST_DIR}/lib/${PYTHON_EXEC}/site-packages/
+
+# Install setuptools
+do_setup_py $SETUPTOOLS
 
 if [ $INST_HG -eq 1 ]
 then
@@ -845,12 +896,10 @@
 
 # This fixes problems with gfortran linking.
 unset LDFLAGS
-
-echo "Installing distribute"
-( ${DEST_DIR}/bin/python2.7 ${YT_DIR}/distribute_setup.py 2>&1 ) 1>> ${LOG_FILE} || do_exit
-
+ 
 echo "Installing pip"
-( ${DEST_DIR}/bin/easy_install-2.7 pip 2>&1 ) 1>> ${LOG_FILE} || do_exit
+( ${GETFILE} https://bootstrap.pypa.io/get-pip.py 2>&1 ) 1>> ${LOG_FILE} || do_exit
+( ${DEST_DIR}/bin/${PYTHON_EXEC} get-pip.py 2>&1 ) 1>> ${LOG_FILE} || do_exit
 
 if [ $INST_SCIPY -eq 0 ]
 then
@@ -914,7 +963,11 @@
    echo "[gui_support]" >> ${DEST_DIR}/src/$MATPLOTLIB/setup.cfg
    echo "macosx = False" >> ${DEST_DIR}/src/$MATPLOTLIB/setup.cfg
 fi
+
+_user_DISPLAY=$DISPLAY
+unset DISPLAY   # see (yt-user link missing: "Installation failure" 01/29/15)
 do_setup_py $MATPLOTLIB
+export DISPLAY=${_user_DISPLAY}
 if [ -n "${OLD_LDFLAGS}" ]
 then
     export LDFLAGS=${OLD_LDFLAGS}
@@ -942,8 +995,8 @@
 fi
 
 do_setup_py $IPYTHON
+do_setup_py $CYTHON
 do_setup_py $H5PY
-do_setup_py $CYTHON
 do_setup_py $NOSE
 do_setup_py $PYTHON_HGLIB
 do_setup_py $SYMPY
@@ -980,13 +1033,14 @@
 
 echo "Installing yt"
 [ $INST_PNG -eq 1 ] && echo $PNG_DIR > png.cfg
-( export PATH=$DEST_DIR/bin:$PATH ; ${DEST_DIR}/bin/python2.7 setup.py develop 2>&1 ) 1>> ${LOG_FILE} || do_exit
+( export PATH=$DEST_DIR/bin:$PATH ; ${DEST_DIR}/bin/${PYTHON_EXEC} setup.py develop 2>&1 ) 1>> ${LOG_FILE} || do_exit
 touch done
 cd $MY_PWD
 
-if !( ( ${DEST_DIR}/bin/python2.7 -c "import readline" 2>&1 )>> ${LOG_FILE})
+if !( ( ${DEST_DIR}/bin/${PYTHON_EXEC} -c "import readline" 2>&1 )>> ${LOG_FILE}) || \
+	[[ "${MYOS##Darwin}" != "${MYOS}" && $INST_PY3 -eq 1 ]] 
 then
-    if !( ( ${DEST_DIR}/bin/python2.7 -c "import gnureadline" 2>&1 )>> ${LOG_FILE})
+    if !( ( ${DEST_DIR}/bin/${PYTHON_EXEC} -c "import gnureadline" 2>&1 )>> ${LOG_FILE})
     then
         echo "Installing pure-python readline"
         ( ${DEST_DIR}/bin/pip install gnureadline 2>&1 ) 1>> ${LOG_FILE}

diff -r 665e2b6cdf78aa7722c300a6505d1bad3c5d44a3 -r 28733726b2a751e774c8b7ae46121aa57fd1060f doc/source/about/index.rst
--- a/doc/source/about/index.rst
+++ b/doc/source/about/index.rst
@@ -31,7 +31,7 @@
 `our members website. <http://yt-project.org/members.html>`_
 
 For an up-to-date list of everyone who has contributed to the yt codebase, 
-see the current `CREDITS <http://hg.yt-project.org/yt/src/yt/CREDITS>`_ file.  
+see the current `CREDITS <http://bitbucket.org/yt_analysis/yt/src/yt/CREDITS>`_ file.  
 For a more detailed breakdown of contributions made by individual users, see our 
 `Open HUB page <https://www.openhub.net/p/yt_amr/contributors?query=&sort=commits>`_.
 

diff -r 665e2b6cdf78aa7722c300a6505d1bad3c5d44a3 -r 28733726b2a751e774c8b7ae46121aa57fd1060f doc/source/analyzing/analysis_modules/SZ_projections.ipynb
--- a/doc/source/analyzing/analysis_modules/SZ_projections.ipynb
+++ b/doc/source/analyzing/analysis_modules/SZ_projections.ipynb
@@ -1,7 +1,7 @@
 {
  "metadata": {
   "name": "",
-  "signature": "sha256:2cc168b2c1737c67647aa29892c0213e7a58233fa53c809f9cd975a4306e9bc8"
+  "signature": "sha256:487383ec23a092310522ec25bd02ad2eb16a3402c5ed3d2b103d33fe17697b3c"
  },
  "nbformat": 3,
  "nbformat_minor": 0,
@@ -70,6 +70,13 @@
      ]
     },
     {
+     "cell_type": "markdown",
+     "metadata": {},
+     "source": [
+      "<font color='red'>**NOTE**</font>: Currently, use of the SZpack library to create S-Z projections in yt is limited to Python 2.x."
+     ]
+    },
+    {
      "cell_type": "heading",
      "level": 2,
      "metadata": {},

diff -r 665e2b6cdf78aa7722c300a6505d1bad3c5d44a3 -r 28733726b2a751e774c8b7ae46121aa57fd1060f doc/source/analyzing/analysis_modules/_images/SED.png
Binary file doc/source/analyzing/analysis_modules/_images/SED.png has changed

diff -r 665e2b6cdf78aa7722c300a6505d1bad3c5d44a3 -r 28733726b2a751e774c8b7ae46121aa57fd1060f doc/source/analyzing/analysis_modules/_images/SFR.png
Binary file doc/source/analyzing/analysis_modules/_images/SFR.png has changed

diff -r 665e2b6cdf78aa7722c300a6505d1bad3c5d44a3 -r 28733726b2a751e774c8b7ae46121aa57fd1060f doc/source/analyzing/analysis_modules/absorption_spectrum.rst
--- a/doc/source/analyzing/analysis_modules/absorption_spectrum.rst
+++ b/doc/source/analyzing/analysis_modules/absorption_spectrum.rst
@@ -5,24 +5,24 @@
 
 .. sectionauthor:: Britton Smith <brittonsmith at gmail.com>
 
-Absorption line spectra, such as shown below, can be made with data created by the 
-(:ref:`light-ray-generator`).  For each element of the ray, column densities are 
-calculated multiplying the number density within a grid cell with the path length 
-of the ray through the cell.  Line profiles are generated using a voigt profile based 
-on the temperature field.  The lines are then shifted according to the redshift 
-recorded by the light ray tool and (optionally) the line of sight peculiar velocity.  
-Inclusion of the peculiar velocity requires setting ``get_los_velocity`` to True in 
-the call to 
+Absorption line spectra, such as the one shown below, can be made with data 
+created by the :ref:`light-ray-generator`.  For each element of the ray, 
+column densities are calculated by multiplying the number density within a 
+grid cell by the path length of the ray through the cell.  Line profiles are 
+generated using a Voigt profile based on the temperature field.  The lines 
+are then shifted according to the redshift recorded by the light ray tool 
+and (optionally) the line-of-sight peculiar velocity.  Inclusion of the 
+peculiar velocity requires setting ``get_los_velocity`` to True in the call to 
 :meth:`~yt.analysis_modules.cosmological_observation.light_ray.light_ray.LightRay.make_light_ray`.
 
-The spectrum generator will output a file containing the wavelength and normalized flux.  
-It will also output a text file listing all important lines.
+The spectrum generator will output a file containing the wavelength and 
+normalized flux.  It will also output a text file listing all important lines.
 
 .. image:: _images/spectrum_full.png
    :width: 500
 
-An absorption spectrum for the wavelength range from 900 to 1800 Angstroms made with 
-a light ray extending from z = 0 to z = 0.4.
+An absorption spectrum for the wavelength range from 900 to 1800 Angstroms 
+made with a light ray extending from z = 0 to z = 0.4.
 
 .. image:: _images/spectrum_zoom.png
    :width: 500
@@ -32,8 +32,8 @@
 Creating an Absorption Spectrum
 -------------------------------
 
-To instantiate an AbsorptionSpectrum object, the arguments required are the minimum and 
-maximum wavelengths, and the number of wavelength bins.
+To instantiate an AbsorptionSpectrum object, the arguments required are the 
+minimum and maximum wavelengths, and the number of wavelength bins.
 
 .. code-block:: python
 
@@ -44,14 +44,18 @@
 Adding Features to the Spectrum
 -------------------------------
 
-Absorption lines and continuum features can then be added to the spectrum.  To add a 
-line, you must know some properties of the line: the rest wavelength, f-value, gamma value, 
-and the atomic mass in amu of the atom.  Below, we will add the H Lyman-alpha line.
+Absorption lines and continuum features can then be added to the spectrum.  
+To add a line, you must know some properties of the line: the rest wavelength, 
+f-value, gamma value, and the atomic mass in amu of the atom.  That line must 
+be tied in some way to a field in the dataset you are loading, and this field
+must be added to the LightRay object when it is created.  Below, we will 
+add the H Lyman-alpha line, which is tied to the neutral hydrogen field 
+('H_number_density').
 
 .. code-block:: python
   
   my_label = 'HI Lya'
-  field = 'HI_NumberDensity'
+  field = 'H_number_density'
   wavelength = 1215.6700 # Angstroms
   f_value = 4.164E-01
   gamma = 6.265e+08
@@ -59,19 +63,22 @@
   
   sp.add_line(my_label, field, wavelength, f_value, gamma, mass, label_threshold=1.e10)
 
-In the above example, the *field* argument tells the spectrum generator which field from the 
-ray data to use to calculate the column density.  The ``label_threshold`` keyword tells the 
-spectrum generator to add all lines above a column density of 10 :superscript:`10` 
-cm :superscript:`-2` to the text line list.  If None is provided, as is the default, no 
-lines of this type will be added to the text list.
+In the above example, the *field* argument tells the spectrum generator which 
+field from the ray data to use to calculate the column density.  The 
+``label_threshold`` keyword tells the spectrum generator to add all lines 
+above a column density of 10 :superscript:`10` cm :superscript:`-2` to the 
+text line list.  If None is provided, as is the default, no lines of this 
+type will be added to the text list.
 
-Continuum features who optical depths follow a power law can also be added.  Below, we will add 
-H Lyman continuum.
+Continuum features with optical depths that follow a power law can also be 
+added.  As when adding lines, you must specify details such as the wavelength
+and the field in the dataset and LightRay to which this feature is tied.
+Below, we will add the H Lyman continuum.
 
 .. code-block:: python
 
  my_label = 'HI Lya continuum'
-  field = 'HI_NumberDensity'
+  field = 'H_number_density'
   wavelength = 912.323660 # Angstroms
   normalization = 1.6e17
   index = 3.0
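
The call that consumes these parameters is cut off by the hunk boundary. In the 3.x API the continuum counterpart to add_line is add_continuum; a sketch of how the snippet presumably concludes:

    # Continuum counterpart to sp.add_line(); a reconstruction, not
    # verbatim from the elided portion of the diff.
    sp.add_continuum(my_label, field, wavelength, normalization, index)
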
@@ -81,25 +88,26 @@
 Making the Spectrum
 -------------------
 
-Once all the lines and continuum are added, it is time to make a spectrum out of 
-some light ray data.
+Once all the lines and continuum are added, it is time to make a spectrum out 
+of some light ray data.
 
 .. code-block:: python
 
-  wavelength, flux = sp.make_spectrum('lightray.h5', output_file='spectrum.fits', 
+  wavelength, flux = sp.make_spectrum('lightray.h5', 
+                                      output_file='spectrum.fits', 
                                       line_list_file='lines.txt',
                                       use_peculiar_velocity=True)
 
-A spectrum will be made using the specified ray data and the wavelength and flux arrays 
-will also be returned.  If ``use_peculiar_velocity`` is set to False, the lines will only 
-be shifted according to the redshift.
+A spectrum will be made using the specified ray data and the wavelength and 
+flux arrays will also be returned.  If ``use_peculiar_velocity`` is set to 
+False, the lines will only be shifted according to the redshift.
 
 Three output file formats are supported for writing out the spectrum: fits, 
 hdf5, and ascii.  The file format used is based on the extension provided 
 in the ``output_file`` keyword: ``.fits`` for a fits file, 
 ``.h5`` for an hdf5 file, and anything else for an ascii file.
 
-.. note:: To write out a fits file, you must install the `pyfits <http://www.stsci.edu/resources/software_hardware/pyfits>`_ module.
+.. note:: To write out a fits file, you must install the `astropy <http://www.astropy.org>`_ Python library in order to access the ``astropy.io.fits`` module.  You can usually do this by running ``pip install astropy`` at the command line.
 
 Fitting an Absorption Spectrum
 ------------------------------
@@ -244,13 +252,13 @@
 
 Line complexes are found using the 
 :func:`~yt.analysis_modules.absorption_spectrum.absorption_spectrum_fit.find_complexes`
-function. The process by which line complexes are found involves walking through
-the array of flux in order from minimum to maximum wavelength, and finding
-series of spatially contiguous cells whose flux is less than some limit.
-These regions are then checked in terms of an additional flux limit and size.
-The bounds of all the passing regions are then listed and returned. Those
-bounds that cover an exceptionally large region of wavelength space will be
-broken up if a suitable cut point is found. This method is only appropriate
+function. The process by which line complexes are found involves walking 
+through the array of flux in order from minimum to maximum wavelength, and 
+finding series of spatially contiguous cells whose flux is less than some 
+limit.  These regions are then checked in terms of an additional flux limit 
+and size.  The bounds of all the passing regions are then listed and returned. 
+Those bounds that cover an exceptionally large region of wavelength space will 
+be broken up if a suitable cut point is found. This method is only appropriate
 for noiseless spectra.
 
 The optional parameter ``complexLim`` (default = 0.999) controls the limit
@@ -264,8 +272,8 @@
 The ``fitLim`` parameter controls the maximum flux that the trough
 of the region can have and still be considered a line complex. This 
 effectively controls the sensitivity to very low column absorbers. Default
-value is ``fitLim`` = 0.99. If a region is identified where the flux of the trough
-is greater than this value, the region is simply ignored.
+value is ``fitLim`` = 0.99. If a region is identified where the flux of the 
+trough is greater than this value, the region is simply ignored.
 
 The ``minLength`` parameter controls the minimum number of array elements 
 that an identified region must have. This value must be greater than or
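
(The context above is cut mid-sentence by the hunk boundary.) The walk the text describes is easy to sketch: collect runs of contiguous flux below complexLim, then keep only runs whose trough dips below fitLim and whose length reaches minLength. A simplified illustration, not yt's actual implementation (defaults shown are illustrative, and the splitting of overly wide regions is omitted):

    import numpy as np

    def find_complexes(flux, complexLim=0.999, fitLim=0.99, minLength=3):
        # Runs of contiguous elements with flux below complexLim.
        below = flux < complexLim
        bounds, start = [], None
        for i, b in enumerate(below):
            if b and start is None:
                start = i
            elif not b and start is not None:
                bounds.append((start, i))
                start = None
        if start is not None:
            bounds.append((start, len(flux)))
        # Additional trough-depth and minimum-size checks.
        return [(lo, hi) for lo, hi in bounds
                if flux[lo:hi].min() < fitLim and hi - lo >= minLength]

    spec = np.ones(100)
    spec[40:50] = 0.5            # a synthetic absorption trough
    print(find_complexes(spec))  # -> [(40, 50)]
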

This diff is so big that we needed to truncate the remainder.

https://bitbucket.org/yt_analysis/yt/commits/329b5039b031/
Changeset:   329b5039b031
Branch:      stable
User:        brittonsmith
Date:        2015-07-24 16:41:05+00:00
Summary:     Added tag yt-3.2 for changeset 28733726b2a7
Affected #:  1 file

diff -r 28733726b2a751e774c8b7ae46121aa57fd1060f -r 329b5039b031d0ba8d3dff8de9127b623726fc11 .hgtags
--- a/.hgtags
+++ b/.hgtags
@@ -5181,3 +5181,4 @@
 0cf350f11a551f5a5b4039a70e9ff6d98342d1da yt-3.0.1
 511887af4c995a78fe606e58ce8162c88380ecdc yt-3.0.2
 fd7cdc4836188a3badf81adb477bcc1b9632e485 yt-3.1.0
+28733726b2a751e774c8b7ae46121aa57fd1060f yt-3.2

Repository URL: https://bitbucket.org/yt_analysis/yt/

--

This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.


